From 1a5d0b51642eb70f71e693355e86212d3607b7b3 Mon Sep 17 00:00:00 2001
From: Walter Kolczynski - NOAA
Date: Fri, 20 Oct 2023 15:54:33 +0000
Subject: [PATCH 01/34] Split clean-up into separate job (#1906)

Moves the clean-up that was previously done in the archive jobs into
its own separate job. The clean-up is also streamlined considerably by
using only `COM_TOP` instead of going through every template. The
function that does the actual removing is also streamlined and
corrected. Settings used by both jobs were elevated to `config.base`;
settings needed only for cleanup were moved to the new config for that
job.

Also corrects a small error encountered when attempting to rerun an
ensemble forecast.

Resolves #583
Resolves #1872
---
 jobs/JGLOBAL_CLEANUP                       |  17 ++
 jobs/rocoto/arch.sh                        |   1 -
 jobs/rocoto/cleanup.sh                     |  19 +++
 parm/config/gfs/config.arch                |   9 -
 parm/config/gfs/config.base.emc.dyn        |   5 +-
 parm/config/gfs/config.cleanup             |  25 +++
 parm/config/gfs/config.resources           |   9 +-
 scripts/exgdas_enkf_earc.sh                | 168 ------------------
 scripts/exgdas_enkf_fcst.sh                |   2 +
 scripts/exglobal_archive.sh                | 189 ---------------------
 scripts/exglobal_cleanup.sh                | 106 ++++++++++++
 workflow/applications/gfs_cycled.py        |   6 +-
 workflow/applications/gfs_forecast_only.py |   4 +-
 workflow/rocoto/gfs_tasks.py               |  17 ++
 workflow/rocoto/tasks.py                   |   2 +-
 15 files changed, 204 insertions(+), 375 deletions(-)
 create mode 100755 jobs/JGLOBAL_CLEANUP
 create mode 100755 jobs/rocoto/cleanup.sh
 create mode 100644 parm/config/gfs/config.cleanup
 create mode 100755 scripts/exglobal_cleanup.sh

diff --git a/jobs/JGLOBAL_CLEANUP b/jobs/JGLOBAL_CLEANUP
new file mode 100755
index 0000000000..ad938ccf60
--- /dev/null
+++ b/jobs/JGLOBAL_CLEANUP
@@ -0,0 +1,17 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "cleanup" -c "base cleanup"
+
+"${HOMEgfs}/scripts/exglobal_cleanup.sh"
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+##########################################
+# Remove the Temporary working directory
+##########################################
+cd "${DATAROOT}" || (echo "${DATAROOT} does not exist. ABORT!"; exit 1)
+[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}"
+
+exit 0
+
diff --git a/jobs/rocoto/arch.sh b/jobs/rocoto/arch.sh
index 2f62d8b354..d949b7d76f 100755
--- a/jobs/rocoto/arch.sh
+++ b/jobs/rocoto/arch.sh
@@ -16,5 +16,4 @@ export jobid="${job}.$$"
 "${HOMEgfs}"/jobs/JGLOBAL_ARCHIVE
 status=$?
-
 exit "${status}"
diff --git a/jobs/rocoto/cleanup.sh b/jobs/rocoto/cleanup.sh
new file mode 100755
index 0000000000..96303fde57
--- /dev/null
+++ b/jobs/rocoto/cleanup.sh
@@ -0,0 +1,19 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Source FV3GFS workflow modules
+. "${HOMEgfs}"/ush/load_fv3gfs_modules.sh
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+export job="cleanup"
+export jobid="${job}.$$"
+
+###############################################################
+# Execute the JJOB
+"${HOMEgfs}"/jobs/JGLOBAL_CLEANUP
+status=$?
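+# Pass the JJOB's exit status up to Rocoto so a failed cleanup marks the task failed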
+ +exit "${status}" diff --git a/parm/config/gfs/config.arch b/parm/config/gfs/config.arch index 31a3713fb1..a23bcce6ae 100644 --- a/parm/config/gfs/config.arch +++ b/parm/config/gfs/config.arch @@ -12,13 +12,4 @@ export ARCH_GAUSSIAN="YES" export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS} export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} -#--online archive of nemsio files for fit2obs verification -export FITSARC="YES" -export FHMAX_FITS=132 -[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} - -#--starting and ending hours of previous cycles to be removed from rotating directory -export RMOLDSTD=144 -export RMOLDEND=24 - echo "END: config.arch" diff --git a/parm/config/gfs/config.base.emc.dyn b/parm/config/gfs/config.base.emc.dyn index 09d8897a31..b77787794c 100644 --- a/parm/config/gfs/config.base.emc.dyn +++ b/parm/config/gfs/config.base.emc.dyn @@ -394,6 +394,9 @@ export ARCH_CYC=00 # Archive data at this cycle for warm_start capabil export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability -export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} echo "END: config.base" diff --git a/parm/config/gfs/config.cleanup b/parm/config/gfs/config.cleanup new file mode 100644 index 0000000000..1908c91bb5 --- /dev/null +++ b/parm/config/gfs/config.cleanup @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.cleanup ########## +echo "BEGIN: config.cleanup" + +# Get task specific resources +source "${EXPDIR}/config.resources" cleanup + +export CLEANUP_COM="YES" # NO=retain ROTDIR. YES default in cleanup.sh + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +# Specify the list of files to exclude from the first stage of cleanup +# Because arrays cannot be exported, list is a single string of comma- +# separated values. This string is split to form an array at runtime. 
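+# For example, exglobal_cleanup.sh (added later in this patch) recovers the
+# array at runtime with:
+#   IFS=", " read -r -a exclude_list <<< "${exclude_string:-}"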
+case ${RUN} in + gdas | gfs) exclude_string="*prepbufr*, *cnvstat*, *atmanl.nc" ;; + enkf*) exclude_string="*f006.ens*" ;; + *) exclude_string="" ;; +esac +export exclude_string + +echo "END: config.cleanup" \ No newline at end of file diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index d6654b61ed..6503ae5523 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -14,7 +14,7 @@ if [[ $# -ne 1 ]]; then echo "atmensanlinit atmensanlrun atmensanlfinal" echo "landanl" echo "aeroanlinit aeroanlrun aeroanlfinal" - echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch cleanup echgres" echo "eobs ediag eomg eupd ecen esfc efcs epos earc" echo "init_chem mom6ic ocnpost" echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" @@ -773,6 +773,13 @@ elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then eval "export memory_${step}=50GB" fi +elif [[ ${step} == "cleanup" ]]; then + export wtime_cleanup="01:00:00" + export npe_cleanup=1 + export npe_node_cleanup=1 + export nth_cleanup=1 + export memory_cleanup="4096M" + elif [[ ${step} = "stage_ic" ]]; then export wtime_stage_ic="00:15:00" diff --git a/scripts/exgdas_enkf_earc.sh b/scripts/exgdas_enkf_earc.sh index 1bb941f888..a1bcba4d79 100755 --- a/scripts/exgdas_enkf_earc.sh +++ b/scripts/exgdas_enkf_earc.sh @@ -133,172 +133,4 @@ if [ "${ENSGRP}" -eq 0 ]; then "gsistat.${RUN}.${PDY}${cyc}.ensmean" fi - -if [[ "${DELETE_COM_IN_ARCHIVE_JOB:-YES}" == NO ]] ; then - exit 0 -fi - -############################################################### -# ENSGRP 0 also does clean-up -############################################################### -if [[ "${ENSGRP}" -eq 0 ]]; then - function remove_files() { - # TODO: move this to a new location - local directory=$1 - shift - if [[ ! -d ${directory} ]]; then - echo "No directory ${directory} to remove files from, skiping" - return - fi - local exclude_list="" - if (($# > 0)); then - exclude_list=$* - fi - local file_list - declare -a file_list - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - readarray -t file_list < <(find -L "${directory}" -type f) - if (( ${#file_list[@]} == 0 )); then return; fi - for exclude in ${exclude_list}; do - echo "Excluding ${exclude}" - declare -a file_list_old=("${file_list[@]}") - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - readarray file_list < <(printf -- '%s\n' "${file_list_old[@]}" | grep -v "${exclude}") - if (( ${#file_list[@]} == 0 )); then return; fi - done - - for file in "${file_list[@]}"; do - rm -f "${file}" - done - # Remove directory if empty - rmdir "${directory}" || true - } - - # Start start and end dates to remove - GDATEEND=$(${NDATE} -"${RMOLDEND_ENKF:-24}" "${PDY}${cyc}") - GDATE=$(${NDATE} -"${RMOLDSTD_ENKF:-120}" "${PDY}${cyc}") - - while [ "${GDATE}" -le "${GDATEEND}" ]; do - - gPDY="${GDATE:0:8}" - gcyc="${GDATE:8:2}" - - if [[ -d ${COM_TOP} ]]; then - rocotolog="${EXPDIR}/logs/${GDATE}.log" - if [[ -f "${rocotolog}" ]]; then - set +e - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - testend=$(tail -n 1 "${rocotolog}" | grep "This cycle is complete: Success") - rc=$? 
- set_strict - if [ "${rc}" -eq 0 ]; then - case ${CDUMP} in - gdas) nmem="${NMEM_ENS}";; - gfs) nmem="${NMEM_ENS_GFS}";; - *) - echo "FATAL ERROR: Unknown CDUMP ${CDUMP} during cleanup" - exit 10 - ;; - esac - - readarray memlist< <(seq --format="mem%03g" 1 "${nmem}") - memlist+=("ensstat") - - for mem in "${memlist[@]}"; do - # Atmos - exclude_list="f006.ens" - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - templates=$(compgen -A variable | grep 'COM_ATMOS_.*_TMPL') - for template in ${templates}; do - MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Wave - exclude_list="" - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - templates=$(compgen -A variable | grep 'COM_WAVE_.*_TMPL') - for template in ${templates}; do - MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Ocean - exclude_list="" - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - templates=$(compgen -A variable | grep 'COM_OCEAN_.*_TMPL') - for template in ${templates}; do - MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Ice - exclude_list="" - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - templates=$(compgen -A variable | grep 'COM_ICE_.*_TMPL') - for template in ${templates}; do - MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Aerosols (GOCART) - exclude_list="" - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - templates=$(compgen -A variable | grep 'COM_CHEM_.*_TMPL') - for template in ${templates}; do - MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Mediator - exclude_list="" - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - templates=$(compgen -A variable | grep 'COM_MED_.*_TMPL') - for template in ${templates}; do - MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - done - fi - fi - fi - - # Remove any empty directories - YMD=${gPDY} HH=${gcyc} generate_com target_dir:COM_TOP_TMPL - target_dir="${ROTDIR:?}/${RUN}.${gPDY}/${gcyc}/" - if [[ -d ${target_dir} ]]; then - find "${target_dir}" -empty -type d -delete - fi - - # Advance to next cycle - GDATE=$(${NDATE} +"${assim_freq}" "${GDATE}") - done -fi - -# Remove enkf*.$rPDY for the older of GDATE or RDATE -GDATE=$(${NDATE} -"${RMOLDSTD_ENKF:-120}" "${PDY}${cyc}") -fhmax=${FHMAX_GFS} -RDATE=$(${NDATE} -"${fhmax}" "${PDY}${cyc}") -if [ "${GDATE}" -lt "${RDATE}" ]; then - RDATE=${GDATE} -fi -rPDY=$(echo "${RDATE}" | cut -c1-8) -clist="enkfgdas enkfgfs" -for ctype in ${clist}; do - COMIN="${ROTDIR}/${ctype}.${rPDY}" - [[ -d ${COMIN} ]] && rm -rf "${COMIN}" -done - -############################################################### - - exit 0 diff --git a/scripts/exgdas_enkf_fcst.sh b/scripts/exgdas_enkf_fcst.sh index 85344e4e35..7eb2a3a711 100755 --- a/scripts/exgdas_enkf_fcst.sh +++ b/scripts/exgdas_enkf_fcst.sh @@ -122,7 
+122,9 @@ for imem in $(seq "${ENSBEG}" "${ENSEND}"); do skip_mem="NO" if [[ -f ${EFCSGRP}.fail ]]; then + set +e memstat=$(grep "MEMBER ${ENSMEM}" "${EFCSGRP}.fail" | grep -c "PASS") + set_strict [[ ${memstat} -eq 1 ]] && skip_mem="YES" fi diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh index 5fea07f4ed..78a6d60b65 100755 --- a/scripts/exglobal_archive.sh +++ b/scripts/exglobal_archive.sh @@ -284,193 +284,4 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then fi ##end of HPSS archive ############################################################### - - -############################################################### -# Clean up previous cycles; various depths -# PRIOR CYCLE: Leave the prior cycle alone -GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") - -# PREVIOUS to the PRIOR CYCLE -GDATE=$(${NDATE} -"${assim_freq}" "${GDATE}") -gPDY="${GDATE:0:8}" -gcyc="${GDATE:8:2}" - -# Remove the TMPDIR directory -# TODO Only prepbufr is currently using this directory, and all jobs should be -# cleaning up after themselves anyway -COMIN="${DATAROOT}/${GDATE}" -[[ -d ${COMIN} ]] && rm -rf "${COMIN}" - -if [[ "${DELETE_COM_IN_ARCHIVE_JOB:-YES}" == NO ]] ; then - exit 0 -fi - -# Step back every assim_freq hours and remove old rotating directories -# for successful cycles (defaults from 24h to 120h). -# Retain files needed by Fit2Obs -# TODO: This whole section needs to be revamped to remove marine component -# directories and not look at the rocoto log. -GDATEEND=$(${NDATE} -"${RMOLDEND:-24}" "${PDY}${cyc}") -GDATE=$(${NDATE} -"${RMOLDSTD:-120}" "${PDY}${cyc}") -RTOFS_DATE=$(${NDATE} -48 "${PDY}${cyc}") -function remove_files() { - # TODO: move this to a new location - local directory=$1 - shift - if [[ ! -d ${directory} ]]; then - echo "No directory ${directory} to remove files from, skiping" - return - fi - local exclude_list="" - if (($# > 0)); then - exclude_list=$* - fi - local file_list - declare -a file_list - readarray -t file_list < <(find -L "${directory}" -type f) - if (( ${#file_list[@]} == 0 )); then return; fi - # echo "Number of files to remove before exclusions: ${#file_list[@]}" - for exclude in ${exclude_list}; do - echo "Excluding ${exclude}" - declare -a file_list_old=("${file_list[@]}") - readarray file_list < <(printf -- '%s\n' "${file_list_old[@]}" | grep -v "${exclude}") - # echo "Number of files to remove after exclusion: ${#file_list[@]}" - if (( ${#file_list[@]} == 0 )); then return; fi - done - # echo "Number of files to remove after exclusions: ${#file_list[@]}" - - for file in "${file_list[@]}"; do - rm -f "${file}" - done - # Remove directory if empty - rmdir "${directory}" || true -} - -while [ "${GDATE}" -le "${GDATEEND}" ]; do - gPDY="${GDATE:0:8}" - gcyc="${GDATE:8:2}" - COMINrtofs="${ROTDIR}/rtofs.${gPDY}" - if [ -d "${COM_TOP}" ]; then - rocotolog="${EXPDIR}/logs/${GDATE}.log" - if [ -f "${rocotolog}" ]; then - set +e - testend=$(tail -n 1 "${rocotolog}" | grep "This cycle is complete: Success") - rc=$? 
- set_strict - - if [ "${rc}" -eq 0 ]; then - # Obs - exclude_list="prepbufr" - templates="COM_OBS" - for template in ${templates}; do - YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Atmos - exclude_list="cnvstat atmanl.nc" - templates=$(compgen -A variable | grep 'COM_ATMOS_.*_TMPL') - for template in ${templates}; do - YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Wave - exclude_list="" - templates=$(compgen -A variable | grep 'COM_WAVE_.*_TMPL') - for template in ${templates}; do - YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Ocean - exclude_list="" - templates=$(compgen -A variable | grep 'COM_OCEAN_.*_TMPL') - for template in ${templates}; do - YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Ice - exclude_list="" - templates=$(compgen -A variable | grep 'COM_ICE_.*_TMPL') - for template in ${templates}; do - YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Aerosols (GOCART) - exclude_list="" - templates=$(compgen -A variable | grep 'COM_CHEM_.*_TMPL') - for template in ${templates}; do - YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Mediator - exclude_list="" - templates=$(compgen -A variable | grep 'COM_MED_.*_TMPL') - for template in ${templates}; do - YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - if [ -d "${COMINrtofs}" ] && [ "${GDATE}" -lt "${RTOFS_DATE}" ]; then rm -rf "${COMINrtofs}" ; fi - fi - fi - fi - - # Remove mdl gfsmos directory - if [ "${RUN}" = "gfs" ]; then - COMIN="${ROTDIR}/gfsmos.${gPDY}" - if [ -d "${COMIN}" ] && [ "${GDATE}" -lt "${CDATE_MOS}" ]; then rm -rf "${COMIN}" ; fi - fi - - # Remove any empty directories - target_dir="${ROTDIR:?}/${RUN}.${gPDY}/${gcyc}/" - if [[ -d ${target_dir} ]]; then - find "${target_dir}" -empty -type d -delete - fi - - GDATE=$(${NDATE} +"${assim_freq}" "${GDATE}") -done - -# Remove archived gaussian files used for Fit2Obs in $VFYARC that are -# $FHMAX_FITS plus a delta before $CDATE. Touch existing archived -# gaussian files to prevent the files from being removed by automatic -# scrubber present on some machines. 
- -if [ "${RUN}" = "gfs" ]; then - fhmax=$((FHMAX_FITS+36)) - RDATE=$(${NDATE} -"${fhmax}" "${PDY}${cyc}") - rPDY=$(echo "${RDATE}" | cut -c1-8) - COMIN="${VFYARC}/${RUN}.${rPDY}" - [[ -d ${COMIN} ]] && rm -rf "${COMIN}" - - TDATE=$(${NDATE} -"${FHMAX_FITS}" "${PDY}${cyc}") - while [ "${TDATE}" -lt "${PDY}${cyc}" ]; do - tPDY=$(echo "${TDATE}" | cut -c1-8) - tcyc=$(echo "${TDATE}" | cut -c9-10) - TDIR=${VFYARC}/${RUN}.${tPDY}/${tcyc} - [[ -d ${TDIR} ]] && touch "${TDIR}"/* - TDATE=$(${NDATE} +6 "${TDATE}") - done -fi - -# Remove $RUN.$rPDY for the older of GDATE or RDATE -GDATE=$(${NDATE} -"${RMOLDSTD:-120}" "${PDY}${cyc}") -fhmax=${FHMAX_GFS} -RDATE=$(${NDATE} -"${fhmax}" "${PDY}${cyc}") -if [ "${GDATE}" -lt "${RDATE}" ]; then - RDATE=${GDATE} -fi -rPDY=$(echo "${RDATE}" | cut -c1-8) -COMIN="${ROTDIR}/${RUN}.${rPDY}" -[[ -d ${COMIN} ]] && rm -rf "${COMIN}" - - -############################################################### - - exit 0 diff --git a/scripts/exglobal_cleanup.sh b/scripts/exglobal_cleanup.sh new file mode 100755 index 0000000000..5d7c0a9788 --- /dev/null +++ b/scripts/exglobal_cleanup.sh @@ -0,0 +1,106 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Clean up previous cycles; various depths +# PRIOR CYCLE: Leave the prior cycle alone +# shellcheck disable=SC2153 +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${assim_freq} hours") +# PREVIOUS to the PRIOR CYCLE +GDATE=$(date --utc +%Y%m%d%H -d "${GDATE:0:8} ${GDATE:8:2} -${assim_freq} hours") + +# Remove the TMPDIR directory +# TODO Only prepbufr is currently using this directory, and all jobs should be +# cleaning up after themselves anyway +COMIN="${DATAROOT}/${GDATE}" +[[ -d ${COMIN} ]] && rm -rf "${COMIN}" + +if [[ "${CLEANUP_COM:-YES}" == NO ]] ; then + exit 0 +fi + +# Step back every assim_freq hours and remove old rotating directories +# for successful cycles (defaults from 24h to 120h). +# Retain files needed by Fit2Obs +last_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDEND:-24} hours" ) +first_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDSTD:-120} hours") +last_rtofs=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDRTOFS:-48} hours") +function remove_files() { + local directory=$1 + shift + if [[ ! -d ${directory} ]]; then + echo "No directory ${directory} to remove files from, skiping" + return + fi + local find_exclude_string="" + for exclude in "$@"; do + find_exclude_string+="${find_exclude_string} -name ${exclude} -or" + done + # Chop off any trailing or + find_exclude_string="${find_exclude_string[*]/%-or}" + # Remove all regular files that do not match + # shellcheck disable=SC2086 + find "${directory}" -type f -not \( ${find_exclude_string} \) -delete + # Remove all symlinks that do not match + # shellcheck disable=SC2086 + find "${directory}" -type l -not \( ${find_exclude_string} \) -delete + # Remove any empty directories + find "${directory}" -type d -empty -delete +} + +for (( current_date=first_date; current_date <= last_date; \ + current_date=$(date --utc +%Y%m%d%H -d "${current_date:0:8} ${current_date:8:2} +${assim_freq} hours") )); do + current_PDY="${current_date:0:8}" + current_cyc="${current_date:8:2}" + rtofs_dir="${ROTDIR}/rtofs.${current_PDY}" + rocotolog="${EXPDIR}/logs/${current_date}.log" + if [[ -f "${rocotolog}" ]]; then + # TODO: This needs to be revamped to not look at the rocoto log. 
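+    # A completed cycle's rocoto log (${EXPDIR}/logs/<cycle>.log) ends with the
+    # line "This cycle is complete: Success", so matching the output of
+    # tail -n 1 against that string is a cheap completion check before
+    # anything is deleted.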
+ # shellcheck disable=SC2312 + if [[ $(tail -n 1 "${rocotolog}") =~ "This cycle is complete: Success" ]]; then + YMD="${current_PDY}" HH="${current_cyc}" generate_com COM_TOP + if [[ -d "${COM_TOP}" ]]; then + IFS=", " read -r -a exclude_list <<< "${exclude_string:-}" + remove_files "${COM_TOP}" "${exclude_list[@]:-}" + fi + if [[ -d "${rtofs_dir}" ]] && (( current_date < last_rtofs )); then rm -rf "${rtofs_dir}" ; fi + fi + fi + + # Remove mdl gfsmos directory + if [[ "${RUN}" == "gfs" ]]; then + mos_dir="${ROTDIR}/gfsmos.${current_PDY}" + if [[ -d "${mos_dir}" ]] && (( current_date < CDATE_MOS )); then rm -rf "${mos_dir}" ; fi + fi +done + +# Remove archived gaussian files used for Fit2Obs in $VFYARC that are +# $FHMAX_FITS plus a delta before $CDATE. Touch existing archived +# gaussian files to prevent the files from being removed by automatic +# scrubber present on some machines. + +if [[ "${RUN}" == "gfs" ]]; then + fhmax=$((FHMAX_FITS + 36)) + RDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${fhmax} hours") + verify_dir="${ROTDIR}/vrfyarch/${RUN}.${RDATE:0:8}" + [[ -d ${verify_dir} ]] && rm -rf "${verify_dir}" + + touch_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${FHMAX_FITS} hours") + while (( touch_date < "${PDY}${cyc}" )); do + touch_PDY="${touch_date:0:8}" + touch_cyc="${touch_date:8:2}" + touch_dir="${ROTDIR}/vrfyarch/${RUN}.${touch_PDY}/${touch_cyc}" + [[ -d ${touch_dir} ]] && touch "${touch_dir}"/* + touch_date=$(date --utc +%Y%m%d%H -d "${touch_PDY} ${touch_cyc} +6 hours") + done +fi + +# Remove $RUN.$rPDY for the older of GDATE or RDATE +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDSTD:-120} hours") +RDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${FHMAX_GFS} hours") +if (( GDATE < RDATE )); then + RDATE=${GDATE} +fi +deletion_target="${ROTDIR}/${RUN}.${RDATE:0:8}" +if [[ -d ${deletion_target} ]]; then rm -rf "${deletion_target}"; fi diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 633e93bac0..6eff929d5f 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -47,7 +47,7 @@ def _get_app_configs(self): if self.do_ocean: configs += ['ocnpost'] - configs += ['sfcanl', 'analcalc', 'fcst', 'post', 'vrfy', 'fit2obs', 'arch'] + configs += ['sfcanl', 'analcalc', 'fcst', 'post', 'vrfy', 'fit2obs', 'arch', 'cleanup'] if self.do_hybvar: if self.do_jediatmens: @@ -106,7 +106,7 @@ def get_task_names(self): # gdas_gfs_common_tasks_after_fcst += ['ocnpost'] gdas_gfs_common_tasks_after_fcst += ['vrfy'] - gdas_gfs_common_cleanup_tasks = ['arch'] + gdas_gfs_common_cleanup_tasks = ['arch', 'cleanup'] if self.do_jediatmvar: gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal'] @@ -137,7 +137,7 @@ def get_task_names(self): else: hybrid_tasks += ['eobs', 'eupd', 'echgres'] hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] - hybrid_after_eupd_tasks += ['ecen', 'esfc', 'efcs', 'epos', 'earc'] + hybrid_after_eupd_tasks += ['ecen', 'esfc', 'efcs', 'epos', 'earc', 'cleanup'] # Collect all "gdas" cycle tasks gdas_tasks = gdas_gfs_common_tasks_before_fcst.copy() diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py index e6d1ab35a2..73e17ee7aa 100644 --- a/workflow/applications/gfs_forecast_only.py +++ b/workflow/applications/gfs_forecast_only.py @@ -15,7 +15,7 @@ def _get_app_configs(self): Returns the config_files that are involved in the forecast-only app """ - configs = ['stage_ic', 'fcst', 'arch'] 
+ configs = ['stage_ic', 'fcst', 'arch', 'cleanup'] if self.do_atm: configs += ['post', 'vrfy'] @@ -109,6 +109,6 @@ def get_task_names(self): if self.do_wafs: tasks += ['wafs', 'wafsgcip', 'wafsgrib2', 'wafsgrib20p25', 'wafsblending', 'wafsblending0p25'] - tasks += ['arch'] # arch **must** be the last task + tasks += ['arch', 'cleanup'] # arch and cleanup **must** be the last tasks return {f"{self._base['CDUMP']}": tasks} diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 33e2ec82f3..e41e4ebcc1 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -944,6 +944,23 @@ def arch(self): return task + # Cleanup + def cleanup(self): + deps = [] + if 'enkf' in self.cdump: + dep_dict = {'type': 'metatask', 'name': 'enkfgdaseamn'} + deps.append(rocoto.add_dependency(dep_dict)) + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}arch'} + deps.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('cleanup') + task = create_wf_task('cleanup', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + # Start of ensemble tasks def eobs(self): deps = [] diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index b9716c938e..29ed57daf2 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -11,7 +11,7 @@ class Tasks: SERVICE_TASKS = ['arch', 'earc'] VALID_TASKS = ['aerosol_init', 'stage_ic', - 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', + 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', "cleanup", 'prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal', 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy', 'earc', 'ecen', 'echgres', 'ediag', 'efcs', From 08ce4f8d3ed8e07b4d488a80b5054c6206b04404 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Fri, 20 Oct 2023 16:14:53 +0000 Subject: [PATCH 02/34] Fix enkfgfs cleanup dependency (#1941) When #1906 was merged, the dependency for enkf cycles was hard-coded to use the enkfgdas archive instead of depending on the `RUN`. --- workflow/rocoto/gfs_tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index e41e4ebcc1..56449cb9d5 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -948,7 +948,7 @@ def arch(self): def cleanup(self): deps = [] if 'enkf' in self.cdump: - dep_dict = {'type': 'metatask', 'name': 'enkfgdaseamn'} + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}eamn'} deps.append(rocoto.add_dependency(dep_dict)) else: dep_dict = {'type': 'task', 'name': f'{self.cdump}arch'} From 4b5cd0bc435fc158258ca38c3d5f44add6b60469 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Mon, 23 Oct 2023 12:04:26 -0400 Subject: [PATCH 03/34] Fix nth_eupd in gfs/config.resources. remove sections of jobs not run as part of gefs from gefs/config.resources (#1952) --- parm/config/gefs/config.resources | 407 +----------------------------- parm/config/gfs/config.resources | 4 +- 2 files changed, 3 insertions(+), 408 deletions(-) diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources index 91699a9886..33156a768a 100644 --- a/parm/config/gefs/config.resources +++ b/parm/config/gefs/config.resources @@ -9,17 +9,12 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input task argument to set resource variables!" 
echo "argument can be any one of the following:" echo "coupled_ic aerosol_init" - echo "atmanlinit atmanlrun atmanlfinal" - echo "atmensanlinit atmensanlrun atmensanlfinal" - echo "landanlprep landanlinit landanlrun landanlfinal" - echo "aeroanlinit aeroanlrun aeroanlfinal" - echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" - echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "ecen esfc efcs epos earc" echo "init_chem mom6ic ocnpost" echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" echo "wavegempak waveawipsbulls waveawipsgridded" echo "postsnd awips gempak" - echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" exit 1 fi @@ -168,303 +163,6 @@ elif [[ ${step} = "waveawipsgridded" ]]; then export NTASKS=${npe_waveawipsgridded} export memory_waveawipsgridded_gfs="1GB" -elif [[ "${step}" = "atmanlinit" ]]; then - - export wtime_atmanlinit="00:10:00" - export npe_atmanlinit=1 - export nth_atmanlinit=1 - npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) - export npe_node_atmanlinit - export memory_atmanlinit="3072M" - -elif [[ "${step}" = "atmanlrun" ]]; then - - # make below case dependent later - export layout_x=1 - export layout_y=1 - - export wtime_atmanlrun="00:30:00" - npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmanlrun - npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmanlrun_gfs - export nth_atmanlrun=1 - export nth_atmanlrun_gfs=${nth_atmanlrun} - npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) - export npe_node_atmanlrun - export is_exclusive=True - -elif [[ "${step}" = "atmanlfinal" ]]; then - - export wtime_atmanlfinal="00:30:00" - export npe_atmanlfinal=${npe_node_max} - export nth_atmanlfinal=1 - npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) - export npe_node_atmanlfinal - export is_exclusive=True - -elif [[ "${step}" = "landanlprep" || "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then - # below lines are for creating JEDI YAML - case ${CASE} in - C768) - layout_x=6 - layout_y=6 - ;; - C384) - layout_x=5 - layout_y=5 - ;; - C192 | C96 | C48) - layout_x=1 - layout_y=1 - ;; - *) - echo "FATAL ERROR: Resolution not supported for land analysis'" - exit 1 - esac - - export layout_x - export layout_y - - if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then - declare -x "wtime_${step}"="00:10:00" - declare -x "npe_${step}"=1 - declare -x "nth_${step}"=1 - temp_stepname="nth_${step}" - declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" - declare -x "memory_${step}"="3072M" - elif [[ "${step}" = "landanlrun" ]]; then - export wtime_landanlrun="00:30:00" - npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_landanlrun - export nth_landanlrun=1 - npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) - export npe_node_landanlrun - export is_exclusive=True - elif [[ "${step}" = "landanlprep" ]]; then - export wtime_landanlprep="00:30:00" - npe_landanlprep=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_landanlprep - export nth_landanlprep=1 - npe_node_landanlprep=$(echo "${npe_node_max} / ${nth_landanlprep}" | bc) - export npe_node_landanlprep - export is_exclusive=True - fi - -elif [[ "${step}" = "aeroanlinit" ]]; then - - # below lines are for creating JEDI 
YAML - case ${CASE} in - C768) - layout_x=6 - layout_y=6 - ;; - C384) - layout_x=5 - layout_y=5 - ;; - C192 | C96 | C48) - layout_x=8 - layout_y=8 - ;; - *) - echo "FATAL ERROR: Resolution not supported for aerosol analysis'" - exit 1 - esac - - export layout_x - export layout_y - - export wtime_aeroanlinit="00:10:00" - export npe_aeroanlinit=1 - export nth_aeroanlinit=1 - npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) - export npe_node_aeroanlinit - export memory_aeroanlinit="3072M" - -elif [[ "${step}" = "aeroanlrun" ]]; then - - case ${CASE} in - C768) - layout_x=6 - layout_y=6 - ;; - C384) - layout_x=5 - layout_y=5 - ;; - C192 | C96 | C48) - layout_x=8 - layout_y=8 - ;; - *) - echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" - exit 1 - esac - - export layout_x - export layout_y - - export wtime_aeroanlrun="00:30:00" - npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_aeroanlrun - npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_aeroanlrun_gfs - export nth_aeroanlrun=1 - export nth_aeroanlrun_gfs=1 - npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) - export npe_node_aeroanlrun - export is_exclusive=True - -elif [[ "${step}" = "aeroanlfinal" ]]; then - - export wtime_aeroanlfinal="00:10:00" - export npe_aeroanlfinal=1 - export nth_aeroanlfinal=1 - npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) - export npe_node_aeroanlfinal - export memory_aeroanlfinal="3072M" - -elif [[ "${step}" = "ocnanalprep" ]]; then - - export wtime_ocnanalprep="00:10:00" - export npe_ocnanalprep=1 - export nth_ocnanalprep=1 - npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) - export npe_node_ocnanalprep - export memory_ocnanalprep="24GB" - -elif [[ "${step}" = "ocnanalbmat" ]]; then - npes=16 - case ${CASE} in - C384) - npes=480 - ;; - C48) - npes=16 - ;; - *) - echo "FATAL: Resolution not supported'" - exit 1 - esac - - export wtime_ocnanalbmat="00:30:00" - export npe_ocnanalbmat=${npes} - export nth_ocnanalbmat=1 - export is_exclusive=True - npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) - export npe_node_ocnanalbmat - -elif [[ "${step}" = "ocnanalrun" ]]; then - npes=16 - case ${CASE} in - C384) - npes=480 - ;; - C48) - npes=16 - ;; - *) - echo "FATAL: Resolution not supported'" - exit 1 - esac - - export wtime_ocnanalrun="00:30:00" - export npe_ocnanalrun=${npes} - export nth_ocnanalrun=1 - export is_exclusive=True - npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) - export npe_node_ocnanalrun - -elif [[ "${step}" = "ocnanalchkpt" ]]; then - - export wtime_ocnanalchkpt="00:10:00" - export npe_ocnanalchkpt=1 - export nth_ocnanalchkpt=1 - npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) - export npe_node_ocnanalchkpt - case ${CASE} in - C384) - export memory_ocnanalchkpt="128GB" - ;; - C48) - export memory_ocnanalchkpt="32GB" - ;; - *) - echo "FATAL: Resolution not supported'" - exit 1 - esac - -elif [[ "${step}" = "ocnanalpost" ]]; then - - export wtime_ocnanalpost="00:30:00" - export npe_ocnanalpost=${npe_node_max} - export nth_ocnanalpost=1 - npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) - export npe_node_ocnanalpost - -elif [[ "${step}" = "ocnanalvrfy" ]]; then - - export wtime_ocnanalvrfy="00:35:00" - export npe_ocnanalvrfy=1 - export nth_ocnanalvrfy=1 - npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) - export 
npe_node_ocnanalvrfy - export memory_ocnanalvrfy="24GB" - -elif [[ ${step} = "anal" ]]; then - - export wtime_anal="00:50:00" - export wtime_anal_gfs="00:40:00" - export npe_anal=780 - export nth_anal=5 - export npe_anal_gfs=825 - export nth_anal_gfs=5 - if [[ "${machine}" = "WCOSS2" ]]; then - export nth_anal=8 - export nth_anal_gfs=8 - fi - if [[ ${CASE} = "C384" ]]; then - export npe_anal=160 - export npe_anal_gfs=160 - export nth_anal=10 - export nth_anal_gfs=10 - if [[ ${machine} = "S4" ]]; then - #On the S4-s4 partition, this is accomplished by increasing the task - #count to a multiple of 32 - if [[ ${PARTITION_BATCH} = "s4" ]]; then - export npe_anal=416 - export npe_anal_gfs=416 - fi - #S4 is small, so run this task with just 1 thread - export nth_anal=1 - export nth_anal_gfs=1 - export wtime_anal="02:00:00" - fi - fi - if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then - export npe_anal=84 - export npe_anal_gfs=84 - if [[ ${machine} = "S4" ]]; then - export nth_anal=4 - export nth_anal_gfs=4 - #Adjust job count for S4 - if [[ ${PARTITION_BATCH} = "s4" ]]; then - export npe_anal=88 - export npe_anal_gfs=88 - elif [[ ${PARTITION_BATCH} = "ivy" ]]; then - export npe_anal=90 - export npe_anal_gfs=90 - fi - fi - fi - npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) - export npe_node_anal - export nth_cycle=${nth_anal} - npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) - export npe_node_cycle - export is_exclusive=True - elif [[ ${step} = "analcalc" ]]; then export wtime_analcalc="00:10:00" @@ -726,107 +424,6 @@ elif [[ ${step} = "coupled_ic" ]]; then export nth_coupled_ic=1 export is_exclusive=True -elif [[ "${step}" = "atmensanlinit" ]]; then - - export wtime_atmensanlinit="00:10:00" - export npe_atmensanlinit=1 - export nth_atmensanlinit=1 - npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) - export npe_node_atmensanlinit - export memory_atmensanlinit="3072M" - -elif [[ "${step}" = "atmensanlrun" ]]; then - - # make below case dependent later - export layout_x=1 - export layout_y=1 - - export wtime_atmensanlrun="00:30:00" - npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmensanlrun - npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmensanlrun_gfs - export nth_atmensanlrun=1 - export nth_atmensanlrun_gfs=${nth_atmensanlrun} - npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) - export npe_node_atmensanlrun - export is_exclusive=True - -elif [[ "${step}" = "atmensanlfinal" ]]; then - - export wtime_atmensanlfinal="00:30:00" - export npe_atmensanlfinal=${npe_node_max} - export nth_atmensanlfinal=1 - npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) - export npe_node_atmensanlfinal - export is_exclusive=True - -elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then - - export wtime_eobs="00:15:00" - export wtime_eomg="01:00:00" - if [[ ${CASE} = "C768" ]]; then - export npe_eobs=200 - elif [[ ${CASE} = "C384" ]]; then - export npe_eobs=100 - elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then - export npe_eobs=40 - fi - export npe_eomg=${npe_eobs} - export nth_eobs=2 - export nth_eomg=${nth_eobs} - npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) - export npe_node_eobs - export npe_node_eomg=${npe_node_eobs} - export is_exclusive=True - #The number of tasks and cores used must be the same for eobs - #For S4, this is accomplished by running 10 tasks/node - if [[ ${machine} = "S4" 
]]; then - export npe_node_eobs=10 - fi - -elif [[ ${step} = "ediag" ]]; then - - export wtime_ediag="00:15:00" - export npe_ediag=48 - export nth_ediag=1 - npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) - export npe_node_ediag - export memory_ediag="30GB" - -elif [[ ${step} = "eupd" ]]; then - - export wtime_eupd="00:30:00" - if [[ ${CASE} = "C768" ]]; then - export npe_eupd=480 - export nth_eupd=6 - if [[ "${machine}" = "WCOSS2" ]]; then - export npe_eupd=315 - export nth_eupd=14 - fi - elif [[ ${CASE} = "C384" ]]; then - export npe_eupd=270 - export nth_eupd=8 - if [[ "${machine}" = "WCOSS2" ]]; then - export npe_eupd=315 - export nth_eupd=14 - elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then - export nth_eupd=8 - elif [[ ${machine} = "S4" ]]; then - export npe_eupd=160 - export nth_eupd=2 - fi - elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then - export npe_eupd=42 - export nth_eupd=2 - if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then - export nth_eupd=4 - fi - fi - npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) - export npe_node_eupd - export is_exclusive=True - elif [[ ${step} = "ecen" ]]; then export wtime_ecen="00:10:00" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 6503ae5523..9919b81b7e 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -868,12 +868,10 @@ elif [[ ${step} = "eupd" ]]; then fi elif [[ ${CASE} = "C384" ]]; then export npe_eupd=270 - export nth_eupd=2 + export nth_eupd=8 if [[ "${machine}" = "WCOSS2" ]]; then export npe_eupd=315 export nth_eupd=14 - elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then - export nth_eupd=8 elif [[ ${machine} = "S4" ]]; then export npe_eupd=160 export nth_eupd=2 From 8940adddfe9d21189740e71487603fb2acee2336 Mon Sep 17 00:00:00 2001 From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Tue, 24 Oct 2023 11:02:29 -0400 Subject: [PATCH 04/34] Optimize the checkout script (#1956) * Multithread the checkout script #1953 --- sorc/checkout.sh | 33 +++++++++++++++++++++------------ 1 file changed, 21 insertions(+), 12 deletions(-) diff --git a/sorc/checkout.sh b/sorc/checkout.sh index 9c9addad1d..3554cd99da 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -34,7 +34,7 @@ function checkout() { # logdir [default: $(pwd)]: where you want logfiles written # CLEAN [default: NO]: whether to delete existing directories and create a fresh clone # - # Usage: checkout + # Usage: checkout # # Arguments # dir: Directory for the clone @@ -48,7 +48,8 @@ function checkout() { dir="$1" remote="$2" version="$3" - recursive=${4:-"YES"} + cpus="${4:-1}" # Default 1 thread + recursive=${5:-"YES"} name=$(echo "${dir}" | cut -d '.' -f 1) echo "Performing checkout of ${name}" @@ -90,7 +91,7 @@ function checkout() { fi if [[ "${recursive}" == "YES" ]]; then echo "|-- Updating submodules (if any)" - git submodule update --init --recursive >> "${logfile}" 2>&1 + git submodule update --init --recursive -j "${cpus}" >> "${logfile}" 2>&1 status=$? 
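      # A submodule update failure only triggers the warning below; the primary
      # clone has already succeeded by this point, so it is not treated as fatal.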
if ((status > 0)); then echo " WARNING: Error while updating submodules of ${name}" @@ -149,25 +150,33 @@ source "${topdir}/../workflow/gw_setup.sh" # The checkout version should always be a speciifc commit (hash or tag), not a branch errs=0 -checkout "wxflow" "https://github.com/NOAA-EMC/wxflow" "528f5ab" ; errs=$((errs + $?)) -checkout "gfs_utils.fd" "https://github.com/NOAA-EMC/gfs-utils" "a283262" ; errs=$((errs + $?)) -checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "72a0471" ; errs=$((errs + $?)) -checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-4d05445}" ; errs=$((errs + $?)) -checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git" "c267780" ; errs=$((errs + $?)) +# Checkout UFS submodules in parallel +checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-4d05445}" "8" ; errs=$((errs + $?)) + +# Run all other checkouts simultaneously with just 1 core each to handle submodules. +checkout "wxflow" "https://github.com/NOAA-EMC/wxflow" "528f5ab" & +checkout "gfs_utils.fd" "https://github.com/NOAA-EMC/gfs-utils" "a283262" & +checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "72a0471" & +checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git" "c267780" & if [[ ${checkout_gsi} == "YES" ]]; then - checkout "gsi_enkf.fd" "https://github.com/NOAA-EMC/GSI.git" "ca19008" "NO"; errs=$((errs + $?)) + checkout "gsi_enkf.fd" "https://github.com/NOAA-EMC/GSI.git" "ca19008" "1" "NO" & fi if [[ ${checkout_gdas} == "YES" ]]; then - checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "d347d22"; errs=$((errs + $?)) + checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "d347d22" & fi if [[ ${checkout_gsi} == "YES" || ${checkout_gdas} == "YES" ]]; then - checkout "gsi_utils.fd" "https://github.com/NOAA-EMC/GSI-Utils.git" "322cc7b"; errs=$((errs + $?)) - checkout "gsi_monitor.fd" "https://github.com/NOAA-EMC/GSI-Monitor.git" "45783e3"; errs=$((errs + $?)) + checkout "gsi_utils.fd" "https://github.com/NOAA-EMC/GSI-Utils.git" "322cc7b" & + checkout "gsi_monitor.fd" "https://github.com/NOAA-EMC/GSI-Monitor.git" "45783e3" & fi +# Go through each PID and verify no errors were reported. 
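+# "$(jobs -p)" expands to the PIDs of the checkout calls backgrounded above;
+# waiting on each PID individually (instead of a bare "wait") surfaces each
+# clone's exit code so failures accumulate in ${errs}.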
+for checkout_pid in $(jobs -p); do + wait "${checkout_pid}" || errs=$((errs + $?)) +done + if (( errs > 0 )); then echo "WARNING: One or more errors encountered during checkout process, please check logs before building" fi From e2c624d8904cd988394c73d0edb22fa593229d3f Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com> Date: Tue, 24 Oct 2023 13:32:52 -0400 Subject: [PATCH 05/34] Refactor UFSDA ATM var and ens layout (#1945) --- parm/config/gfs/config.atmanl | 7 ++----- parm/config/gfs/config.atmensanl | 7 ++----- parm/config/gfs/config.resources | 13 +++++++++++++ parm/config/gfs/yaml/defaults.yaml | 8 ++++++++ 4 files changed, 25 insertions(+), 10 deletions(-) diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl index 0d388f94bd..abfbd80734 100644 --- a/parm/config/gfs/config.atmanl +++ b/parm/config/gfs/config.atmanl @@ -13,11 +13,8 @@ export STATICB_TYPE="gsibec" export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml export INTERP_METHOD='barycentric' -export layout_x=1 -export layout_y=1 - -export io_layout_x=1 -export io_layout_y=1 +export io_layout_x=@IO_LAYOUT_X@ +export io_layout_y=@IO_LAYOUT_Y@ export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl index 7a696fa734..58fd7b6e22 100644 --- a/parm/config/gfs/config.atmensanl +++ b/parm/config/gfs/config.atmensanl @@ -10,11 +10,8 @@ export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml export INTERP_METHOD='barycentric' -export layout_x=1 -export layout_y=1 - -export io_layout_x=1 -export io_layout_y=1 +export io_layout_x=@IO_LAYOUT_X@ +export io_layout_y=@IO_LAYOUT_Y@ export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 9919b81b7e..fc6624df98 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -189,6 +189,15 @@ elif [[ ${step} = "waveawipsgridded" ]]; then elif [[ "${step}" = "atmanlinit" ]]; then + # make below case dependent later + export layout_x=1 + export layout_y=1 + + layout_gsib_x=$(echo "${layout_x} * 3" | bc) + export layout_gsib_x + layout_gsib_y=$(echo "${layout_y} * 2" | bc) + export layout_gsib_y + export wtime_atmanlinit="00:10:00" export npe_atmanlinit=1 export nth_atmanlinit=1 @@ -790,6 +799,10 @@ elif [[ ${step} = "stage_ic" ]]; then elif [[ "${step}" = "atmensanlinit" ]]; then + # make below case dependent later + export layout_x=1 + export layout_y=1 + export wtime_atmensanlinit="00:10:00" export npe_atmensanlinit=1 export nth_atmensanlinit=1 diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml index 8c2b4ff22b..c0298edb18 100644 --- a/parm/config/gfs/yaml/defaults.yaml +++ b/parm/config/gfs/yaml/defaults.yaml @@ -6,6 +6,14 @@ base: DO_JEDILANDDA: "NO" DO_MERGENSST: "NO" +atmanl: + IO_LAYOUT_X: 1 + IO_LAYOUT_Y: 1 + +atmensanl: + IO_LAYOUT_X: 1 + IO_LAYOUT_Y: 1 + aeroanl: IO_LAYOUT_X: 1 IO_LAYOUT_Y: 1 From 1b00224e18842cd873eb1779be08f96687e49e1f Mon Sep 17 00:00:00 2001 From: Kate Friedman Date: Tue, 24 Oct 2023 14:48:24 -0400 Subject: [PATCH 06/34] Set SENDCOM=YES for tracker/genesis tasks (#1971) Set SENDCOM to YES in config.vrfy to get outputs copied back to COM. Will reevaluate the need for SENDCOM when moving the tracker/genesis jobs out of the vrfy job with issue #235 work. 
Refs #1947 --- parm/config/gfs/config.vrfy | 2 ++ 1 file changed, 2 insertions(+) diff --git a/parm/config/gfs/config.vrfy b/parm/config/gfs/config.vrfy index 0f0ce4ff9d..8754609c50 100644 --- a/parm/config/gfs/config.vrfy +++ b/parm/config/gfs/config.vrfy @@ -66,6 +66,8 @@ fi # Cyclone genesis and cyclone track verification #------------------------------------------------- +export SENDCOM="YES" # Needed by tracker/genesis scripts still + export HOMEens_tracker=$BASE_GIT/TC_tracker/${tracker_ver} if [[ "${VRFYTRAK}" = "YES" ]]; then From c58deae0cf078d1ee093529064d74f60482aa3f4 Mon Sep 17 00:00:00 2001 From: "Henry R. Winterbottom" <49202169+HenryWinterbottom-NOAA@users.noreply.github.com> Date: Tue, 24 Oct 2023 16:19:04 -0600 Subject: [PATCH 07/34] Updates for NOAA CSP AWS global-workflow related file paths. (#1970) Co-authored-by: henrywinterbottom-wxdev --- docs/source/noaa_csp.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/noaa_csp.rst b/docs/source/noaa_csp.rst index 3af8956293..66317efe92 100644 --- a/docs/source/noaa_csp.rst +++ b/docs/source/noaa_csp.rst @@ -183,14 +183,14 @@ the global-workflow. The software stack supporting the ``develop`` branch of the global-workflow is provided for the user and is located beneath -``/contrib/global-workflow/spack-stack``. The modules required for the +``/contrib/emc_static/spack-stack``. The modules required for the global-workflow execution may be loaded as follows. .. code-block:: bash user@host:$ module unuse /opt/cray/craype/default/modulefiles user@host:$ module unuse /opt/cray/modulefiles - user@host:$ module use /contrib/global-workflow/spack-stack/miniconda/modulefiles/miniconda + user@host:$ module use /contrib/emc_static/spack-stack/miniconda/modulefiles/miniconda user@host:$ module load py39_4.12.0 user@host:$ module load rocoto/1.3.3 From 7cdfad4eaa7abe0769ff13396c54e6d93afebf8f Mon Sep 17 00:00:00 2001 From: TerrenceMcGuinness-NOAA Date: Tue, 24 Oct 2023 22:19:46 +0000 Subject: [PATCH 08/34] Build GDASapp for CI tests (#1964) * added -u to global checkout so CI test builds tests for GDASapps * Update check_ci.sh needed more quotes --------- Co-authored-by: TerrenceMcGuinness-NOAA --- ci/scripts/check_ci.sh | 4 ++-- ci/scripts/clone-build_ci.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh index d5cf6a20bd..097e20ced4 100755 --- a/ci/scripts/check_ci.sh +++ b/ci/scripts/check_ci.sh @@ -89,9 +89,9 @@ for pr in ${pr_list}; do # Check to see if this PR that was opened by the weekly tests and if so close it if it passed on all platforms weekly_labels=$(${GH} pr view "${pr}" --repo "${REPO_URL}" --json headRefName,labels,author --jq 'select(.author.login | contains("emcbot")) | select(.headRefName | contains("weekly_ci")) | .labels[].name ') || true if [[ -n "${weekly_labels}" ]]; then - num_platforms=$(find ../platforms -type f -name "config.*" | wc -l) + num_platforms=$(find "${ROOT_DIR}/ci/platforms" -type f -name "config.*" | wc -l) passed=0 - for platforms in ../platforms/config.*; do + for platforms in "${ROOT_DIR}"/ci/platforms/config.*; do machine=$(basename "${platforms}" | cut -d. 
-f2) if [[ "${weekly_labels}" == *"CI-${machine^}-Passed"* ]]; then ((passed=passed+1)) diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh index 796e4b7014..03eff13158 100755 --- a/ci/scripts/clone-build_ci.sh +++ b/ci/scripts/clone-build_ci.sh @@ -79,7 +79,7 @@ echo "${commit}" > "../commit" cd sorc || exit 1 set +e # TODO enable -u later when GDASApp tests are added -./checkout.sh -c -g >> log.checkout 2>&1 +./checkout.sh -c -g -u >> log.checkout 2>&1 checkout_status=$? if [[ ${checkout_status} != 0 ]]; then { From e817f5dd38c26a88f76d90eb71124f1acbfc5a8f Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Wed, 25 Oct 2023 02:03:14 +0000 Subject: [PATCH 09/34] Fix path for marine products (#1966) When PR #1823 was merged, the name of the marine product template was not updated in ocnpost, leading the products to be placed in the wrong location and missed by the archive job. Resolves #1902 --- jobs/rocoto/ocnpost.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/jobs/rocoto/ocnpost.sh b/jobs/rocoto/ocnpost.sh index 0766ac3b37..5a2dc091cf 100755 --- a/jobs/rocoto/ocnpost.sh +++ b/jobs/rocoto/ocnpost.sh @@ -29,7 +29,7 @@ YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_HISTORY COM_OCEAN_2D COM_OCEAN_3 COM_OCEAN_XSECT COM_ICE_HISTORY for grid in "0p50" "0p25"; do - YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_TMPL" + YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_GRID_TMPL" done for outdir in COM_OCEAN_2D COM_OCEAN_3D COM_OCEAN_XSECT COM_OCEAN_GRIB_0p25 COM_OCEAN_GRIB_0p50; do From 0b5cf9b67a8146d9d4815ea6545f7f2524120d83 Mon Sep 17 00:00:00 2001 From: Kate Friedman Date: Wed, 25 Oct 2023 13:17:28 -0400 Subject: [PATCH 10/34] Update GFS version in index.rst to v16.3.10 (#1976) Update the "State of operations" GFS version number to new v16.3.10 (Annual CO2 fix file update in operations). Refs #1924 --- docs/source/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/index.rst b/docs/source/index.rst index face361de4..4c39e8dcb7 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -10,7 +10,7 @@ Status ====== * State of develop (HEAD) branch: GFSv17+ development -* State of operations (dev/gfs.v16 branch): GFS v16.3.9 `tag: [gfs.v16.3.9] `_ +* State of operations (dev/gfs.v16 branch): GFS v16.3.10 `tag: [gfs.v16.3.10] `_ ============= Code managers From 8556541daa79b0180fde48a58a8dcfb2f8c56ea5 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Wed, 25 Oct 2023 16:15:28 -0400 Subject: [PATCH 11/34] Fix incorrect usage of CFP on WCOSS2 (#1977) --- ush/run_mpmd.sh | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/ush/run_mpmd.sh b/ush/run_mpmd.sh index 352a411312..24cb3f2656 100755 --- a/ush/run_mpmd.sh +++ b/ush/run_mpmd.sh @@ -4,6 +4,9 @@ source "${HOMEgfs}/ush/preamble.sh" cmdfile=${1:?"run_mpmd requires an input file containing commands to execute in MPMD mode"} +# Determine the number of MPMD processes from incoming ${cmdfile} +nprocs=$(wc -l < "${cmdfile}") + # Local MPMD file containing instructions to run in CFP mpmd_cmdfile="${DATA:-}/mpmd_cmdfile" if [[ -s "${mpmd_cmdfile}" ]]; then rm -f "${mpmd_cmdfile}"; fi @@ -19,7 +22,6 @@ if [[ "${launcher:-}" =~ ^srun.* ]]; then # srun-based system e.g. 
Hera, Orion ((nm=nm+1)) done < "${cmdfile}" - nprocs=$(wc -l < "${mpmd_cmdfile}") set +e # shellcheck disable=SC2086 ${launcher:-} ${mpmd_opt:-} -n ${nprocs} "${mpmd_cmdfile}" @@ -42,7 +44,8 @@ elif [[ "${launcher:-}" =~ ^mpiexec.* ]]; then # mpiexec done < "${cmdfile}" chmod 755 "${mpmd_cmdfile}" - ${launcher:-} "${mpmd_cmdfile}" + # shellcheck disable=SC2086 + ${launcher:-} -np ${nprocs} ${mpmd_opt:-} "${mpmd_cmdfile}" rc=$? if (( rc == 0 )); then out_files=$(find . -name 'mpmd.*.out') From c02e118c0ac5485109c9bd33472ff42db309cd9b Mon Sep 17 00:00:00 2001 From: Jessica Meixner Date: Thu, 26 Oct 2023 15:05:05 -0400 Subject: [PATCH 12/34] Update ufs model to version from 10-12-23 (#1933) This updates the ufs-weather-model to the commit hash from 10-12-23 from the HR2 tag. The diffs can be seen here: https://github.com/ufs-community/ufs-weather-model/compare/GFSv17.HR2...68050e58589a82ab509aaefaafdc2a6b90f34e48 Resolves #1811 --- Externals.cfg | 2 +- env/WCOSS2.env | 1 + jobs/rocoto/efcs.sh | 20 +----- jobs/rocoto/fcst.sh | 63 ++-------------- jobs/rocoto/waveinit.sh | 3 +- jobs/rocoto/wavepostbndpnt.sh | 3 +- jobs/rocoto/wavepostbndpntbll.sh | 3 +- jobs/rocoto/wavepostpnt.sh | 3 +- jobs/rocoto/wavepostsbs.sh | 3 +- jobs/rocoto/waveprep.sh | 3 +- parm/config/gefs/config.base.emc.dyn | 2 +- parm/config/gfs/config.resources | 2 +- parm/ufs/chem/CAP.rc | 2 + parm/ufs/chem/GOCART2G_GridComp.rc | 2 + parm/ufs/nems.configure.cpld.IN | 11 ++- parm/ufs/nems.configure.cpld_aero.IN | 11 ++- .../ufs/nems.configure.cpld_aero_outerwave.IN | 11 ++- parm/ufs/nems.configure.cpld_aero_wave.IN | 12 +++- parm/ufs/nems.configure.cpld_outerwave.IN | 11 ++- parm/ufs/nems.configure.cpld_wave.IN | 11 ++- scripts/exglobal_forecast.sh | 1 + sorc/checkout.sh | 2 +- ush/forecast_postdet.sh | 23 +++++- ush/load_ufswm_modules.sh | 71 +++++++++++++++++++ ush/nems_configure.sh | 2 + ush/parsing_namelists_FV3.sh | 12 ---- 26 files changed, 172 insertions(+), 118 deletions(-) create mode 100755 ush/load_ufswm_modules.sh diff --git a/Externals.cfg b/Externals.cfg index eaf397edb9..1b30c321ba 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -8,7 +8,7 @@ protocol = git required = True [UFS] -tag = 4d05445 +tag = 68050e5 local_path = sorc/ufs_model.fd repo_url = https://github.com/ufs-community/ufs-weather-model.git protocol = git diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 22d65ba0ed..ae5ceaa022 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -35,6 +35,7 @@ elif [[ "${step}" = "preplandobs" ]]; then elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then + export USE_CFP="YES" if [[ "${step}" = "waveprep" ]] && [[ "${CDUMP}" = "gfs" ]]; then export NTASKS=${NTASKS_gfs} ; fi export wavempexec="${launcher} -np" export wave_mpmd=${mpmd_opt} diff --git a/jobs/rocoto/efcs.sh b/jobs/rocoto/efcs.sh index 46a25ac759..fa9dc0c269 100755 --- a/jobs/rocoto/efcs.sh +++ b/jobs/rocoto/efcs.sh @@ -5,23 +5,9 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules #. ${HOMEgfs}/ush/load_fv3gfs_modules.sh -#status=$? 
-#[[ ${status} -ne 0 ]] && exit ${status} - -# TODO: clean this up -source "${HOMEgfs}/ush/detect_machine.sh" -set +x -source "${HOMEgfs}/ush/module-setup.sh" -module use "${HOMEgfs}/sorc/ufs_model.fd/tests" -module load modules.ufs_model.lua -# Workflow needs utilities from prod_util (setPDY.sh, ndate, etc.) -module load prod_util -if [[ "${MACHINE_ID}" = "wcoss2" ]]; then - module load cray-pals -fi -module list -unset MACHINE_ID -set_trace +. ${HOMEgfs}/ush/load_ufswm_modules.sh +status=$? +[[ ${status} -ne 0 ]] && exit ${status} export job="efcs" export jobid="${job}.$$" diff --git a/jobs/rocoto/fcst.sh b/jobs/rocoto/fcst.sh index 9d59f70bd8..a5be293f9e 100755 --- a/jobs/rocoto/fcst.sh +++ b/jobs/rocoto/fcst.sh @@ -3,65 +3,11 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### -# Source FV3GFS workflow modules +# Source UFS Weather Model workflow modules #. ${HOMEgfs}/ush/load_fv3gfs_modules.sh -#status=$? -#[[ ${status} -ne 0 ]] && exit ${status} - -# TODO: clean this up -source "${HOMEgfs}/ush/detect_machine.sh" -set +x -source "${HOMEgfs}/ush/module-setup.sh" -if [[ "${MACHINE_ID}" != "noaacloud" ]]; then - module use "${HOMEgfs}/sorc/ufs_model.fd/tests" - module load modules.ufs_model.lua - module load prod_util -fi - -if [[ "${MACHINE_ID}" = "wcoss2" ]]; then - module load cray-pals -fi -if [[ "${MACHINE_ID}" = "hera" ]]; then - module use "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/modulefiles/core" - module load "miniconda3/4.6.14" - module load "gfs_workflow/1.0.0" -# TODO: orion and wcoss2 will be uncommented when they are ready. This comment block will be removed in the next PR -#elif [[ "${MACHINE_ID}" = "orion" ]]; then -# module use "/home/rmahajan/opt/global-workflow/modulefiles/core" -# module load "python/3.7.5" -# module load "gfs_workflow/1.0.0" -#elif [[ "${MACHINE_ID}" = "wcoss2" ]]; then -# module load "python/3.7.5" -fi -if [[ "${MACHINE_ID}" == "noaacloud" ]]; then - if [[ "${PW_CSP:-}" = "aws" ]]; then - - # TODO: This can be cleaned-up; most of this is a hack for now. - module use "/contrib/spack-stack/envs/ufswm/install/modulefiles/Core" - module load "stack-intel" - module load "stack-intel-oneapi-mpi" - module use -a "/contrib/spack-stack/miniconda/modulefiles/miniconda/" - module load "py39_4.12.0" - module load "ufs-weather-model-env/1.0.0" - export NETCDF="/contrib/spack-stack/miniconda/apps/miniconda/py39_4.12.0" - # TODO: Are there plans for EPIC to maintain this package or should GW provide support? - export UTILROOT="/contrib/global-workflow/NCEPLIBS-prod_util" - export PATH="${PATH}:/contrib/global-workflow/bin" - ndate_path="$(command -v ndate)" - export NDATE="${ndate_path}" - fi -fi - -module list -unset MACHINE_ID -set_trace - -############################################################### -# exglobal_forecast.py requires the following in PYTHONPATH -# This will be moved to a module load when ready -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src:${HOMEgfs}/ush/python/pygfs" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH +. ${HOMEgfs}/ush/load_ufswm_modules.sh +status=$? +[[ ${status} -ne 0 ]] && exit ${status} export job="fcst" export jobid="${job}.$$" @@ -71,5 +17,4 @@ export jobid="${job}.$$" ${HOMEgfs}/jobs/JGLOBAL_FORECAST status=$? 
- exit ${status} diff --git a/jobs/rocoto/waveinit.sh b/jobs/rocoto/waveinit.sh index d0c3f49929..b38367d09a 100755 --- a/jobs/rocoto/waveinit.sh +++ b/jobs/rocoto/waveinit.sh @@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_ufswm_modules.sh status=$? [[ ${status} -ne 0 ]] && exit ${status} diff --git a/jobs/rocoto/wavepostbndpnt.sh b/jobs/rocoto/wavepostbndpnt.sh index 5d26498356..1a4f940150 100755 --- a/jobs/rocoto/wavepostbndpnt.sh +++ b/jobs/rocoto/wavepostbndpnt.sh @@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_ufswm_modules.sh status=$? [[ ${status} -ne 0 ]] && exit ${status} diff --git a/jobs/rocoto/wavepostbndpntbll.sh b/jobs/rocoto/wavepostbndpntbll.sh index ce4f9e6b2d..2d128facb7 100755 --- a/jobs/rocoto/wavepostbndpntbll.sh +++ b/jobs/rocoto/wavepostbndpntbll.sh @@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_ufswm_modules.sh status=$? [[ ${status} -ne 0 ]] && exit ${status} diff --git a/jobs/rocoto/wavepostpnt.sh b/jobs/rocoto/wavepostpnt.sh index 9efb755dec..60017394f5 100755 --- a/jobs/rocoto/wavepostpnt.sh +++ b/jobs/rocoto/wavepostpnt.sh @@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_ufswm_modules.sh status=$? [[ ${status} -ne 0 ]] && exit ${status} diff --git a/jobs/rocoto/wavepostsbs.sh b/jobs/rocoto/wavepostsbs.sh index e4bea0bc34..f4789210d8 100755 --- a/jobs/rocoto/wavepostsbs.sh +++ b/jobs/rocoto/wavepostsbs.sh @@ -4,7 +4,8 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_ufswm_modules.sh status=$? [[ ${status} -ne 0 ]] && exit ${status} diff --git a/jobs/rocoto/waveprep.sh b/jobs/rocoto/waveprep.sh index 0cbafde87e..fa934167b7 100755 --- a/jobs/rocoto/waveprep.sh +++ b/jobs/rocoto/waveprep.sh @@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_ufswm_modules.sh status=$? 
[[ ${status} -ne 0 ]] && exit ${status} diff --git a/parm/config/gefs/config.base.emc.dyn b/parm/config/gefs/config.base.emc.dyn index e3e221c006..3d17421408 100644 --- a/parm/config/gefs/config.base.emc.dyn +++ b/parm/config/gefs/config.base.emc.dyn @@ -82,7 +82,7 @@ export VERBOSE="YES" export KEEPDATA="NO" export CHGRP_RSTPROD="@CHGRP_RSTPROD@" export CHGRP_CMD="@CHGRP_CMD@" -export NCDUMP="${NETCDF}/bin/ncdump" +export NCDUMP="${NETCDF:-}/bin/ncdump" export NCLEN="${HOMEgfs}/ush/getncdimlen" # Machine environment, jobs, and other utility scripts diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index fc6624df98..5b9fa17234 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -149,7 +149,7 @@ elif [[ ${step} = "wavepostbndpntbll" ]]; then elif [[ ${step} = "wavepostpnt" ]]; then - export wtime_wavepostpnt="01:30:00" + export wtime_wavepostpnt="04:00:00" export npe_wavepostpnt=200 export nth_wavepostpnt=1 npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) diff --git a/parm/ufs/chem/CAP.rc b/parm/ufs/chem/CAP.rc index 64425b1bb4..d40106ae81 100644 --- a/parm/ufs/chem/CAP.rc +++ b/parm/ufs/chem/CAP.rc @@ -8,6 +8,8 @@ REPORT_THROUGHPUT: .false. USE_SHMEM: 0 +GridType: Cubed-Sphere + MAPL_ENABLE_TIMERS: NO MAPL_ENABLE_MEMUTILS: NO PRINTSPEC: 0 # (0: OFF, 1: IMPORT & EXPORT, 2: IMPORT, 3: EXPORT) diff --git a/parm/ufs/chem/GOCART2G_GridComp.rc b/parm/ufs/chem/GOCART2G_GridComp.rc index 5ea4fa45df..18954f8cdd 100644 --- a/parm/ufs/chem/GOCART2G_GridComp.rc +++ b/parm/ufs/chem/GOCART2G_GridComp.rc @@ -39,3 +39,5 @@ PASSIVE_INSTANCES_NI: aerosol_monochromatic_optics_wavelength_in_nm_from_LUT: 470 550 670 870 wavelengths_for_profile_aop_in_nm: 470 550 670 870 # must be included in LUT wavelengths_for_vertically_integrated_aop_in_nm: 470 550 670 870 # must be included in LUT + +use_threads: .TRUE. 
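The nems.configure.*.IN hunks that follow are templates: each @[NAME] token is
filled at run time from a shell variable of the same name, set in
ush/nems_configure.sh (its hunk further below defines ocean_albedo_limit, for
example). A minimal sketch of that substitution step, assuming the workflow's
atparse-style template parser (the exact parsing utility is not shown in these
patches, and the values here are illustrative):

    # Define the variables the template references, then expand @[...] tokens
    ocean_albedo_limit=0.06
    CPLMODE="nems_frac"    # illustrative value
    FHMAX=120              # illustrative value
    atparse < "${HOMEgfs}/parm/ufs/nems.configure.cpld.IN" > "${DATA}/nems.configure"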
diff --git a/parm/ufs/nems.configure.cpld.IN b/parm/ufs/nems.configure.cpld.IN index 0f6e68cc4e..2182a96384 100644 --- a/parm/ufs/nems.configure.cpld.IN +++ b/parm/ufs/nems.configure.cpld.IN @@ -73,6 +73,7 @@ runSeq:: MED med_phases_post_atm ICE -> MED :remapMethod=redist MED med_phases_post_ice + MED med_phases_ocnalb_run MED med_phases_prep_ocn_accum @ OCN -> MED :remapMethod=redist @@ -89,12 +90,10 @@ MED_attributes:: ATM_model = @[atm_model] ICE_model = @[ice_model] OCN_model = @[ocn_model] - history_n = 0 - history_option = nhours - history_ymd = -999 coupling_mode = @[CPLMODE] history_tile_atm = @[ATMTILESIZE] pio_rearranger = box + ocean_albedo_limit = @[ocean_albedo_limit] :: ALLCOMP_attributes:: ScalarFieldCount = 2 @@ -114,4 +113,10 @@ ALLCOMP_attributes:: stop_n = @[FHMAX] stop_option = nhours stop_ymd = -999 + orb_eccen = 1.e36 + orb_iyear = 2000 + orb_iyear_align = 2000 + orb_mode = fixed_year + orb_mvelp = 1.e36 + orb_obliq = 1.e36 :: diff --git a/parm/ufs/nems.configure.cpld_aero.IN b/parm/ufs/nems.configure.cpld_aero.IN index 9d4afff121..7b13318094 100644 --- a/parm/ufs/nems.configure.cpld_aero.IN +++ b/parm/ufs/nems.configure.cpld_aero.IN @@ -85,6 +85,7 @@ runSeq:: MED med_phases_post_atm ICE -> MED :remapMethod=redist MED med_phases_post_ice + MED med_phases_ocnalb_run MED med_phases_prep_ocn_accum @ OCN -> MED :remapMethod=redist @@ -101,12 +102,10 @@ MED_attributes:: ATM_model = @[atm_model] ICE_model = @[ice_model] OCN_model = @[ocn_model] - history_n = 0 - history_option = nhours - history_ymd = -999 coupling_mode = @[CPLMODE] history_tile_atm = @[ATMTILESIZE] pio_rearranger = box + ocean_albedo_limit = @[ocean_albedo_limit] :: ALLCOMP_attributes:: ScalarFieldCount = 2 @@ -126,4 +125,10 @@ ALLCOMP_attributes:: stop_n = @[FHMAX] stop_option = nhours stop_ymd = -999 + orb_eccen = 1.e36 + orb_iyear = 2000 + orb_iyear_align = 2000 + orb_mode = fixed_year + orb_mvelp = 1.e36 + orb_obliq = 1.e36 :: diff --git a/parm/ufs/nems.configure.cpld_aero_outerwave.IN b/parm/ufs/nems.configure.cpld_aero_outerwave.IN index 78a009b879..fbbf4441f1 100644 --- a/parm/ufs/nems.configure.cpld_aero_outerwave.IN +++ b/parm/ufs/nems.configure.cpld_aero_outerwave.IN @@ -101,6 +101,7 @@ runSeq:: MED med_phases_post_atm ICE -> MED :remapMethod=redist MED med_phases_post_ice + MED med_phases_ocnalb_run MED med_phases_prep_ocn_accum MED med_phases_prep_wav_accum @ @@ -121,12 +122,10 @@ MED_attributes:: ICE_model = @[ice_model] OCN_model = @[ocn_model] WAV_model = @[wav_model] - history_n = 0 - history_option = nhours - history_ymd = -999 coupling_mode = @[CPLMODE] history_tile_atm = @[ATMTILESIZE] pio_rearranger = box + ocean_albedo_limit = @[ocean_albedo_limit] :: ALLCOMP_attributes:: ScalarFieldCount = 2 @@ -146,4 +145,10 @@ ALLCOMP_attributes:: stop_n = @[FHMAX] stop_option = nhours stop_ymd = -999 + orb_eccen = 1.e36 + orb_iyear = 2000 + orb_iyear_align = 2000 + orb_mode = fixed_year + orb_mvelp = 1.e36 + orb_obliq = 1.e36 :: diff --git a/parm/ufs/nems.configure.cpld_aero_wave.IN b/parm/ufs/nems.configure.cpld_aero_wave.IN index 6b886b0626..2bb0416983 100644 --- a/parm/ufs/nems.configure.cpld_aero_wave.IN +++ b/parm/ufs/nems.configure.cpld_aero_wave.IN @@ -104,6 +104,7 @@ runSeq:: MED med_phases_post_ice WAV -> MED :remapMethod=redist MED med_phases_post_wav + MED med_phases_ocnalb_run MED med_phases_prep_ocn_accum @ OCN -> MED :remapMethod=redist @@ -121,11 +122,10 @@ MED_attributes:: ICE_model = @[ice_model] OCN_model = @[ocn_model] WAV_model = @[wav_model] - history_n = 0 - history_option = 
nhours
-  history_ymd = -999
   coupling_mode = @[CPLMODE]
   history_tile_atm = @[ATMTILESIZE]
+  pio_rearranger = box
+  ocean_albedo_limit = @[ocean_albedo_limit]
 ::
 ALLCOMP_attributes::
   ScalarFieldCount = 2
@@ -145,4 +145,10 @@ ALLCOMP_attributes::
   stop_n = @[FHMAX]
   stop_option = nhours
   stop_ymd = -999
+  orb_eccen = 1.e36
+  orb_iyear = 2000
+  orb_iyear_align = 2000
+  orb_mode = fixed_year
+  orb_mvelp = 1.e36
+  orb_obliq = 1.e36
 ::
diff --git a/parm/ufs/nems.configure.cpld_outerwave.IN b/parm/ufs/nems.configure.cpld_outerwave.IN
index 736e0cf3fd..521e59941a 100644
--- a/parm/ufs/nems.configure.cpld_outerwave.IN
+++ b/parm/ufs/nems.configure.cpld_outerwave.IN
@@ -89,6 +89,7 @@ runSeq::
   MED med_phases_post_atm
   ICE -> MED :remapMethod=redist
   MED med_phases_post_ice
+  MED med_phases_ocnalb_run
   MED med_phases_prep_ocn_accum
   MED med_phases_prep_wav_accum
 @
@@ -109,12 +110,10 @@ MED_attributes::
   ICE_model = @[ice_model]
   OCN_model = @[ocn_model]
   WAV_model = @[wav_model]
-  history_n = 0
-  history_option = nhours
-  history_ymd = -999
   coupling_mode = @[CPLMODE]
   history_tile_atm = @[ATMTILESIZE]
   pio_rearranger = box
+  ocean_albedo_limit = @[ocean_albedo_limit]
 ::
 ALLCOMP_attributes::
   ScalarFieldCount = 2
@@ -134,4 +133,10 @@ ALLCOMP_attributes::
   stop_n = @[FHMAX]
   stop_option = nhours
   stop_ymd = -999
+  orb_eccen = 1.e36
+  orb_iyear = 2000
+  orb_iyear_align = 2000
+  orb_mode = fixed_year
+  orb_mvelp = 1.e36
+  orb_obliq = 1.e36
 ::
diff --git a/parm/ufs/nems.configure.cpld_wave.IN b/parm/ufs/nems.configure.cpld_wave.IN
index 3a1c918900..f9f4bc99ff 100644
--- a/parm/ufs/nems.configure.cpld_wave.IN
+++ b/parm/ufs/nems.configure.cpld_wave.IN
@@ -92,6 +92,7 @@ runSeq::
   MED med_phases_post_ice
   WAV -> MED :remapMethod=redist
   MED med_phases_post_wav
+  MED med_phases_ocnalb_run
   MED med_phases_prep_ocn_accum
 @
 OCN -> MED :remapMethod=redist
@@ -109,12 +110,10 @@ MED_attributes::
   ICE_model = @[ice_model]
   OCN_model = @[ocn_model]
   WAV_model = @[wav_model]
-  history_n = 0
-  history_option = nhours
-  history_ymd = -999
   coupling_mode = @[CPLMODE]
   history_tile_atm = @[ATMTILESIZE]
   pio_rearranger = box
+  ocean_albedo_limit = @[ocean_albedo_limit]
 ::
 ALLCOMP_attributes::
   ScalarFieldCount = 2
@@ -134,4 +133,10 @@ ALLCOMP_attributes::
   stop_n = @[FHMAX]
   stop_option = nhours
   stop_ymd = -999
+  orb_eccen = 1.e36
+  orb_iyear = 2000
+  orb_iyear_align = 2000
+  orb_mode = fixed_year
+  orb_mvelp = 1.e36
+  orb_obliq = 1.e36
 ::
diff --git a/scripts/exglobal_forecast.sh b/scripts/exglobal_forecast.sh
index 076c635c17..86cea85dee 100755
--- a/scripts/exglobal_forecast.sh
+++ b/scripts/exglobal_forecast.sh
@@ -164,6 +164,7 @@ FV3_out
 [[ ${cplflx} = .true. ]] && MOM6_out
 [[ ${cplwav} = .true. ]] && WW3_out
 [[ ${cplice} = .true. ]] && CICE_out
+[[ ${cplchm} = .true. ]] && GOCART_out
 [[ ${esmf_profile:-} = .true. ]] && CPL_out
 echo "MAIN: Output copied to COMROT"

diff --git a/sorc/checkout.sh b/sorc/checkout.sh
index 3554cd99da..1a3d2c9da6 100755
--- a/sorc/checkout.sh
+++ b/sorc/checkout.sh
@@ -151,7 +151,7 @@ source "${topdir}/../workflow/gw_setup.sh"
 # The checkout version should always be a specific commit (hash or tag), not a branch
 errs=0
 # Checkout UFS submodules in parallel
-checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-4d05445}" "8" ; errs=$((errs + $?))
+checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-68050e5}" "8" ; errs=$((errs + $?))
 # Run all other checkouts simultaneously with just 1 core each to handle submodules.
checkout "wxflow" "https://github.com/NOAA-EMC/wxflow" "528f5ab" & diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 1336abe5b9..e3166cd72f 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -1038,7 +1038,26 @@ GOCART_postdet() { rm -f "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" fi - ${NLN} "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \ - "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" + #To Do: Temporarily removing this as this will crash gocart, adding copy statement at the end + #${NLN} "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \ + # "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" done } + +GOCART_out() { + echo "SUB ${FUNCNAME[0]}: Copying output data for GOCART" + + # Copy gocart.inst_aod after the forecast is run (and successfull) + # TO DO: this should be linked but there were issues where gocart was crashing if it was linked + local fhr + local vdate + for fhr in ${FV3_OUTPUT_FH}; do + if (( fhr == 0 )); then continue; fi + vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) + ${NCP} "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \ + "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" + done + + +} + diff --git a/ush/load_ufswm_modules.sh b/ush/load_ufswm_modules.sh new file mode 100755 index 0000000000..2d6e6a02c1 --- /dev/null +++ b/ush/load_ufswm_modules.sh @@ -0,0 +1,71 @@ +#! /usr/bin/env bash + +############################################################### +if [[ "${DEBUG_WORKFLOW:-NO}" == "NO" ]]; then + echo "Loading modules quietly..." + set +x +fi + +# Setup runtime environment by loading modules +ulimit_s=$( ulimit -S -s ) + +source "${HOMEgfs}/ush/detect_machine.sh" +source "${HOMEgfs}/ush/module-setup.sh" +if [[ "${MACHINE_ID}" != "noaacloud" ]]; then + module use "${HOMEgfs}/sorc/ufs_model.fd/tests" + module load modules.ufs_model.lua + if [[ "${MACHINE_ID}" = "wcoss2" ]]; then + module load prod_util + module load cray-pals + module load cfp + module load libjpeg + else + module load prod-util + export UTILROOT=${prod_util_ROOT} + fi + module load wgrib2 + export WGRIB2=wgrib2 +fi +if [[ "${MACHINE_ID}" = "hera" ]]; then + module use "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/modulefiles/core" + module load "miniconda3/4.6.14" + module load "gfs_workflow/1.0.0" +fi +if [[ "${MACHINE_ID}" == "noaacloud" ]]; then + if [[ "${PW_CSP:-}" = "aws" ]]; then + + # TODO: This can be cleaned-up; most of this is a hack for now. + module use "/contrib/spack-stack/envs/ufswm/install/modulefiles/Core" + module load "stack-intel" + module load "stack-intel-oneapi-mpi" + module use -a "/contrib/spack-stack/miniconda/modulefiles/miniconda/" + module load "py39_4.12.0" + module load "ufs-weather-model-env/1.0.0" + export NETCDF="/contrib/spack-stack/miniconda/apps/miniconda/py39_4.12.0" + # TODO: Are there plans for EPIC to maintain this package or should GW provide support? 
diff --git a/ush/load_ufswm_modules.sh b/ush/load_ufswm_modules.sh
new file mode 100755
index 0000000000..2d6e6a02c1
--- /dev/null
+++ b/ush/load_ufswm_modules.sh
@@ -0,0 +1,71 @@
+#! /usr/bin/env bash
+
+###############################################################
+if [[ "${DEBUG_WORKFLOW:-NO}" == "NO" ]]; then
+    echo "Loading modules quietly..."
+    set +x
+fi
+
+# Setup runtime environment by loading modules
+ulimit_s=$( ulimit -S -s )
+
+source "${HOMEgfs}/ush/detect_machine.sh"
+source "${HOMEgfs}/ush/module-setup.sh"
+if [[ "${MACHINE_ID}" != "noaacloud" ]]; then
+    module use "${HOMEgfs}/sorc/ufs_model.fd/tests"
+    module load modules.ufs_model.lua
+    if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
+        module load prod_util
+        module load cray-pals
+        module load cfp
+        module load libjpeg
+    else
+        module load prod-util
+        export UTILROOT=${prod_util_ROOT}
+    fi
+    module load wgrib2
+    export WGRIB2=wgrib2
+fi
+if [[ "${MACHINE_ID}" = "hera" ]]; then
+    module use "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/modulefiles/core"
+    module load "miniconda3/4.6.14"
+    module load "gfs_workflow/1.0.0"
+fi
+if [[ "${MACHINE_ID}" == "noaacloud" ]]; then
+    if [[ "${PW_CSP:-}" = "aws" ]]; then
+
+        # TODO: This can be cleaned-up; most of this is a hack for now.
+        module use "/contrib/spack-stack/envs/ufswm/install/modulefiles/Core"
+        module load "stack-intel"
+        module load "stack-intel-oneapi-mpi"
+        module use -a "/contrib/spack-stack/miniconda/modulefiles/miniconda/"
+        module load "py39_4.12.0"
+        module load "ufs-weather-model-env/1.0.0"
+        export NETCDF="/contrib/spack-stack/miniconda/apps/miniconda/py39_4.12.0"
+        # TODO: Are there plans for EPIC to maintain this package or should GW provide support?
+        export UTILROOT="/contrib/global-workflow/NCEPLIBS-prod_util"
+        export PATH="${PATH}:/contrib/global-workflow/bin"
+        ndate_path="$(command -v ndate)"
+        export NDATE="${ndate_path}"
+    fi
+fi
+
+module list
+unset MACHINE_ID
+
+###############################################################
+# exglobal_forecast.py requires the following in PYTHONPATH
+# This will be moved to a module load when ready
+wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src:${HOMEgfs}/ush/python/pygfs"
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
+export PYTHONPATH
+
+# Restore stack soft limit:
+ulimit -S -s "${ulimit_s}"
+unset ulimit_s
+
+# If this function exists in the environment, run it; else do not
+ftype=$(type -t set_trace)
+if [[ "${ftype}" == "function" ]]; then
+    set_trace
+fi
diff --git a/ush/nems_configure.sh b/ush/nems_configure.sh
index ecbc4acde5..28c6713dd8 100755
--- a/ush/nems_configure.sh
+++ b/ush/nems_configure.sh
@@ -56,7 +56,9 @@ if [[ "${cplflx}" = ".true." ]]; then
   local CPLMODE="${cplmode}"
   local coupling_interval_fast_sec="${CPL_FAST}"
   local RESTART_N="${restart_interval}"
+  local ocean_albedo_limit=0.06
   local ATMTILESIZE="${CASE:1}"
+  local ocean_albedo_limit=0.06
 fi

 if [[ "${cplice}" = ".true." ]]; then
diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh
index 27cb96ab81..ec3af83415 100755
--- a/ush/parsing_namelists_FV3.sh
+++ b/ush/parsing_namelists_FV3.sh
@@ -74,18 +74,6 @@ cat > input.nml <<EOF

Date: Thu, 26 Oct 2023 16:10:23 -0400
Subject: [PATCH 13/34] Test tarballs for rstprod before calling chgrp (#1967)

Certain tarballs may or may not contain `rstprod` data. For instance, the
first half cycle gdas and enkfgdas tarballs will not contain `rstprod`,
while future cycles likely will. Also, some systems do not have `rstprod`
on them. This will test the contents of the tarballs first before
attempting to change the group to rstprod.

Resolves #1460
---
 scripts/exgdas_enkf_earc.sh | 37 +++++++++++++++++++++++++++----
 scripts/exglobal_archive.sh | 44 ++++++++++++++++++++++++++++++-----
 2 files changed, 71 insertions(+), 10 deletions(-)

diff --git a/scripts/exgdas_enkf_earc.sh b/scripts/exgdas_enkf_earc.sh
index a1bcba4d79..f172a4ef41 100755
--- a/scripts/exgdas_enkf_earc.sh
+++ b/scripts/exgdas_enkf_earc.sh
@@ -112,12 +112,41 @@ if [ "${ENSGRP}" -eq 0 ]; then
 fi

 set +e
- ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}.tar" $(cat "${ARCH_LIST}/${RUN}.txt")
+ # Check if the tarball will have rstprod in it
+ has_rstprod="NO"
+ while IFS= read -r file; do
+   if [[ -f ${file} ]]; then
+     group=$( stat -c "%G" "${file}" )
+     if [[ "${group}" == "rstprod" ]]; then
+       has_rstprod="YES"
+       break
+     fi
+   fi
+ done < "${ARCH_LIST}/${RUN}.txt"
+
+ # Create the tarball
+ tar_fl=${ATARDIR}/${PDY}${cyc}/${RUN}.tar
+ ${TARCMD} -P -cvf "${tar_fl}" $(cat "${ARCH_LIST}/${RUN}.txt")
 status=$?
- ${HSICMD} chgrp rstprod "${ATARDIR}/${PDY}${cyc}/${RUN}.tar"
- ${HSICMD} chmod 640 "${ATARDIR}/${PDY}${cyc}/${RUN}.tar"
+
+ # If rstprod was found, change the group of the tarball
+ if [[ "${has_rstprod}" == "YES" ]]; then
+   ${HSICMD} chgrp rstprod "${tar_fl}"
+   stat_chgrp=$?
+   ${HSICMD} chmod 640 "${tar_fl}"
+   stat_chgrp=$((stat_chgrp+$?))
+   if [[ "${stat_chgrp}" -gt 0 ]]; then
+     echo "FATAL ERROR: Unable to properly restrict ${tar_fl}!"
+     echo "Attempting to delete ${tar_fl}"
+     ${HSICMD} rm "${tar_fl}"
+     echo "Please verify that ${tar_fl} was deleted!"
+ exit "${stat_chgrp}" + fi + fi + + # For safety, test if the htar/tar command failed only after changing groups if (( status != 0 && ${PDY}${cyc} >= firstday )); then - echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}.tar failed" + echo "FATAL ERROR: ${TARCMD} ${tar_fl} failed" exit "${status}" fi set_strict diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh index 78a6d60b65..54323a0dd0 100755 --- a/scripts/exglobal_archive.sh +++ b/scripts/exglobal_archive.sh @@ -262,17 +262,49 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then shopt -s extglob for targrp in ${targrp_list}; do set +e - ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${targrp}.tar" $(cat "${ARCH_LIST}/${targrp}.txt") - status=$? + + # Test whether gdas.tar or gdas_restarta.tar will have rstprod data + has_rstprod="NO" case ${targrp} in 'gdas'|'gdas_restarta') - ${HSICMD} chgrp rstprod "${ATARDIR}/${CDATE}/${targrp}.tar" - ${HSICMD} chmod 640 "${ATARDIR}/${CDATE}/${targrp}.tar" + # Test for rstprod in each archived file + while IFS= read -r file; do + if [[ -f ${file} ]]; then + group=$( stat -c "%G" "${file}" ) + if [[ "${group}" == "rstprod" ]]; then + has_rstprod="YES" + break + fi + fi + done < "${ARCH_LIST}/${targrp}.txt" + ;; *) ;; esac - if [ "${status}" -ne 0 ] && [ "${PDY}${cyc}" -ge "${firstday}" ]; then - echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${targrp}.tar failed" + + # Create the tarball + tar_fl="${ATARDIR}/${PDY}${cyc}/${targrp}.tar" + ${TARCMD} -P -cvf "${tar_fl}" $(cat "${ARCH_LIST}/${targrp}.txt") + status=$? + + # Change group to rstprod if it was found even if htar/tar failed in case of partial creation + if [[ "${has_rstprod}" == "YES" ]]; then + ${HSICMD} chgrp rstprod "${tar_fl}" + stat_chgrp=$? + ${HSICMD} chmod 640 "${tar_fl}" + stat_chgrp=$((stat_chgrp+$?)) + if [ "${stat_chgrp}" -gt 0 ]; then + echo "FATAL ERROR: Unable to properly restrict ${tar_fl}!" + echo "Attempting to delete ${tar_fl}" + ${HSICMD} rm "${tar_fl}" + echo "Please verify that ${tar_fl} was deleted!" + exit "${stat_chgrp}" + fi + fi + + # For safety, test if the htar/tar command failed after changing groups + if [[ "${status}" -ne 0 ]] && [[ "${PDY}${cyc}" -ge "${firstday}" ]]; then + echo "FATAL ERROR: ${TARCMD} ${tar_fl} failed" exit "${status}" fi set_strict From 77c1ff2bfa26b341b01f41e50ee0ecb31cc4f661 Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com> Date: Thu, 26 Oct 2023 23:44:27 -0400 Subject: [PATCH 14/34] Update GDASApp hash (#1975) Update GDASApp hash to bring recent UFSDA development into g-w. 
Resolves #1972 --- Externals.cfg | 2 +- sorc/checkout.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index 1b30c321ba..abe5f30aaf 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -57,7 +57,7 @@ protocol = git required = False [GDASApp] -hash = d347d22 +hash = 7659c10 local_path = sorc/gdas.cd repo_url = https://github.com/NOAA-EMC/GDASApp.git protocol = git diff --git a/sorc/checkout.sh b/sorc/checkout.sh index 1a3d2c9da6..a756c8d040 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -164,7 +164,7 @@ if [[ ${checkout_gsi} == "YES" ]]; then fi if [[ ${checkout_gdas} == "YES" ]]; then - checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "d347d22" & + checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "7659c10" & fi if [[ ${checkout_gsi} == "YES" || ${checkout_gdas} == "YES" ]]; then From aff6ca7ae559e424dae128d6cfd5da4a4e3049e7 Mon Sep 17 00:00:00 2001 From: Anil Kumar <108816337+AnilKumar-NOAA@users.noreply.github.com> Date: Fri, 27 Oct 2023 14:11:55 -0400 Subject: [PATCH 15/34] GEFS Staging in exglobal_stage_ic (#1892) Adjusts source paths for forecast-only ICs in support of extension to ensembles. New paths omit the resolution (that is captured by the IC id name) and allow for a member level. Directories with the reorganized ICs have been added to the three tier-1 machines under glopara space. Building on this, also adds capability to stage GEFS ICs. Resolves #911 --- parm/config/gefs/config.base.emc.dyn | 1 + parm/config/gefs/config.coupled_ic | 43 -------- parm/config/gefs/config.resources | 12 +-- parm/config/gefs/config.stage_ic | 23 +++++ parm/config/gfs/config.stage_ic | 24 ++--- scripts/exglobal_stage_ic.sh | 148 +++++++++++++-------------- workflow/applications/gefs.py | 4 +- workflow/hosts/hera.yaml | 2 +- workflow/hosts/wcoss2.yaml | 2 +- workflow/rocoto/gefs_tasks.py | 58 ++++++++++- workflow/rocoto/gfs_tasks.py | 19 ++-- workflow/setup_expt.py | 11 +- 12 files changed, 181 insertions(+), 166 deletions(-) delete mode 100644 parm/config/gefs/config.coupled_ic create mode 100644 parm/config/gefs/config.stage_ic diff --git a/parm/config/gefs/config.base.emc.dyn b/parm/config/gefs/config.base.emc.dyn index 3d17421408..b62c921ed0 100644 --- a/parm/config/gefs/config.base.emc.dyn +++ b/parm/config/gefs/config.base.emc.dyn @@ -40,6 +40,7 @@ export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops export COMINsyn="@COMINsyn@" +export BASE_CPLIC="@BASE_CPLIC@" # USER specific paths export HOMEDIR="@HOMEDIR@" diff --git a/parm/config/gefs/config.coupled_ic b/parm/config/gefs/config.coupled_ic deleted file mode 100644 index 50fab283b5..0000000000 --- a/parm/config/gefs/config.coupled_ic +++ /dev/null @@ -1,43 +0,0 @@ -#! 
/usr/bin/env bash - -########## config.coupled_ic ########## - -echo "BEGIN: config.coupled_ic" - -# Get task specific resources -source ${EXPDIR}/config.resources coupled_ic - -if [[ "${machine}" == "WCOSS2" ]]; then - export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" -elif [[ "${machine}" == "HERA" ]]; then - export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" -elif [[ "${machine}" == "ORION" ]]; then - export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" -elif [[ "${machine}" == "S4" ]]; then - export BASE_CPLIC="/data/prod/glopara/coupled_ICs" -elif [[ "${machine}" == "JET" ]]; then - export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" -fi - - -case "${CASE}" in - "C384") - #C384 and P8 ICs - export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c - export CPL_ICEIC=CPC - export CPL_OCNIC=CPC3Dvar - export CPL_WAVIC=GEFSwave20210528v2 - ;; - "C768") - export CPL_ATMIC=HR1 - export CPL_ICEIC=HR1 - export CPL_OCNIC=HR1 - export CPL_WAVIC=HR1 - ;; - *) - echo "Unrecognized case: ${1}" - exit 1 - ;; -esac - -echo "END: config.coupled_ic" diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources index 33156a768a..74e9854084 100644 --- a/parm/config/gefs/config.resources +++ b/parm/config/gefs/config.resources @@ -8,7 +8,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input task argument to set resource variables!" echo "argument can be any one of the following:" - echo "coupled_ic aerosol_init" + echo "stage_ic aerosol_init" echo "sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" echo "ecen esfc efcs epos earc" echo "init_chem mom6ic ocnpost" @@ -416,12 +416,12 @@ elif [[ ${step} = "arch" || ${step} = "earc" ]]; then eval "export memory_${step}=50GB" fi -elif [[ ${step} = "coupled_ic" ]]; then +elif [[ ${step} = "stage_ic" ]]; then - export wtime_coupled_ic="00:15:00" - export npe_coupled_ic=1 - export npe_node_coupled_ic=1 - export nth_coupled_ic=1 + export wtime_stage_ic="00:15:00" + export npe_stage_ic=1 + export npe_node_stage_ic=1 + export nth_stage_ic=1 export is_exclusive=True elif [[ ${step} = "ecen" ]]; then diff --git a/parm/config/gefs/config.stage_ic b/parm/config/gefs/config.stage_ic new file mode 100644 index 0000000000..e2bb0af2b8 --- /dev/null +++ b/parm/config/gefs/config.stage_ic @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +########## config.stage_ic ########## + +echo "BEGIN: config.stage_ic" + +# Get task specific resources +source "${EXPDIR}/config.resources" stage_ic + +case "${CASE}" in + "C48") + export CPL_ATMIC="gefs_test" + export CPL_ICEIC="gefs_test" + export CPL_OCNIC="gefs_test" + export CPL_WAVIC="gefs_test" + ;; + *) + echo "FATAL ERROR Unrecognized resolution: ${CASE}" + exit 1 + ;; +esac + +echo "END: config.stage_ic" diff --git a/parm/config/gfs/config.stage_ic b/parm/config/gfs/config.stage_ic index 4e29c3306e..6d081b3fe0 100644 --- a/parm/config/gfs/config.stage_ic +++ b/parm/config/gfs/config.stage_ic @@ -9,22 +9,22 @@ source "${EXPDIR}/config.resources" stage_ic case "${CASE}" in "C48" | "C96") - export CPL_ATMIC=workflowtest - export CPL_ICEIC=workflowtest - export CPL_OCNIC=workflowtest - export CPL_WAVIC=workflowtest + export CPL_ATMIC="workflow_${CASE}_refactored" + export CPL_ICEIC="workflow_${CASE}_refactored" + export CPL_OCNIC="workflow_${CASE}_refactored" + export CPL_WAVIC="workflow_${CASE}_refactored" ;; "C384") - export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c - export CPL_ICEIC=CPC - export CPL_OCNIC=CPC3Dvar - export CPL_WAVIC=GEFSwave20210528v2 + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c_refactored + export CPL_ICEIC=CPC_refactored + export CPL_OCNIC=CPC3Dvar_refactored + export CPL_WAVIC=GEFSwave20210528v2_refactored ;; "C768") - export CPL_ATMIC=HR2 - export CPL_ICEIC=HR1 - export CPL_OCNIC=HR1 - export CPL_WAVIC=HR1 + export CPL_ATMIC=HR2_refactored + export CPL_ICEIC=HR1_refactored + export CPL_OCNIC=HR1_refactored + export CPL_WAVIC=HR1_refactored ;; *) echo "FATAL ERROR Unrecognized resolution: ${CASE}" diff --git a/scripts/exglobal_stage_ic.sh b/scripts/exglobal_stage_ic.sh index e42a4943d4..fc95fa6654 100755 --- a/scripts/exglobal_stage_ic.sh +++ b/scripts/exglobal_stage_ic.sh @@ -1,4 +1,4 @@ -#! /usr/bin/env bash +#!/usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" @@ -8,99 +8,89 @@ GDATE=$(date --utc -d "${PDY} ${cyc} - ${assim_freq} hours" +%Y%m%d%H) gPDY="${GDATE:0:8}" gcyc="${GDATE:8:2}" +MEMDIR_ARRAY=() +if [[ "${RUN}" == "gefs" ]]; then + # Populate the member_dirs array based on the value of NMEM_ENS + for ((ii = 0; ii <= "${NMEM_ENS}"; ii++)); do + MEMDIR_ARRAY+=("mem$(printf "%03d" "${ii}")") + done +else + MEMDIR_ARRAY+=("") +fi + # Initialize return code err=0 -error_message(){ - echo "FATAL ERROR: Unable to copy ${1} to ${2} (Error code ${3})" +error_message() { + echo "FATAL ERROR: Unable to copy ${1} to ${2} (Error code ${3})" } ############################################################### -# Start staging - -# Stage the FV3 initial conditions to ROTDIR (cold start) -YMD=${PDY} HH=${cyc} generate_com -r COM_ATMOS_INPUT -[[ ! -d "${COM_ATMOS_INPUT}" ]] && mkdir -p "${COM_ATMOS_INPUT}" -source="${BASE_CPLIC}/${CPL_ATMIC}/${PDY}${cyc}/${CDUMP}/${CASE}/INPUT/gfs_ctrl.nc" -target="${COM_ATMOS_INPUT}/gfs_ctrl.nc" -${NCP} "${source}" "${target}" -rc=$? -(( rc != 0 )) && error_message "${source}" "${target}" "${rc}" -err=$((err + rc)) -for ftype in gfs_data sfc_data; do - for tt in $(seq 1 6); do - source="${BASE_CPLIC}/${CPL_ATMIC}/${PDY}${cyc}/${CDUMP}/${CASE}/INPUT/${ftype}.tile${tt}.nc" - target="${COM_ATMOS_INPUT}/${ftype}.tile${tt}.nc" - ${NCP} "${source}" "${target}" - rc=$? - (( rc != 0 )) && error_message "${source}" "${target}" "${rc}" - err=$((err + rc)) - done -done - -# Stage ocean initial conditions to ROTDIR (warm start) -if [[ "${DO_OCN:-}" = "YES" ]]; then - YMD=${gPDY} HH=${gcyc} generate_com -r COM_OCEAN_RESTART - [[ ! 
-d "${COM_OCEAN_RESTART}" ]] && mkdir -p "${COM_OCEAN_RESTART}" - source="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/ocn/${OCNRES}/MOM.res.nc" - target="${COM_OCEAN_RESTART}/${PDY}.${cyc}0000.MOM.res.nc" - ${NCP} "${source}" "${target}" +for MEMDIR in "${MEMDIR_ARRAY[@]}"; do + # Stage the FV3 initial conditions to ROTDIR (cold start) + YMD=${PDY} HH=${cyc} generate_com COM_ATMOS_INPUT + [[ ! -d "${COM_ATMOS_INPUT}" ]] && mkdir -p "${COM_ATMOS_INPUT}" + src="${BASE_CPLIC}/${CPL_ATMIC}/${PDY}${cyc}/${MEMDIR}/atmos/gfs_ctrl.nc" + tgt="${COM_ATMOS_INPUT}/gfs_ctrl.nc" + ${NCP} "${src}" "${tgt}" rc=$? - (( rc != 0 )) && error_message "${source}" "${target}" "${rc}" + ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" err=$((err + rc)) - case "${OCNRES}" in - "500" | "100") # Only 5 degree or 1 degree ocean does not have MOM.res_[1-4].nc files - ;; - "025") # Only 1/4 degree ocean has MOM.res_[1-4].nc files - for nn in $(seq 1 4); do - source="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/ocn/${OCNRES}/MOM.res_${nn}.nc" - if [[ -f "${source}" ]]; then - target="${COM_OCEAN_RESTART}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" - ${NCP} "${source}" "${target}" - rc=$? - (( rc != 0 )) && error_message "${source}" "${target}" "${rc}" - err=$((err + rc)) - fi - done - ;; - *) - echo "FATAL ERROR: Unsupported ocean resolution ${OCNRES}" - rc=1 + for ftype in gfs_data sfc_data; do + for ((tt = 1; tt <= 6; tt++)); do + src="${BASE_CPLIC}/${CPL_ATMIC}/${PDY}${cyc}/${MEMDIR}/atmos/${ftype}.tile${tt}.nc" + tgt="${COM_ATMOS_INPUT}/${ftype}.tile${tt}.nc" + ${NCP} "${src}" "${tgt}" + rc=$? + tgt="${COM_ATMOS_INPUT}/${ftype}.tile${tt}.nc" + ${NCP} "${src}" "${tgt}" + rc=$? + ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" err=$((err + rc)) - ;; - esac -fi - -# Stage ice initial conditions to ROTDIR (warm start) -if [[ "${DO_ICE:-}" = "YES" ]]; then - YMD=${gPDY} HH=${gcyc} generate_com -r COM_ICE_RESTART - [[ ! -d "${COM_ICE_RESTART}" ]] && mkdir -p "${COM_ICE_RESTART}" - ICERESdec=$(echo "${ICERES}" | awk '{printf "%0.2f", $1/100}') - source="${BASE_CPLIC}/${CPL_ICEIC}/${PDY}${cyc}/ice/${ICERES}/cice5_model_${ICERESdec}.res_${PDY}${cyc}.nc" - target="${COM_ICE_RESTART}/${PDY}.${cyc}0000.cice_model.res.nc" - ${NCP} "${source}" "${target}" - rc=$? - (( rc != 0 )) && error_message "${source}" "${target}" "${rc}" - err=$((err + rc)) -fi + done + done -# Stage the WW3 initial conditions to ROTDIR (warm start; TODO: these should be placed in $RUN.$gPDY/$gcyc) -if [[ "${DO_WAVE:-}" = "YES" ]]; then - YMD=${PDY} HH=${cyc} generate_com -r COM_WAVE_RESTART - [[ ! -d "${COM_WAVE_RESTART}" ]] && mkdir -p "${COM_WAVE_RESTART}" - for grdID in ${waveGRD}; do # TODO: check if this is a bash array; if so adjust - source="${BASE_CPLIC}/${CPL_WAVIC}/${PDY}${cyc}/wav/${grdID}/${PDY}.${cyc}0000.restart.${grdID}" - target="${COM_WAVE_RESTART}/${PDY}.${cyc}0000.restart.${grdID}" - ${NCP} "${source}" "${target}" + # Stage ocean initial conditions to ROTDIR (warm start) + if [[ "${DO_OCN:-}" = "YES" ]]; then + YMD=${gPDY} HH=${gcyc} generate_com COM_OCEAN_RESTART + [[ ! -d "${COM_OCEAN_RESTART}" ]] && mkdir -p "${COM_OCEAN_RESTART}" + src="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.MOM.res.nc" + tgt="${COM_OCEAN_RESTART}/${PDY}.${cyc}0000.MOM.res.nc" + ${NCP} "${src}" "${tgt}" rc=$? 
- (( rc != 0 )) && error_message "${source}" "${target}" "${rc}" + ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" err=$((err + rc)) - done -fi + fi + # Stage ice initial conditions to ROTDIR (warm start) + if [[ "${DO_ICE:-}" = "YES" ]]; then + YMD=${gPDY} HH=${gcyc} generate_com COM_ICE_RESTART + [[ ! -d "${COM_ICE_RESTART}" ]] && mkdir -p "${COM_ICE_RESTART}" + src="${BASE_CPLIC}/${CPL_ICEIC}/${PDY}${cyc}/${MEMDIR}/ice/${PDY}.${cyc}0000.cice_model.res.nc" + tgt="${COM_ICE_RESTART}/${PDY}.${cyc}0000.cice_model.res.nc" + ${NCP} "${src}" "${tgt}" + rc=$? + ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" + err=$((err + rc)) + fi + + # Stage the WW3 initial conditions to ROTDIR (warm start; TODO: these should be placed in $RUN.$gPDY/$gcyc) + if [[ "${DO_WAVE:-}" = "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com COM_WAVE_RESTART + [[ ! -d "${COM_WAVE_RESTART}" ]] && mkdir -p "${COM_WAVE_RESTART}" + for grdID in ${waveGRD}; do # TODO: check if this is a bash array; if so adjust + src="${BASE_CPLIC}/${CPL_WAVIC}/${PDY}${cyc}/${MEMDIR}/wave/${PDY}.${cyc}0000.restart.${grdID}" + tgt="${COM_WAVE_RESTART}/${PDY}.${cyc}0000.restart.${grdID}" + ${NCP} "${src}" "${tgt}" + rc=$? + ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" + err=$((err + rc)) + done + fi +done # for MEMDIR in "${MEMDIR_ARRAY[@]}"; do ############################################################### # Check for errors and exit if any of the above failed -if [[ "${err}" -ne 0 ]] ; then +if [[ "${err}" -ne 0 ]]; then echo "FATAL ERROR: Unable to copy ICs from ${BASE_CPLIC} to ${ROTDIR}; ABORT!" exit "${err}" fi diff --git a/workflow/applications/gefs.py b/workflow/applications/gefs.py index a46451bd3e..8ac4cdc18e 100644 --- a/workflow/applications/gefs.py +++ b/workflow/applications/gefs.py @@ -14,7 +14,7 @@ def _get_app_configs(self): """ Returns the config_files that are involved in gefs """ - configs = ['fcst'] + configs = ['stage_ic', 'fcst'] if self.nens > 0: configs += ['efcs'] @@ -32,7 +32,7 @@ def _update_base(base_in): def get_task_names(self): - tasks = ['fcst'] + tasks = ['stage_ic', 'fcst'] if self.nens > 0: tasks += ['efcs'] diff --git a/workflow/hosts/hera.yaml b/workflow/hosts/hera.yaml index 61270b7b27..31911f2d21 100644 --- a/workflow/hosts/hera.yaml +++ b/workflow/hosts/hera.yaml @@ -1,6 +1,6 @@ BASE_GIT: '/scratch1/NCEPDEV/global/glopara/git' DMPDIR: '/scratch1/NCEPDEV/global/glopara/dump' -BASE_CPLIC: '/scratch1/NCEPDEV/climate/role.ufscpara/IC' +BASE_CPLIC: '/scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs' PACKAGEROOT: '/scratch1/NCEPDEV/global/glopara/nwpara' COMROOT: '/scratch1/NCEPDEV/global/glopara/com' COMINsyn: '${COMROOT}/gfs/prod/syndat' diff --git a/workflow/hosts/wcoss2.yaml b/workflow/hosts/wcoss2.yaml index 2a301064db..41e1044eff 100644 --- a/workflow/hosts/wcoss2.yaml +++ b/workflow/hosts/wcoss2.yaml @@ -1,6 +1,6 @@ BASE_GIT: '/lfs/h2/emc/global/save/emc.global/git' DMPDIR: '/lfs/h2/emc/dump/noscrub/dump' -BASE_CPLIC: '/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC' +BASE_CPLIC: '/lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR/prototype_ICs' PACKAGEROOT: '${PACKAGEROOT:-"/lfs/h1/ops/prod/packages"}' COMROOT: '${COMROOT:-"/lfs/h1/ops/prod/com"}' COMINsyn: '${COMROOT}/gfs/v16.3/syndat' diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py index b0c56bdb66..c5dae3a13d 100644 --- a/workflow/rocoto/gefs_tasks.py +++ b/workflow/rocoto/gefs_tasks.py @@ -8,9 +8,64 @@ class GEFSTasks(Tasks): def __init__(self, app_config: AppConfig, cdump: str) -> None: 
super().__init__(app_config, cdump) + def stage_ic(self): + + cpl_ic = self._configs['stage_ic'] + + deps = [] + + # Atm ICs + if self.app_config.do_atm: + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/mem000/atmos" + for file in ['gfs_ctrl.nc'] + \ + [f'{datatype}_data.tile{tile}.nc' + for datatype in ['gfs', 'sfc'] + for tile in range(1, self.n_tiles + 1)]: + data = f"{prefix}/{file}" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Ocean ICs + if self.app_config.do_ocean: + ocn_res = f"{self._base.get('OCNRES', '025'):03d}" + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/mem000/ocean" + data = f"{prefix}/@Y@m@d.@H0000.MOM.res.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + if ocn_res in ['025']: + # 0.25 degree ocean model also has these additional restarts + for res in [f'res_{res_index}' for res_index in range(1, 4)]: + data = f"{prefix}/@Y@m@d.@H0000.MOM.{res}.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Ice ICs + if self.app_config.do_ice: + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ICEIC']}/@Y@m@d@H/mem000/ice" + data = f"{prefix}/@Y@m@d.@H0000.cice_model.res.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Wave ICs + if self.app_config.do_wave: + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/mem000/wave" + for wave_grid in self._configs['waveinit']['waveGRD'].split(): + data = f"{prefix}/{wave_grid}/@Y@m@d.@H0000.restart.{wave_grid}" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('stage_ic') + task = create_wf_task('stage_ic', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + def fcst(self): # TODO: Add real dependencies dependencies = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}stage_ic'} + dependencies.append(rocoto.add_dependency(dep_dict)) resources = self.get_resource('fcst') task = create_wf_task('fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) @@ -18,8 +73,9 @@ def fcst(self): return task def efcs(self): - # TODO: Add real dependencies dependencies = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}stage_ic'} + dependencies.append(rocoto.add_dependency(dep_dict)) efcsenvars = self.envars.copy() efcsenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#')) diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 56449cb9d5..d5c5ba2ed1 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -23,13 +23,12 @@ def stage_ic(self): # Atm ICs if self.app_config.do_atm: - atm_res = self._base.get('CASE', 'C384') - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/{self.cdump}" + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/atmos" for file in ['gfs_ctrl.nc'] + \ [f'{datatype}_data.tile{tile}.nc' for datatype in ['gfs', 'sfc'] for tile in range(1, self.n_tiles + 1)]: - data = f"{prefix}/{atm_res}/INPUT/{file}" + data = f"{prefix}/{file}" dep_dict = {'type': 'data', 'data': data} deps.append(rocoto.add_dependency(dep_dict)) else: # data-atmosphere @@ -42,31 +41,29 @@ def stage_ic(self): # Ocean ICs if self.app_config.do_ocean: ocn_res = f"{self._base.get('OCNRES', '025'):03d}" - prefix = 
f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/ocn" - data = f"{prefix}/{ocn_res}/MOM.res.nc" + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/ocean" + data = f"{prefix}/@Y@m@d.@H0000.MOM.res.nc" dep_dict = {'type': 'data', 'data': data} deps.append(rocoto.add_dependency(dep_dict)) if ocn_res in ['025']: # 0.25 degree ocean model also has these additional restarts for res in [f'res_{res_index}' for res_index in range(1, 4)]: - data = f"{prefix}/{ocn_res}/MOM.{res}.nc" + data = f"{prefix}/@Y@m@d.@H0000.MOM.{res}.nc" dep_dict = {'type': 'data', 'data': data} deps.append(rocoto.add_dependency(dep_dict)) # Ice ICs if self.app_config.do_ice: - ice_res = f"{self._base.get('ICERES', '025'):03d}" - ice_res_dec = f'{float(ice_res) / 100:.2f}' prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ICEIC']}/@Y@m@d@H/ice" - data = f"{prefix}/{ice_res}/cice5_model_{ice_res_dec}.res_@Y@m@d@H.nc" + data = f"{prefix}/@Y@m@d.@H0000.cice_model.res.nc" dep_dict = {'type': 'data', 'data': data} deps.append(rocoto.add_dependency(dep_dict)) # Wave ICs if self.app_config.do_wave: - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/wav" + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/wave" for wave_grid in self._configs['waveinit']['waveGRD'].split(): - data = f"{prefix}/{wave_grid}/@Y@m@d.@H0000.restart.{wave_grid}" + data = f"{prefix}/@Y@m@d.@H0000.restart.{wave_grid}" dep_dict = {'type': 'data', 'data': data} deps.append(rocoto.add_dependency(dep_dict)) diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py index a9810ec3f2..a808cafd91 100755 --- a/workflow/setup_expt.py +++ b/workflow/setup_expt.py @@ -232,16 +232,7 @@ def fill_COMROT_forecasts(host, inputs): """ Implementation of 'fill_COMROT' for forecast-only mode """ - if inputs.system in ['gfs']: - print('forecast-only mode treats ICs differently and cannot be staged here') - elif inputs.system in ['gefs']: # Temporarily copy ICs from icsdir into COM for testing - print('temporary hack to stage gefs ICs for testing') - comrot = os.path.join(inputs.comrot, inputs.pslot) - idatestr = datetime_to_YMDH(inputs.idate) - current_cycle_dir = f"gefs.{idatestr[:8]}" - cmd = f"cp -as {inputs.icsdir}/{current_cycle_dir} {comrot}/{current_cycle_dir}" - os.system(cmd) - return + print('forecast-only mode treats ICs differently and cannot be staged here') def fill_EXPDIR(inputs): From eabc82ad30c118645c0e6e216162d823882a8139 Mon Sep 17 00:00:00 2001 From: Guillaume Vernieres Date: Mon, 30 Oct 2023 10:31:27 -0400 Subject: [PATCH 16/34] Make the early cycle work with the coupled UFS configured as S2S (#1954) --- jobs/JGLOBAL_FORECAST | 4 ++-- jobs/JGLOBAL_STAGE_IC | 4 ++++ scripts/exglobal_stage_ic.sh | 4 ++-- workflow/rocoto/gfs_tasks.py | 2 +- 4 files changed, 9 insertions(+), 5 deletions(-) diff --git a/jobs/JGLOBAL_FORECAST b/jobs/JGLOBAL_FORECAST index 7fb0fbe4f7..e42c81eaa4 100755 --- a/jobs/JGLOBAL_FORECAST +++ b/jobs/JGLOBAL_FORECAST @@ -61,13 +61,13 @@ fi if [[ ${DO_OCN} == "YES" ]]; then YMD=${PDY} HH=${cyc} generate_com -rx COM_MED_RESTART COM_OCEAN_RESTART COM_OCEAN_INPUT \ COM_OCEAN_HISTORY COM_OCEAN_ANALYSIS - RUN=${CDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ + RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL fi if [[ ${DO_ICE} == "YES" ]]; then YMD=${PDY} HH=${cyc} generate_com -rx COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART - RUN=${CDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ + RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" 
generate_com -rx \ COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL fi diff --git a/jobs/JGLOBAL_STAGE_IC b/jobs/JGLOBAL_STAGE_IC index 437c8f40a0..317231871e 100755 --- a/jobs/JGLOBAL_STAGE_IC +++ b/jobs/JGLOBAL_STAGE_IC @@ -3,6 +3,10 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "stage_ic" -c "base stage_ic" +# Restart conditions for GFS cycle come from GDAS +# shellcheck disable=SC2153 +rCDUMP=${CDUMP} +[[ ${CDUMP} = "gfs" ]] && export rCDUMP="gdas" # Execute the Script "${HOMEgfs}/scripts/exglobal_stage_ic.sh" diff --git a/scripts/exglobal_stage_ic.sh b/scripts/exglobal_stage_ic.sh index fc95fa6654..43812adc89 100755 --- a/scripts/exglobal_stage_ic.sh +++ b/scripts/exglobal_stage_ic.sh @@ -52,7 +52,7 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do # Stage ocean initial conditions to ROTDIR (warm start) if [[ "${DO_OCN:-}" = "YES" ]]; then - YMD=${gPDY} HH=${gcyc} generate_com COM_OCEAN_RESTART + RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} generate_com COM_OCEAN_RESTART [[ ! -d "${COM_OCEAN_RESTART}" ]] && mkdir -p "${COM_OCEAN_RESTART}" src="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.MOM.res.nc" tgt="${COM_OCEAN_RESTART}/${PDY}.${cyc}0000.MOM.res.nc" @@ -63,7 +63,7 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do fi # Stage ice initial conditions to ROTDIR (warm start) if [[ "${DO_ICE:-}" = "YES" ]]; then - YMD=${gPDY} HH=${gcyc} generate_com COM_ICE_RESTART + RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} generate_com COM_ICE_RESTART [[ ! -d "${COM_ICE_RESTART}" ]] && mkdir -p "${COM_ICE_RESTART}" src="${BASE_CPLIC}/${CPL_ICEIC}/${PDY}${cyc}/${MEMDIR}/ice/${PDY}.${cyc}0000.cice_model.res.nc" tgt="${COM_ICE_RESTART}/${PDY}.${cyc}0000.cice_model.res.nc" diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index d5c5ba2ed1..55055a46ee 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -363,7 +363,7 @@ def landanl(self): def ocnanalprep(self): - ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"]) + ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'}) deps = [] data = f'{ocean_hist_path}/gdas.t@Hz.ocnf009.nc' From 7086ddc4bece6fb2a8c1c2e6090800d4293e410e Mon Sep 17 00:00:00 2001 From: AndrewEichmann-NOAA <58948505+AndrewEichmann-NOAA@users.noreply.github.com> Date: Mon, 30 Oct 2023 19:58:43 -0400 Subject: [PATCH 17/34] Add ocean analysis files to HPSS archiving task (#1985) This PR adds the files under `analysis/ocean` (except those in `bump` and `vrfy`) to the HPSS archiving job, in `gdasocean_analysis.tar`, which were previously not archived. 
Resolves NOAA-EMC/GDASApp/issues/693 --- jobs/JGLOBAL_ARCHIVE | 1 + scripts/exglobal_archive.sh | 2 +- ush/hpssarch_gen.sh | 9 +++++++++ 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/jobs/JGLOBAL_ARCHIVE b/jobs/JGLOBAL_ARCHIVE index 1ea8fdfeef..f2828a9fdd 100755 --- a/jobs/JGLOBAL_ARCHIVE +++ b/jobs/JGLOBAL_ARCHIVE @@ -17,6 +17,7 @@ YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_BUFR COM_ATMO COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART \ COM_OBS COM_TOP \ COM_OCEAN_HISTORY COM_OCEAN_INPUT COM_OCEAN_RESTART COM_OCEAN_XSECT COM_OCEAN_2D COM_OCEAN_3D \ + COM_OCEAN_ANALYSIS \ COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION for grid in "0p25" "0p50" "1p00"; do diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh index 54323a0dd0..dcc864e223 100755 --- a/scripts/exglobal_archive.sh +++ b/scripts/exglobal_archive.sh @@ -238,7 +238,7 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then #gdasocean if [ "${DO_OCN}" = "YES" ]; then - targrp_list="${targrp_list} gdasocean" + targrp_list="${targrp_list} gdasocean gdasocean_analysis" fi #gdasice diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index 138a5ff656..f09c816e93 100755 --- a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -454,6 +454,8 @@ if [[ ${type} == "gdas" ]]; then touch gdasocean.txt rm -rf gdasocean_restart.txt touch gdasocean_restart.txt + rm -rf gdasocean_analysis.txt + touch gdasocean_analysis.txt head="gdas.t${cyc}z." @@ -468,6 +470,13 @@ if [[ ${type} == "gdas" ]]; then echo "${COM_MED_RESTART/${ROTDIR}\//}/*" } >> gdasocean_restart.txt + { + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/${head}*" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/gdas.t??z.ocngrid.nc" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/diags" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/yaml" + } >> gdasocean_analysis.txt + fi if [[ ${DO_ICE} = "YES" ]]; then From 7cc137116f1619bf1e5a12eaacb98d17da97c223 Mon Sep 17 00:00:00 2001 From: Jeff Whitaker Date: Tue, 31 Oct 2023 07:54:24 -0600 Subject: [PATCH 18/34] Add initial C96/1 deg coupled model support (#1991) --- parm/config/gefs/config.ufs | 35 +++++++++++++++++++++++- parm/config/gfs/config.ocn | 1 - parm/config/gfs/config.resources | 9 ++++++ parm/config/gfs/config.ufs | 41 ++++++++++++++++++++++++++-- parm/ufs/mom6/MOM_input_template_100 | 35 ++++-------------------- ush/parsing_namelists_MOM6.sh | 3 ++ 6 files changed, 90 insertions(+), 34 deletions(-) diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs index bfbb0a12cb..563ee8f021 100644 --- a/parm/config/gefs/config.ufs +++ b/parm/config/gefs/config.ufs @@ -256,6 +256,10 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RESTART_SETTING='r' MOM6_RIVER_RUNOFF='False' eps_imesh="4.0e-1" + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc" + MOM6_DIAG_MISVAL="0.0" + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" ;; "100") ntasks_mom6=20 @@ -269,8 +273,15 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RESTART_SETTING='n' MOM6_RIVER_RUNOFF='False' eps_imesh="2.5e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi ;; - "50") + "050") ntasks_mom6=60 OCNTIM=3600 NX_GLB=720 @@ -282,6 +293,15 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RESTART_SETTING='n' MOM6_RIVER_RUNOFF='True' eps_imesh="1.0e-1" + TOPOEDITS="ufs.topo_edits_011818.nc" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + 
MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='True' ;; "025") ntasks_mom6=220 @@ -295,6 +315,15 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RIVER_RUNOFF='True' MOM6_RESTART_SETTING="r" eps_imesh="1.0e-1" + TOPOEDITS="" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='True' ;; *) echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" @@ -311,6 +340,10 @@ if [[ "${skip_mom6}" == "false" ]]; then export MOM6_RIVER_RUNOFF export MOM6_RESTART_SETTING export eps_imesh + export TOPOEDITS + export MOM6_DIAG_COORD_DEF_Z_FILE + export MOM6_DIAG_MISVAL + export MOM6_ALLOW_LANDMASK_CHANGES fi # CICE6 specific settings diff --git a/parm/config/gfs/config.ocn b/parm/config/gfs/config.ocn index 7d14e3dd52..37f6a966aa 100644 --- a/parm/config/gfs/config.ocn +++ b/parm/config/gfs/config.ocn @@ -11,7 +11,6 @@ export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL # Templated variables in MOM_input_template export MOM6_USE_LI2016="True" # set to False for restart reproducibility export MOM6_THERMO_SPAN="False" -export MOM6_ALLOW_LANDMASK_CHANGES="False" if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then export ODA_INCUPD="True" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 5b9fa17234..1c21048c26 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -370,6 +370,9 @@ elif [[ "${step}" = "ocnanalbmat" ]]; then C384) npes=480 ;; + C96) + npes=16 + ;; C48) npes=16 ;; @@ -392,6 +395,9 @@ elif [[ "${step}" = "ocnanalrun" ]]; then npes=480 export memory_ocnanalchkpt="2.8TB" ;; + C96) + npes=16 + ;; C48) npes=16 ;; @@ -418,6 +424,9 @@ elif [[ "${step}" = "ocnanalchkpt" ]]; then C384) export memory_ocnanalchkpt="128GB" ;; + C96) + export memory_ocnanalchkpt="32GB" + ;; C48) export memory_ocnanalchkpt="32GB" ;; diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs index 3ab0c5f28d..3d2e36771a 100644 --- a/parm/config/gfs/config.ufs +++ b/parm/config/gfs/config.ufs @@ -256,6 +256,10 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RESTART_SETTING='r' MOM6_RIVER_RUNOFF='False' eps_imesh="4.0e-1" + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc" + MOM6_DIAG_MISVAL="0.0" + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" ;; "100") ntasks_mom6=20 @@ -264,13 +268,22 @@ if [[ "${skip_mom6}" == "false" ]]; then NY_GLB=320 DT_DYNAM_MOM6='1800' DT_THERM_MOM6='3600' - FRUNOFF="" + FRUNOFF="runoff.daitren.clim.1deg.nc" CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" - MOM6_RESTART_SETTING='n' + MOM6_RESTART_SETTING='r' MOM6_RIVER_RUNOFF='False' eps_imesh="2.5e-1" + TOPOEDITS="ufs.topo_edits_011818.nc" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='True' ;; - "50") + "050") ntasks_mom6=60 OCNTIM=3600 NX_GLB=720 @@ -282,6 +295,15 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RESTART_SETTING='n' MOM6_RIVER_RUNOFF='True' eps_imesh="1.0e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + 
MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" ;; "025") ntasks_mom6=220 @@ -295,6 +317,15 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RIVER_RUNOFF='True' MOM6_RESTART_SETTING="r" eps_imesh="1.0e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" ;; *) echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" @@ -308,9 +339,13 @@ if [[ "${skip_mom6}" == "false" ]]; then export DT_DYNAM_MOM6 DT_THERM_MOM6 export FRUNOFF export CHLCLIM + export TOPOEDITS export MOM6_RIVER_RUNOFF export MOM6_RESTART_SETTING export eps_imesh + export MOM6_DIAG_COORD_DEF_Z_FILE + export MOM6_DIAG_MISVAL + export MOM6_ALLOW_LANDMASK_CHANGES fi # CICE6 specific settings diff --git a/parm/ufs/mom6/MOM_input_template_100 b/parm/ufs/mom6/MOM_input_template_100 index 8b616ad27f..5c671fe9d3 100644 --- a/parm/ufs/mom6/MOM_input_template_100 +++ b/parm/ufs/mom6/MOM_input_template_100 @@ -74,31 +74,6 @@ WRITE_GEOM = 2 ! default = 1 SAVE_INITIAL_CONDS = False ! [Boolean] default = False ! If true, write the initial conditions to a file given by IC_OUTPUT_FILE. -! === module MOM_oda_incupd === -ODA_INCUPD = @[ODA_INCUPD] ! [Boolean] default = False - ! If true, oda incremental updates will be applied - ! everywhere in the domain. -ODA_INCUPD_FILE = "mom6_increment.nc" ! The name of the file with the T,S,h increments. - -ODA_TEMPINC_VAR = "pt_inc" ! default = "ptemp_inc" - ! The name of the potential temperature inc. variable in - ! ODA_INCUPD_FILE. -ODA_SALTINC_VAR = "s_inc" ! default = "sal_inc" - ! The name of the salinity inc. variable in - ! ODA_INCUPD_FILE. -ODA_THK_VAR = "h_fg" ! default = "h" - ! The name of the int. depth inc. variable in - ! ODA_INCUPD_FILE. - -ODA_UINC_VAR = "u_inc" ! default = "u_inc" - ! The name of the zonal vel. inc. variable in - ! ODA_INCUPD_UV_FILE. -ODA_VINC_VAR = "v_inc" ! default = "v_inc" - ! The name of the meridional vel. inc. variable in - ! ODA_INCUPD_UV_FILE. -ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS] ! default=3.0 - ! Number of hours for full update (0=direct insertion). - ! === module MOM_domains === TRIPOLAR_N = True ! [Boolean] default = False ! Use tripolar connectivity at the northern edge of the domain. With @@ -346,7 +321,7 @@ DIAG_COORDS = "z Z ZSTAR" ! A list of string tuples associating diag_table modules to ! a coordinate definition used for diagnostics. Each string ! is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME". -DIAG_COORD_DEF_Z="FILE:interpolate_zgrid_40L.nc,interfaces=zw" +DIAG_COORD_DEF_Z="FILE:@[MOM6_DIAG_COORD_DEF_Z_FILE],interfaces=zw" DIAG_MISVAL = -1e34 !AVAILABLE_DIAGS_FILE = "available_diags.002160" ! default = "available_diags.000000" ! A file into which to write a list of all available ocean diagnostics that can @@ -831,6 +806,8 @@ ENERGYSAVEDAYS = 0.25 ! [days] default = 1.0 ! other globally summed diagnostics. ! === module ocean_model_init === + +! === module MOM_oda_incupd === ODA_INCUPD = @[ODA_INCUPD] ! [Boolean] default = False ! If true, oda incremental updates will be applied ! everywhere in the domain. @@ -845,11 +822,11 @@ ODA_SALTINC_VAR = "Salt" ! default = "sal_inc" ODA_THK_VAR = "h" ! default = "h" ! The name of the int. depth inc. variable in ! ODA_INCUPD_FILE. 
-ODA_INCUPD_UV = false ! -!ODA_UINC_VAR = "u" ! default = "u_inc" +ODA_INCUPD_UV = true ! +ODA_UINC_VAR = "u" ! default = "u_inc" ! The name of the zonal vel. inc. variable in ! ODA_INCUPD_UV_FILE. -!ODA_VINC_VAR = "v" ! default = "v_inc" +ODA_VINC_VAR = "v" ! default = "v_inc" ! The name of the meridional vel. inc. variable in ! ODA_INCUPD_UV_FILE. ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS] ! default=3.0 diff --git a/ush/parsing_namelists_MOM6.sh b/ush/parsing_namelists_MOM6.sh index 4dceb845a2..8059096363 100755 --- a/ush/parsing_namelists_MOM6.sh +++ b/ush/parsing_namelists_MOM6.sh @@ -75,6 +75,9 @@ sed -e "s/@\[DT_THERM_MOM6\]/${DT_THERM_MOM6}/g" \ -e "s/@\[CHLCLIM\]/${CHLCLIM}/g" \ -e "s/@\[DO_OCN_SPPT\]/${OCN_SPPT}/g" \ -e "s/@\[PERT_EPBL\]/${PERT_EPBL}/g" \ + -e "s/@\[MOM6_DIAG_COORD_DEF_Z_FILE\]/${MOM6_DIAG_COORD_DEF_Z_FILE}/g" \ + -e "s/@\[TOPOEDITS\]/${TOPOEDITS}/g" \ + -e "s/@\[MOM6_DIAG_MISVAL\]/${MOM6_DIAG_MISVAL}/g" \ -e "s/@\[ODA_INCUPD_NHOURS\]/${ODA_INCUPD_NHOURS}/g" \ -e "s/@\[ODA_INCUPD\]/${ODA_INCUPD}/g" "${DATA}/INPUT/MOM_input_template_${OCNRES}" > "${DATA}/INPUT/MOM_input" rm "${DATA}/INPUT/MOM_input_template_${OCNRES}" From b2e4a2e9d269842acbb1b37dd563ee5dee2adf0f Mon Sep 17 00:00:00 2001 From: TerrenceMcGuinness-NOAA Date: Tue, 31 Oct 2023 13:57:02 +0000 Subject: [PATCH 19/34] Allow CI case to exclude running on certain platforms (#2001) --- workflow/create_experiment.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/workflow/create_experiment.py b/workflow/create_experiment.py index c343f1d798..3a5d21aadf 100755 --- a/workflow/create_experiment.py +++ b/workflow/create_experiment.py @@ -18,6 +18,7 @@ """ import os +import sys from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter from pathlib import Path @@ -27,6 +28,7 @@ import setup_expt import setup_xml +from hosts import Host _here = os.path.dirname(__file__) _top = os.path.abspath(os.path.join(os.path.abspath(_here), '..')) @@ -75,6 +77,12 @@ def input_args(): data.update(os.environ) testconf = parse_j2yaml(path=user_inputs.yaml, data=data) + if 'exclude' in testconf: + host = Host() + if host.machine.lower() in [excluded_host.lower() for excluded_host in testconf.exclude]: + logger.info(f'Skipping creation of case: {testconf.arguments.pslot} on {host.machine.capitalize()}') + sys.exit(0) + # Create a list of arguments to setup_expt.py setup_expt_args = [testconf.experiment.system, testconf.experiment.mode] for kk, vv in testconf.arguments.items(): From 872068cb5ea78b311f7a77e97ee1428ae027cb61 Mon Sep 17 00:00:00 2001 From: TerrenceMcGuinness-NOAA Date: Tue, 31 Oct 2023 14:37:55 +0000 Subject: [PATCH 20/34] Add output error log to user for create_experiment.py on fail in CI (#1974) Small changes to driver CI bash scripts to recapture error log from `create_experiment.py` logger Fixes #1939 --- ci/scripts/driver.sh | 3 ++- workflow/create_experiment.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/ci/scripts/driver.sh b/ci/scripts/driver.sh index a79ecfd6cf..00143fa049 100755 --- a/ci/scripts/driver.sh +++ b/ci/scripts/driver.sh @@ -154,6 +154,7 @@ for pr in ${pr_list}; do export pslot="${case}_${pr_sha}" rm -Rf "${STMP}/RUNDIRS/${pslot}" set +e + export LOGFILE_PATH="${HOMEgfs}/ci/scripts/create_experiment.log" "${HOMEgfs}/workflow/create_experiment.py" --yaml "${HOMEgfs}/ci/cases/pr/${case}.yaml" ci_status=$?
set -e @@ -169,7 +170,7 @@ for pr in ${pr_list}; do echo "Failed to create experiment: *FAIL* ${pslot}" echo "Experiment setup: failed at $(date) for experiment ${pslot}" || true echo "" - cat "${HOMEgfs}/ci/scripts/"setup_*.std* + cat "${LOGFILE_PATH}" } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed" "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}" diff --git a/workflow/create_experiment.py b/workflow/create_experiment.py index 3a5d21aadf..bfc87672f4 100755 --- a/workflow/create_experiment.py +++ b/workflow/create_experiment.py @@ -34,7 +34,7 @@ _top = os.path.abspath(os.path.join(os.path.abspath(_here), '..')) # Setup the logger -logger = Logger(level=os.environ.get("LOGGING_LEVEL", "INFO"), colored_log=True) +logger = Logger(logfile_path=os.environ.get("LOGFILE_PATH"), level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) @logit(logger) From 67c050c201f92e855d09211e41a5f51ee1cb5230 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Tue, 31 Oct 2023 19:00:47 +0000 Subject: [PATCH 21/34] Add missing export for rCDUMP in stage_ic (#2009) rCDUMP is needed by the exscript but was never exported by the jjob. --- jobs/JGLOBAL_STAGE_IC | 1 + 1 file changed, 1 insertion(+) diff --git a/jobs/JGLOBAL_STAGE_IC b/jobs/JGLOBAL_STAGE_IC index 317231871e..4c94990fde 100755 --- a/jobs/JGLOBAL_STAGE_IC +++ b/jobs/JGLOBAL_STAGE_IC @@ -7,6 +7,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "stage_ic" -c "base stage_ic" # shellcheck disable=SC2153 rCDUMP=${CDUMP} [[ ${CDUMP} = "gfs" ]] && export rCDUMP="gdas" +export rCDUMP # Execute the Script "${HOMEgfs}/scripts/exglobal_stage_ic.sh" From 517b92f9c20c9e44abba3183325e35a95a7334a5 Mon Sep 17 00:00:00 2001 From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Tue, 31 Oct 2023 16:33:11 -0400 Subject: [PATCH 22/34] Parallelize the build scripts (#1998) Optimized/parallelized build scripts. A few notes: 1) The default number of build jobs is 20, but one could argue for using 40. When researching this, I looked up what the SRW is compiling with. That system uses 40 cores, which seems a little excessive, but on testing the global workflow, the actual number of cores being used at any given time rarely exceeds 16 when running with 40 cores. This is because the builds tend to use multiple threads in the beginning when compiling low-level modules while the higher-level modules are more or less serial AND because the GDASApp takes several minutes to initialize all of its subrepositories by which time the smaller builds are complete. 2) I also updated checkout.sh so that all checkouts are simultaneous. The CPU load for `git submodule` is quite low, so running 16 instead of 8 jobs at once is not much more expensive. 3) To make this work, I had to add `-j` options to most of the build scripts. The only exception is build_upp, for which the build script within the UPP is hard coded to use 6 cores. 4) I fixed a few small bugs in the build scripts along the way. 5) Lastly, this reduces the total build time from ~2.5 hours for the entire system (including GDAS and GSI in the same build) to ~40 minutes when running with `-j 40`. A stripped-down sketch of the scheduling pattern is shown below.
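The core of the new build_all.sh is a simple core-budget scheduler: launch any pending build that still fits within the core budget, return cores to the budget as builds finish, and collect exit statuses at the end. A minimal, self-contained sketch of the idea (the job names and per-job core counts here are hypothetical, and `sleep` stands in for the real `./build_<name>.sh -j N` calls):

```
#! /usr/bin/env bash
# Hypothetical jobs and core counts; sleep stands in for ./build_<name>.sh -j N
max_cores=20                                # analogous to _build_job_max
declare -A jobs=( [alpha]=8 [bravo]=6 [charlie]=3 )
declare -A pids
started=0
in_use=0

while (( started < ${#jobs[@]} )); do
  for name in "${!jobs[@]}"; do
    # Launch any not-yet-started job that fits in the remaining core budget
    if [[ -z "${pids[${name}]+x}" ]] && (( in_use + jobs[${name}] <= max_cores )); then
      sleep "${jobs[${name}]}" &            # stand-in for the real build
      pids[${name}]=$!
      in_use=$(( in_use + jobs[${name}] ))
      echo "started ${name} using ${jobs[${name}]} core(s)"
    fi
  done
  # Recount running jobs so cores from finished builds return to the budget
  started=0
  in_use=0
  for name in "${!pids[@]}"; do
    started=$(( started + 1 ))
    if ps -p "${pids[${name}]}" > /dev/null; then
      in_use=$(( in_use + jobs[${name}] ))
    fi
  done
  sleep 1
done

# Everything has been launched; now collect the exit statuses
errs=0
for name in "${!pids[@]}"; do
  wait "${pids[${name}]}" || errs=$(( errs + 1 ))
done
echo "all builds finished with ${errs} failure(s)"
```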
Resolves #1978 --- sorc/build_all.sh | 293 +++++++++++++++++++------------------- sorc/build_gdas.sh | 3 +- sorc/build_gfs_utils.sh | 8 +- sorc/build_gsi_enkf.sh | 5 +- sorc/build_gsi_monitor.sh | 5 +- sorc/build_gsi_utils.sh | 5 +- sorc/build_ufs.sh | 13 +- sorc/build_ufs_utils.sh | 22 ++- sorc/build_upp.sh | 2 - sorc/build_ww3prepost.sh | 8 +- sorc/checkout.sh | 2 +- sorc/gfs_build.cfg | 2 +- sorc/partial_build.sh | 3 +- 13 files changed, 197 insertions(+), 174 deletions(-) diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 19f1ae6c92..95183f9065 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -16,13 +16,15 @@ function _usage() { Builds all of the global-workflow components by calling the individual build scripts in sequence. -Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-h][-v] +Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-h][-j n][-v] -a UFS_app: Build a specific UFS app instead of the default -c build_config: Selectively build based on the provided config instead of the default config -h: print this help message and exit + -j: + Specify maximum number of build jobs (n) -v: Execute all build scripts with -v option to turn on verbose where supported EOF @@ -33,25 +35,25 @@ script_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd) cd "${script_dir}" || exit 1 _build_ufs_opt="" -_ops_opt="" _verbose_opt="" _partial_opt="" +_build_job_max=20 # Reset option counter in case this script is sourced OPTIND=1 -while getopts ":a:c:hov" option; do +while getopts ":a:c:j:hv" option; do case "${option}" in a) _build_ufs_opt+="-a ${OPTARG} ";; c) _partial_opt+="-c ${OPTARG} ";; h) _usage;; - o) _ops_opt+="-o";; + j) _build_job_max="${OPTARG} ";; v) _verbose_opt="-v";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" - usage + _usage ;; *) echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" - usage + _usage ;; esac done @@ -105,170 +107,161 @@ ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} # shellcheck disable= err=0 -#------------------------------------ -# build gfs_utils -#------------------------------------ -if [[ ${Build_gfs_utils} == 'true' ]]; then - echo " .... Building gfs_utils .... " - # shellcheck disable=SC2086,SC2248 - ./build_gfs_utils.sh ${_verbose_opt} > "${logs_dir}/build_gfs_utils.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building gfs_utils." - echo "The log file is in ${logs_dir}/build_gfs_utils.log" - fi - err=$((err + rc)) -fi +declare -A build_jobs +declare -A build_opts #------------------------------------ -# build WW3 pre & post execs +# Check which builds to do and assign # of build jobs #------------------------------------ -if [[ ${Build_ww3_prepost} == "true" ]]; then - echo " .... Building WW3 pre and post execs .... " - # shellcheck disable=SC2086,SC2248 - ./build_ww3prepost.sh ${_verbose_opt} ${_build_ufs_opt} > "${logs_dir}/build_ww3_prepost.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building WW3 pre/post processing." - echo "The log file is in ${logs_dir}/build_ww3_prepost.log" - fi - err=$((err + rc)) -fi -#------------------------------------ -# build forecast model -#------------------------------------ +# Mandatory builds, unless otherwise specified, for the UFS +big_jobs=0 if [[ ${Build_ufs_model} == 'true' ]]; then - echo " .... Building forecast model .... 
" - # shellcheck disable=SC2086,SC2248 - ./build_ufs.sh ${_verbose_opt} ${_build_ufs_opt} > "${logs_dir}/build_ufs.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building UFS model." - echo "The log file is in ${logs_dir}/build_ufs.log" - fi - err=$((err + rc)) + build_jobs["ufs"]=8 + big_jobs=$((big_jobs+1)) + build_opts["ufs"]="${_verbose_opt} ${_build_ufs_opt}" fi - -#------------------------------------ -# build GSI and EnKF - optional checkout -#------------------------------------ -if [[ -d gsi_enkf.fd ]]; then - if [[ ${Build_gsi_enkf} == 'true' ]]; then - echo " .... Building gsi and enkf .... " - # shellcheck disable=SC2086,SC2248 - ./build_gsi_enkf.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_gsi_enkf.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building gsi_enkf." - echo "The log file is in ${logs_dir}/build_gsi_enkf.log" - fi - err=$((err + rc)) - fi -else - echo " .... Skip building gsi and enkf .... " +# The UPP is hardcoded to use 6 cores +if [[ ${Build_upp} == 'true' ]]; then + build_jobs["upp"]=6 + build_opts["upp"]="" fi - -#------------------------------------ -# build gsi utilities -#------------------------------------ -if [[ -d gsi_utils.fd ]]; then - if [[ ${Build_gsi_utils} == 'true' ]]; then - echo " .... Building gsi utilities .... " - # shellcheck disable=SC2086,SC2248 - ./build_gsi_utils.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_gsi_utils.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building gsi utilities." - echo "The log file is in ${logs_dir}/build_gsi_utils.log" - fi - err=$((err + rc)) - fi -else - echo " .... Skip building gsi utilities .... " +if [[ ${Build_ufs_utils} == 'true' ]]; then + build_jobs["ufs_utils"]=3 + build_opts["ufs_utils"]="${_verbose_opt}" +fi +if [[ ${Build_gfs_utils} == 'true' ]]; then + build_jobs["gfs_utils"]=1 + build_opts["gfs_utils"]="${_verbose_opt}" +fi +if [[ ${Build_ww3prepost} == "true" ]]; then + build_jobs["ww3prepost"]=3 + build_opts["ww3prepost"]="${_verbose_opt} ${_build_ufs_opt}" fi -#------------------------------------ -# build gdas - optional checkout -#------------------------------------ +# Optional DA builds if [[ -d gdas.cd ]]; then - if [[ ${Build_gdas} == 'true' ]]; then - echo " .... Building GDASApp .... " - # shellcheck disable=SC2086,SC2248 - ./build_gdas.sh ${_verbose_opt} > "${logs_dir}/build_gdas.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building GDASApp." - echo "The log file is in ${logs_dir}/build_gdas.log" - fi - err=$((err + rc)) - fi -else - echo " .... Skip building GDASApp .... " + build_jobs["gdas"]=16 + big_jobs=$((big_jobs+1)) + build_opts["gdas"]="${_verbose_opt}" +fi +if [[ -d gsi_enkf.fd ]]; then + build_jobs["gsi_enkf"]=8 + big_jobs=$((big_jobs+1)) + build_opts["gsi_enkf"]="${_verbose_opt}" +fi +if [[ -d gsi_utils.fd ]]; then + build_jobs["gsi_utils"]=2 + build_opts["gsi_utils"]="${_verbose_opt}" fi - -#------------------------------------ -# build gsi monitor -#------------------------------------ if [[ -d gsi_monitor.fd ]]; then - if [[ ${Build_gsi_monitor} == 'true' ]]; then - echo " .... Building gsi monitor .... " - # shellcheck disable=SC2086,SC2248 - ./build_gsi_monitor.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_gsi_monitor.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building gsi monitor." 
- echo "The log file is in ${logs_dir}/build_gsi_monitor.log" - fi - err=$((err + rc)) - fi -else - echo " .... Skip building gsi monitor .... " + build_jobs["gsi_monitor"]=1 + build_opts["gsi_monitor"]="${_verbose_opt}" fi -#------------------------------------ -# build UPP -#------------------------------------ -if [[ ${Build_upp} == 'true' ]]; then - echo " .... Building UPP .... " - # shellcheck disable=SC2086,SC2248 - ./build_upp.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_upp.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building UPP." - echo "The log file is in ${logs_dir}/build_upp.log" - fi - err=$((err + rc)) -fi +# Go through all builds and adjust CPU counts down if necessary +requested_cpus=0 +build_list="" +for build in "${!build_jobs[@]}"; do + if [[ -z "${build_list}" ]]; then + build_list="${build}" + else + build_list="${build_list}, ${build}" + fi + if [[ ${build_jobs[${build}]} -gt ${_build_job_max} ]]; then + build_jobs[${build}]=${_build_job_max} + fi + requested_cpus=$(( requested_cpus + build_jobs[${build}] )) +done -#------------------------------------ -# build ufs_utils -#------------------------------------ -if [[ ${Build_ufs_utils} == 'true' ]]; then - echo " .... Building ufs_utils .... " - # shellcheck disable=SC2086,SC2248 - ./build_ufs_utils.sh ${_verbose_opt} > "${logs_dir}/build_ufs_utils.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building ufs_utils." - echo "The log file is in ${logs_dir}/build_ufs_utils.log" - fi - err=$((err + rc)) +echo "Building ${build_list}" + +# Go through all builds and adjust CPU counts up if possible +if [[ ${requested_cpus} -lt ${_build_job_max} && ${big_jobs} -gt 0 ]]; then + # Add cores to the gdas, ufs, and gsi build jobs + extra_cores=$(( _build_job_max - requested_cpus )) + extra_cores=$(( extra_cores / big_jobs )) + for build in "${!build_jobs[@]}"; do + if [[ "${build}" == "gdas" || "${build}" == "ufs" || "${build}" == "gsi_enkf" ]]; then + build_jobs[${build}]=$(( build_jobs[${build}] + extra_cores )) + fi + done fi +procs_in_use=0 +declare -A build_ids + +builds_started=0 +# Now start looping through all of the jobs until everything is done +while [[ ${builds_started} -lt ${#build_jobs[@]} ]]; do + for build in "${!build_jobs[@]}"; do + # Has the job started? + if [[ -n "${build_jobs[${build}]+0}" && -z "${build_ids[${build}]+0}" ]]; then + # Do we have enough processors to run it? + if [[ ${_build_job_max} -ge $(( build_jobs[build] + procs_in_use )) ]]; then + if [[ "${build}" != "upp" ]]; then + "./build_${build}.sh" -j "${build_jobs[${build}]}" "${build_opts[${build}]:-}" > \ + "${logs_dir}/build_${build}.log" 2>&1 & + else + "./build_${build}.sh" "${build_opts[${build}]}" > \ + "${logs_dir}/build_${build}.log" 2>&1 & + fi + build_ids["${build}"]=$! + echo "Starting build_${build}.sh" + procs_in_use=$(( procs_in_use + build_jobs[${build}] )) + fi + fi + done + + # Check if all builds have completed + # Also recalculate how many processors are in use to account for completed builds + builds_started=0 + procs_in_use=0 + for build in "${!build_jobs[@]}"; do + # Has the build started? + if [[ -n "${build_ids[${build}]+0}" ]]; then + builds_started=$(( builds_started + 1)) + # Calculate how many processors are in use + # Is the build still running? 
+ if ps -p "${build_ids[${build}]}" > /dev/null; then + procs_in_use=$(( procs_in_use + build_jobs["${build}"] )) + fi + fi + done + + sleep 5s +done + +# Wait for all jobs to complete and check return statuses +errs=0 +while [[ ${#build_jobs[@]} -gt 0 ]]; do + for build in "${!build_jobs[@]}"; do + # Test if each job is complete and if so, notify and remove from the array + if [[ -n "${build_ids[${build}]+0}" ]]; then + if ! ps -p "${build_ids[${build}]}" > /dev/null; then + wait "${build_ids[${build}]}" + build_stat=$? + errs=$((errs+build_stat)) + if [[ ${build_stat} == 0 ]]; then + echo "build_${build}.sh completed successfully!" + else + echo "build_${build}.sh failed with status ${build_stat}!" + fi + + # Remove the completed build from the list of PIDs + unset 'build_ids[${build}]' + unset 'build_jobs[${build}]' + fi + fi + done + + sleep 5s +done + #------------------------------------ # Exception Handling #------------------------------------ -if (( err != 0 )); then +if (( errs != 0 )); then cat << EOF BUILD ERROR: One or more components failed to build Check the associated build log(s) for details. diff --git a/sorc/build_gdas.sh b/sorc/build_gdas.sh index 39cf5ac9a7..b1a17c33dd 100755 --- a/sorc/build_gdas.sh +++ b/sorc/build_gdas.sh @@ -2,9 +2,10 @@ set -eux OPTIND=1 -while getopts ":dov" option; do +while getopts ":j:dv" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; + j) export BUILD_JOBS=${OPTARG};; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" diff --git a/sorc/build_gfs_utils.sh b/sorc/build_gfs_utils.sh index 2a7a611239..09bd4a9656 100755 --- a/sorc/build_gfs_utils.sh +++ b/sorc/build_gfs_utils.sh @@ -5,11 +5,13 @@ function usage() { cat << EOF Builds the GFS utility programs. 
-Usage: ${BASH_SOURCE[0]} [-d][-h][-v] +Usage: ${BASH_SOURCE[0]} [-d][-h][-j n][-v] -d: Build with debug options -h: Print this help message and exit + -j: + Build with n build jobs -v: Turn on verbose output EOF @@ -19,10 +21,11 @@ EOF cwd=$(pwd) OPTIND=1 -while getopts ":dvh" option; do +while getopts ":j:dvh" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; v) export BUILD_VERBOSE="YES";; + j) export BUILD_JOBS="${OPTARG}";; h) usage ;; @@ -40,6 +43,7 @@ shift $((OPTIND-1)) BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS=${BUILD_JOBS:-8} \ "${cwd}/gfs_utils.fd/ush/build.sh" exit diff --git a/sorc/build_gsi_enkf.sh b/sorc/build_gsi_enkf.sh index 671c3d6205..9ba278e3ec 100755 --- a/sorc/build_gsi_enkf.sh +++ b/sorc/build_gsi_enkf.sh @@ -2,10 +2,10 @@ set -eux OPTIND=1 -while getopts ":dov" option; do +while getopts ":j:dv" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; - o) _ops="YES";; + j) export BUILD_JOBS="${OPTARG}";; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -21,6 +21,7 @@ shift $((OPTIND-1)) BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS=${BUILD_JOBS:-8} \ GSI_MODE=GFS \ ENKF_MODE=GFS \ REGRESSION_TESTS=NO \ diff --git a/sorc/build_gsi_monitor.sh b/sorc/build_gsi_monitor.sh index ec3645e52f..3de1262aac 100755 --- a/sorc/build_gsi_monitor.sh +++ b/sorc/build_gsi_monitor.sh @@ -4,10 +4,10 @@ set -eux cwd=$(pwd) OPTIND=1 -while getopts ":dov" option; do +while getopts ":j:dv" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; - o) _ops="YES";; + j) export BUILD_JOBS="${OPTARG}";; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -23,6 +23,7 @@ shift $((OPTIND-1)) BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS=${BUILD_JOBS:-8} \ "${cwd}/gsi_monitor.fd/ush/build.sh" exit diff --git a/sorc/build_gsi_utils.sh b/sorc/build_gsi_utils.sh index bcbc110cf6..81eab0f628 100755 --- a/sorc/build_gsi_utils.sh +++ b/sorc/build_gsi_utils.sh @@ -4,10 +4,10 @@ set -eux cwd=$(pwd) OPTIND=1 -while getopts ":dov" option; do +while getopts ":j:dv" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; - o) _ops="YES";; # TODO - unused; remove? + j) export BUILD_JOBS="${OPTARG}";; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -23,6 +23,7 @@ shift $((OPTIND-1)) BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS=${BUILD_JOBS:-8} \ UTIL_OPTS="-DBUILD_UTIL_ENKF_GFS=ON -DBUILD_UTIL_NCIO=ON" \ "${cwd}/gsi_utils.fd/ush/build.sh" diff --git a/sorc/build_ufs.sh b/sorc/build_ufs.sh index aa7183c006..aaeeefea23 100755 --- a/sorc/build_ufs.sh +++ b/sorc/build_ufs.sh @@ -7,10 +7,11 @@ cwd=$(pwd) APP="S2SWA" CCPP_SUITES="FV3_GFS_v17_p8,FV3_GFS_v17_coupled_p8" # TODO: does the g-w need to build with all these CCPP_SUITES? 
-while getopts ":da:v" option; do +while getopts ":da:j:v" option; do case "${option}" in d) BUILD_TYPE="DEBUG";; - a) APP="${OPTARG}" ;; + a) APP="${OPTARG}";; + j) BUILD_JOBS="${OPTARG}";; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -38,21 +39,23 @@ if [[ "${MACHINE_ID}" != "noaacloud" ]]; then mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua else - + if [[ "${PW_CSP:-}" == "aws" ]]; then + set +x # TODO: This will need to be addressed further when the EPIC stacks are available/supported. module use /contrib/spack-stack/envs/ufswm/install/modulefiles/Core module load stack-intel module load stack-intel-oneapi-mpi module load ufs-weather-model-env/1.0.0 - # TODO: It is still uncertain why this is the only module that is + # TODO: It is still uncertain why this is the only module that is # missing; check the spack build as this needed to be added manually. module load w3emc/2.9.2 # TODO: This has similar issues for the EPIC stack. module list + set -x fi export CMAKE_FLAGS="${MAKE_OPT}" - ./build.sh + BUILD_JOBS=${BUILD_JOBS:-8} ./build.sh mv "${cwd}/ufs_model.fd/build/ufs_model" "${cwd}/ufs_model.fd/tests/ufs_model.x" fi diff --git a/sorc/build_ufs_utils.sh b/sorc/build_ufs_utils.sh index 5e2edf0737..e78ca3c180 100755 --- a/sorc/build_ufs_utils.sh +++ b/sorc/build_ufs_utils.sh @@ -1,10 +1,30 @@ #! /usr/bin/env bash set -eux +OPTIND=1 +while getopts ":j:dv" option; do + case "${option}" in + j) export BUILD_JOBS="${OPTARG}";; + v) export BUILD_VERBOSE="YES";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + usage + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + usage + ;; + esac +done +shift $((OPTIND-1)) + script_dir=$(dirname "${BASH_SOURCE[0]}") cd "${script_dir}/ufs_utils.fd" || exit 1 -CMAKE_OPTS="-DGFS=ON" ./build_all.sh +CMAKE_OPTS="-DGFS=ON" \ +BUILD_JOBS=${BUILD_JOBS:-8} \ +BUILD_VERBOSE=${BUILD_VERBOSE:-} \ +./build_all.sh exit diff --git a/sorc/build_upp.sh b/sorc/build_upp.sh index a00650ba16..a55e96ebc8 100755 --- a/sorc/build_upp.sh +++ b/sorc/build_upp.sh @@ -12,11 +12,9 @@ while getopts ":dv" option; do v) _opts+="-v ";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" - usage ;; *) echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" - usage ;; esac done diff --git a/sorc/build_ww3prepost.sh b/sorc/build_ww3prepost.sh index 7024a7bba1..919afaacb3 100755 --- a/sorc/build_ww3prepost.sh +++ b/sorc/build_ww3prepost.sh @@ -7,9 +7,10 @@ cd "${script_dir}" || exit 1 # Default settings APP="S2SWA" -while getopts "a:v" option; do +while getopts ":j:a:v" option; do case "${option}" in - a) APP="${OPTARG}" ;; + a) APP="${OPTARG}";; + j) BUILD_JOBS="${OPTARG}";; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -58,6 +59,7 @@ post_exes="ww3_outp ww3_outf ww3_outp ww3_gint ww3_ounf ww3_ounp ww3_grib" #create build directory: path_build="${WW3_DIR}/build_SHRD" +[[ -d "${path_build}" ]] && rm -rf "${path_build}" mkdir -p "${path_build}" || exit 1 cd "${path_build}" || exit 1 echo "Forcing a SHRD build" @@ -85,7 +87,7 @@ if (( rc != 0 )); then echo "Fatal error in cmake." exit "${rc}" fi -make -j 8 +make -j "${BUILD_JOBS:-8}" rc=$? if (( rc != 0 )); then echo "Fatal error in make." 
diff --git a/sorc/checkout.sh b/sorc/checkout.sh index a756c8d040..de4fcdf838 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -151,7 +151,7 @@ source "${topdir}/../workflow/gw_setup.sh" # The checkout version should always be a specific commit (hash or tag), not a branch errs=0 # Checkout UFS submodules in parallel -checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-68050e5}" "8" ; errs=$((errs + $?)) +checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-68050e5}" "8" & # Run all other checkouts simultaneously with just 1 core each to handle submodules. checkout "wxflow" "https://github.com/NOAA-EMC/wxflow" "528f5ab" & diff --git a/sorc/gfs_build.cfg b/sorc/gfs_build.cfg index 4dd9b1b74e..8c171072d0 100644 --- a/sorc/gfs_build.cfg +++ b/sorc/gfs_build.cfg @@ -2,7 +2,7 @@ # ***** configuration of global-workflow build ***** Building ufs_model (ufs_model) ........................ yes - Building ww3_prepost (ww3_prepost) .................... yes + Building ww3prepost (ww3prepost) ...................... yes Building gsi_enkf (gsi_enkf) .......................... yes Building gsi_utils (gsi_utils) ........................ yes Building gsi_monitor (gsi_monitor) .................... yes diff --git a/sorc/partial_build.sh b/sorc/partial_build.sh index 64f1a705ec..34b8b557ce 100755 --- a/sorc/partial_build.sh +++ b/sorc/partial_build.sh @@ -3,11 +3,10 @@ # define the array of the name of build program # declare -a Build_prg=("Build_ufs_model" \ - "Build_ww3_prepost" \ + "Build_ww3prepost" \ "Build_gsi_enkf" \ "Build_gsi_utils" \ "Build_gsi_monitor" \ - "Build_ww3_prepost" \ "Build_gdas" \ "Build_upp" \ "Build_ufs_utils" \ From 77be0ec6a3117b9f7e71a19c69ac64c09b521ecd Mon Sep 17 00:00:00 2001 From: "Henry R. Winterbottom" <49202169+HenryWinterbottom-NOAA@users.noreply.github.com> Date: Wed, 1 Nov 2023 12:09:33 -0600 Subject: [PATCH 23/34] No longer write archive lists to COM (#2000) Changes the respective `.txt` files used for the HPSS archive to be written to the `DATA` path. Each defined text file has been updated to explicitly write to `DATA` rather than `COM` such that the files are deleted rather than being archived. Resolves #760 --- scripts/exglobal_archive.sh | 12 +- ush/hpssarch_gen.sh | 214 ++++++++++++++++++------------------ 2 files changed, 110 insertions(+), 116 deletions(-) diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh index dcc864e223..18217f4efc 100755 --- a/scripts/exglobal_archive.sh +++ b/scripts/exglobal_archive.sh @@ -154,11 +154,7 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then mod=$((nday % ARCH_FCSTICFREQ)) if [[ "${mod}" -eq 0 ]] || [[ "${PDY}${cyc}" -eq "${firstday}" ]]; then SAVEFCSTIC="YES" ; fi - - ARCH_LIST="${DATA}/archlist" - [[ -d ${ARCH_LIST} ]] && rm -rf "${ARCH_LIST}" - mkdir -p "${ARCH_LIST}" - cd "${ARCH_LIST}" || exit 2 + cd "${DATA}" || exit 2 "${HOMEgfs}/ush/hpssarch_gen.sh" "${RUN}" status=$? @@ -196,7 +192,7 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then if [ "${DO_AERO}" = "YES" ]; then for targrp in chem; do # TODO: Why is this tar being done here instead of being added to the list? - ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${targrp}.tar" $(cat "${ARCH_LIST}/${targrp}.txt") + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${targrp}.tar" $(cat "${DATA}/${targrp}.txt") status=$?
if [[ "${status}" -ne 0 ]] && [[ "${PDY}${cyc}" -ge "${firstday}" ]]; then echo "HTAR ${PDY}${cyc} ${targrp}.tar failed" @@ -276,7 +272,7 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then break fi fi - done < "${ARCH_LIST}/${targrp}.txt" + done < "${DATA}/${targrp}.txt" ;; *) ;; @@ -284,7 +280,7 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then # Create the tarball tar_fl="${ATARDIR}/${PDY}${cyc}/${targrp}.tar" - ${TARCMD} -P -cvf "${tar_fl}" $(cat "${ARCH_LIST}/${targrp}.txt") + ${TARCMD} -P -cvf "${tar_fl}" $(cat "${DATA}/${targrp}.txt") status=$? # Change group to rstprod if it was found even if htar/tar failed in case of partial creation diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index f09c816e93..8d8ebf4fe9 100755 --- a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -27,30 +27,30 @@ if [[ ${type} = "gfs" ]]; then FHMAX_HF_GFS=${FHMAX_HF_GFS:-120} FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} - rm -f gfsa.txt - rm -f gfsb.txt - rm -f gfs_restarta.txt - touch gfsa.txt - touch gfsb.txt - touch gfs_restarta.txt + rm -f "${DATA}/gfsa.txt" + rm -f "${DATA}/gfsb.txt" + rm -f "${DATA}/gfs_restarta.txt" + touch "${DATA}/gfsa.txt" + touch "${DATA}/gfsb.txt" + touch "${DATA}/gfs_restarta.txt" if [[ ${ARCH_GAUSSIAN} = "YES" ]]; then - rm -f gfs_pgrb2b.txt - rm -f gfs_netcdfb.txt - rm -f gfs_flux.txt - touch gfs_pgrb2b.txt - touch gfs_netcdfb.txt - touch gfs_flux.txt + rm -f "${DATA}/gfs_pgrb2b.txt" + rm -f "${DATA}/gfs_netcdfb.txt" + rm -f "${DATA}/gfs_flux.txt" + touch "${DATA}/gfs_pgrb2b.txt" + touch "${DATA}/gfs_netcdfb.txt" + touch "${DATA}/gfs_flux.txt" if [[ ${MODE} = "cycled" ]]; then - rm -f gfs_netcdfa.txt - touch gfs_netcdfa.txt + rm -f "${DATA}/gfs_netcdfa.txt" + touch "${DATA}/gfs_netcdfa.txt" fi fi if [[ ${DO_DOWN} = "YES" ]]; then - rm -f gfs_downstream.txt - touch gfs_downstream.txt + rm -f "${DATA}/gfs_downstream.txt" + touch "${DATA}/gfs_downstream.txt" fi head="gfs.t${cyc}z." 
@@ -61,7 +61,7 @@ if [[ ${type} = "gfs" ]]; then echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.anl.idx" echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.anl" echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.anl.idx" - } >> gfs_pgrb2b.txt + } >> "${DATA}/gfs_pgrb2b.txt" if [[ ${MODE} = "cycled" ]]; then { @@ -73,7 +73,7 @@ if [[ ${type} = "gfs" ]]; then for file in "${gsida_files[@]}"; do [[ -s ${COM_ATMOS_ANALYSIS}/${head}${file} ]] && echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}${file}" done - } >> gfs_netcdfa.txt + } >> "${DATA}/gfs_netcdfa.txt" fi fh=0 @@ -82,7 +82,7 @@ if [[ ${type} = "gfs" ]]; then { echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atmf${fhr}.nc" echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}sfcf${fhr}.nc" - } >> gfs_netcdfb.txt + } >> "${DATA}/gfs_netcdfb.txt" fh=$((fh+ARCH_GAUSSIAN_FHINC)) done fi @@ -130,7 +130,7 @@ if [[ ${type} = "gfs" ]]; then for file in "${genesis_files[@]}"; do [[ -s ${COM_ATMOS_GENESIS}/${file} ]] && echo "${COM_ATMOS_GENESIS/${ROTDIR}\//}/${file}" done - } >> gfsa.txt + } >> "${DATA}/gfsa.txt" { if [[ ${DO_DOWN} = "YES" ]]; then @@ -142,14 +142,14 @@ if [[ ${type} = "gfs" ]]; then echo "${COM_ATMOS_BUFR/${ROTDIR}\//}/gfs.t${cyc}z.bufrsnd.tar.gz" fi fi - } >> gfs_downstream.txt + } >> "${DATA}/gfs_downstream.txt" { echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.anl" echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.anl.idx" echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl" echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl.idx" - } >> gfsb.txt + } >> "${DATA}/gfsb.txt" fh=0 @@ -159,7 +159,7 @@ if [[ ${type} = "gfs" ]]; then { echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2" echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2.idx" - } >> gfs_flux.txt + } >> "${DATA}/gfs_flux.txt" { echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.f${fhr}" @@ -168,14 +168,14 @@ if [[ ${type} = "gfs" ]]; then echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/{head}pgrb2b.1p00.f${fhr}" echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/{head}pgrb2b.1p00.f${fhr}.idx" fi - } >> gfs_pgrb2b.txt + } >> "${DATA}/gfs_pgrb2b.txt" fi { echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}" echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}.idx" echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atm.logf${fhr}.txt" - } >> gfsa.txt + } >> "${DATA}/gfsa.txt" { @@ -187,7 +187,7 @@ if [[ ${type} = "gfs" ]]; then echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}" echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}.idx" fi - } >> gfsb.txt + } >> "${DATA}/gfsb.txt" inc=${FHOUT_GFS} if (( FHMAX_HF_GFS > 0 && FHOUT_HF_GFS > 0 && fh < FHMAX_HF_GFS )); then @@ -221,14 +221,14 @@ if [[ ${type} = "gfs" ]]; then echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile5.nc" echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile6.nc" fi - } >> gfs_restarta.txt + } >> "${DATA}/gfs_restarta.txt" #.................. if [[ ${DO_WAVE} = "YES" ]]; then - rm -rf gfswave.txt - touch gfswave.txt + rm -rf "${DATA}/gfswave.txt" + touch "${DATA}/gfswave.txt" head="gfswave.t${cyc}z." @@ -237,60 +237,60 @@ if [[ ${type} = "gfs" ]]; then echo "${COM_WAVE_HISTORY/${ROTDIR}\//}/ww3_multi*" echo "${COM_WAVE_GRID/${ROTDIR}\//}/${head}*" echo "${COM_WAVE_STATION/${ROTDIR}\//}/${head}*" - } >> gfswave.txt + } >> "${DATA}/gfswave.txt" fi if [[ ${DO_OCN} = "YES" ]]; then head="gfs.t${cyc}z." 
- rm -f gfs_flux_1p00.txt - rm -f ocn_ice_grib2_0p5.txt - rm -f ocn_ice_grib2_0p25.txt - rm -f ocn_2D.txt - rm -f ocn_3D.txt - rm -f ocn_xsect.txt - rm -f ocn_daily.txt - touch gfs_flux_1p00.txt - touch ocn_ice_grib2_0p5.txt - touch ocn_ice_grib2_0p25.txt - touch ocn_2D.txt - touch ocn_3D.txt - touch ocn_xsect.txt - touch ocn_daily.txt - echo "${COM_OCEAN_INPUT/${ROTDIR}\//}/MOM_input" >> ocn_2D.txt - echo "${COM_OCEAN_2D/${ROTDIR}\//}/ocn_2D*" >> ocn_2D.txt - echo "${COM_OCEAN_3D/${ROTDIR}\//}/ocn_3D*" >> ocn_3D.txt - echo "${COM_OCEAN_XSECT/${ROTDIR}\//}/ocn*EQ*" >> ocn_xsect.txt - echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/ocn_daily*" >> ocn_daily.txt - echo "${COM_OCEAN_GRIB_0p50/${ROTDIR}\//}/ocn_ice*0p5x0p5.grb2" >> ocn_ice_grib2_0p5.txt - echo "${COM_OCEAN_GRIB_0p25/${ROTDIR}\//}/ocn_ice*0p25x0p25.grb2" >> ocn_ice_grib2_0p25.txt + rm -f "${DATA}/gfs_flux_1p00.txt" + rm -f "${DATA}/ocn_ice_grib2_0p5.txt" + rm -f "${DATA}/ocn_ice_grib2_0p25.txt" + rm -f "${DATA}/ocn_2D.txt" + rm -f "${DATA}/ocn_3D.txt" + rm -f "${DATA}/ocn_xsect.txt" + rm -f "${DATA}/ocn_daily.txt" + touch "${DATA}/gfs_flux_1p00.txt" + touch "${DATA}/ocn_ice_grib2_0p5.txt" + touch "${DATA}/ocn_ice_grib2_0p25.txt" + touch "${DATA}/ocn_2D.txt" + touch "${DATA}/ocn_3D.txt" + touch "${DATA}/ocn_xsect.txt" + touch "${DATA}/ocn_daily.txt" + echo "${COM_OCEAN_INPUT/${ROTDIR}\//}/MOM_input" >> "${DATA}/ocn_2D.txt" + echo "${COM_OCEAN_2D/${ROTDIR}\//}/ocn_2D*" >> "${DATA}/ocn_2D.txt" + echo "${COM_OCEAN_3D/${ROTDIR}\//}/ocn_3D*" >> "${DATA}/ocn_3D.txt" + echo "${COM_OCEAN_XSECT/${ROTDIR}\//}/ocn*EQ*" >> "${DATA}/ocn_xsect.txt" + echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/ocn_daily*" >> "${DATA}/ocn_daily.txt" + echo "${COM_OCEAN_GRIB_0p50/${ROTDIR}\//}/ocn_ice*0p5x0p5.grb2" >> "${DATA}/ocn_ice_grib2_0p5.txt" + echo "${COM_OCEAN_GRIB_0p25/${ROTDIR}\//}/ocn_ice*0p25x0p25.grb2" >> "${DATA}/ocn_ice_grib2_0p25.txt" # Also save fluxes from atmosphere { echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???" echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???.idx" - } >> gfs_flux_1p00.txt + } >> "${DATA}/gfs_flux_1p00.txt" fi if [[ ${DO_ICE} = "YES" ]]; then head="gfs.t${cyc}z." - rm -f ice.txt - touch ice.txt + rm -f "${DATA}/ice.txt" + touch "${DATA}/ice.txt" { echo "${COM_ICE_INPUT/${ROTDIR}\//}/ice_in" echo "${COM_ICE_HISTORY/${ROTDIR}\//}/ice*nc" - } >> ice.txt + } >> "${DATA}/ice.txt" fi if [[ ${DO_AERO} = "YES" ]]; then head="gocart" - rm -f chem.txt - touch chem.txt + rm -f "${DATA}/chem.txt" + touch "${DATA}/chem.txt" - echo "${COM_CHEM_HISTORY/${ROTDIR}\//}/${head}*" >> chem.txt + echo "${COM_CHEM_HISTORY/${ROTDIR}\//}/${head}*" >> "${DATA}/chem.txt" fi #----------------------------------------------------- @@ -303,12 +303,12 @@ fi ##end of gfs if [[ ${type} == "gdas" ]]; then #----------------------------------------------------- - rm -f gdas.txt - rm -f gdas_restarta.txt - rm -f gdas_restartb.txt - touch gdas.txt - touch gdas_restarta.txt - touch gdas_restartb.txt + rm -f "${DATA}/gdas.txt" + rm -f "${DATA}/gdas_restarta.txt" + rm -f "${DATA}/gdas_restartb.txt" + touch "${DATA}/gdas.txt" + touch "${DATA}/gdas_restarta.txt" + touch "${DATA}/gdas_restartb.txt" head="gdas.t${cyc}z." @@ -379,14 +379,14 @@ if [[ ${type} == "gdas" ]]; then echo "${file}.idx" fi done - } >> gdas.txt + } >> "${DATA}/gdas.txt" #.................. 
if [[ -s "${COM_ATMOS_ANALYSIS}/${head}cnvstat" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}cnvstat" >> gdas_restarta.txt + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}cnvstat" >> "${DATA}/gdas_restarta.txt" fi if [[ -s "${COM_ATMOS_ANALYSIS}/${head}radstat" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" >> gdas_restarta.txt + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" >> "${DATA}/gdas_restarta.txt" fi { @@ -422,18 +422,18 @@ if [[ ${type} == "gdas" ]]; then echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" - } >> gdas_restarta.txt + } >> "${DATA}/gdas_restarta.txt" #.................. - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}" >> gdas_restartb.txt + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}" >> "${DATA}/gdas_restartb.txt" #.................. if [[ ${DO_WAVE} = "YES" ]]; then - rm -rf gdaswave.txt - touch gdaswave.txt - rm -rf gdaswave_restart.txt - touch gdaswave_restart.txt + rm -rf "${DATA}/gdaswave.txt" + touch "${DATA}/gdaswave.txt" + rm -rf "${DATA}/gdaswave_restart.txt" + touch "${DATA}/gdaswave_restart.txt" head="gdaswave.t${cyc}z." @@ -441,21 +441,19 @@ if [[ ${type} == "gdas" ]]; then { echo "${COM_WAVE_GRID/${ROTDIR}\//}/${head}*" echo "${COM_WAVE_STATION/${ROTDIR}\//}/${head}*" - } >> gdaswave.txt + } >> "${DATA}/gdaswave.txt" - echo "${COM_WAVE_RESTART/${ROTDIR}\//}/*" >> gdaswave_restart.txt + echo "${COM_WAVE_RESTART/${ROTDIR}\//}/*" >> "${DATA}/gdaswave_restart.txt" fi #.................. if [[ ${DO_OCN} = "YES" ]]; then - rm -rf gdasocean.txt - touch gdasocean.txt - rm -rf gdasocean_restart.txt - touch gdasocean_restart.txt - rm -rf gdasocean_analysis.txt - touch gdasocean_analysis.txt + rm -rf "${DATA}/gdasocean.txt" + touch "${DATA}/gdasocean.txt" + rm -rf "${DATA}/gdasocean_restart.txt" + touch "${DATA}/gdasocean_restart.txt" head="gdas.t${cyc}z." @@ -463,28 +461,28 @@ if [[ ${type} == "gdas" ]]; then { echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/${head}*" echo "${COM_OCEAN_INPUT/${ROTDIR}\//}" - } >> gdasocean.txt + } >> "${DATA}/gdasocean.txt" { echo "${COM_OCEAN_RESTART/${ROTDIR}\//}/*" echo "${COM_MED_RESTART/${ROTDIR}\//}/*" - } >> gdasocean_restart.txt + } >> "${DATA}/gdasocean_restart.txt" { echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/${head}*" echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/gdas.t??z.ocngrid.nc" echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/diags" echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/yaml" - } >> gdasocean_analysis.txt + } >> "${DATA}/gdasocean_analysis.txt" fi if [[ ${DO_ICE} = "YES" ]]; then - rm -rf gdasice.txt - touch gdasice.txt - rm -rf gdasice_restart.txt - touch gdasice_restart.txt + rm -rf "${DATA}/gdasice.txt" + touch "${DATA}/gdasice.txt" + rm -rf "${DATA}/gdasice_restart.txt" + touch "${DATA}/gdasice_restart.txt" head="gdas.t${cyc}z." @@ -492,9 +490,9 @@ if [[ ${type} == "gdas" ]]; then { echo "${COM_ICE_HISTORY/${ROTDIR}\//}/${head}*" echo "${COM_ICE_INPUT/${ROTDIR}\//}/ice_in" - } >> gdasice.txt + } >> "${DATA}/gdasice.txt" - echo "${COM_ICE_RESTART/${ROTDIR}\//}/*" >> gdasice_restart.txt + echo "${COM_ICE_RESTART/${ROTDIR}\//}/*" >> "${DATA}/gdasice_restart.txt" fi @@ -522,8 +520,8 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then head="${RUN}.t${cyc}z." #.................. 
- rm -f "${RUN}.txt" - touch "${RUN}.txt" + rm -f "${DATA}/${RUN}.txt" + touch "${DATA}/${RUN}.txt" { gsida_files=("enkfstat" @@ -581,19 +579,19 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then fi fh=$((fh+3)) done - } >> "${RUN}.txt" + } >> "${DATA}/${RUN}.txt" #........................... n=1 while (( n <= NTARS )); do #........................... - rm -f "${RUN}_grp${n}.txt" - rm -f "${RUN}_restarta_grp${n}.txt" - rm -f "${RUN}_restartb_grp${n}.txt" - touch "${RUN}_grp${n}.txt" - touch "${RUN}_restarta_grp${n}.txt" - touch "${RUN}_restartb_grp${n}.txt" + rm -f "${DATA}/${RUN}_grp${n}.txt" + rm -f "${DATA}/${RUN}_restarta_grp${n}.txt" + rm -f "${DATA}/${RUN}_restartb_grp${n}.txt" + touch "${DATA}/${RUN}_grp${n}.txt" + touch "${DATA}/${RUN}_restarta_grp${n}.txt" + touch "${DATA}/${RUN}_restartb_grp${n}.txt" m=1 while (( m <= NMEM_EARCGRP )); do @@ -618,11 +616,11 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratminc.nc" fi fi - } >> "${RUN}_grp${n}.txt" + } >> "${DATA}/${RUN}_grp${n}.txt" if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratminc.nc" ]] ; then echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratminc.nc" \ - >> "${RUN}_restarta_grp${n}.txt" + >> "${DATA}/${RUN}_restarta_grp${n}.txt" fi else @@ -635,10 +633,10 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratmi00${FHR}.nc" fi fi - } >> "${RUN}_grp${n}.txt" + } >> "${DATA}/${RUN}_grp${n}.txt" if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratmi00${FHR}.nc" ]] ; then echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratmi00${FHR}.nc" \ - >> "${RUN}_restarta_grp${n}.txt" + >> "${DATA}/${RUN}_restarta_grp${n}.txt" fi fi { @@ -646,7 +644,7 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then if (( FHR == 6 )); then echo "${COM_ATMOS_HISTORY_MEM/${ROTDIR}\//}/${head}sfcf00${FHR}.nc" fi - } >> "${RUN}_grp${n}.txt" + } >> "${DATA}/${RUN}_grp${n}.txt" done # loop over FHR if [[ ${lobsdiag_forenkf} == ".false." 
]] ; then @@ -655,7 +653,7 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}cnvstat" ]] ; then echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}cnvstat" fi - } >> "${RUN}_grp${n}.txt" + } >> "${DATA}/${RUN}_grp${n}.txt" { if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}radstat" ]]; then @@ -668,7 +666,7 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_air" echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_int" echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_pc" - } >> "${RUN}_restarta_grp${n}.txt" + } >> "${DATA}/${RUN}_restarta_grp${n}.txt" fi #--- { @@ -678,9 +676,9 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" - } >> "${RUN}_restarta_grp${n}.txt" + } >> "${DATA}/${RUN}_restarta_grp${n}.txt" #--- - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}" >> "${RUN}_restartb_grp${n}.txt" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}" >> "${DATA}/${RUN}_restartb_grp${n}.txt" m=$((m+1)) done From 241742b246da1794646b68e41c3d6fe00b95dd07 Mon Sep 17 00:00:00 2001 From: AndrewEichmann-NOAA <58948505+AndrewEichmann-NOAA@users.noreply.github.com> Date: Wed, 1 Nov 2023 14:19:55 -0400 Subject: [PATCH 24/34] Updates to prep ocean obs task (#1870) --- jobs/JGLOBAL_PREP_OCEAN_OBS | 10 +++++++++- parm/config/gfs/config.prepoceanobs | 8 ++++++-- sorc/link_workflow.sh | 1 + 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/jobs/JGLOBAL_PREP_OCEAN_OBS b/jobs/JGLOBAL_PREP_OCEAN_OBS index a8fa86d4e5..d5064859d2 100755 --- a/jobs/JGLOBAL_PREP_OCEAN_OBS +++ b/jobs/JGLOBAL_PREP_OCEAN_OBS @@ -7,15 +7,23 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "prepoceanobs" -c "base prepoceanobs" # Set variables used in the script ############################################## +export COMIN_OBS="${DATA}" + ############################################## # Begin JOB SPECIFIC work ############################################## +# Add prep_marine_obs.py to PYTHONPATH +export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/soca:${PYTHONPATH} ############################################################### # Run relevant script -# the relevant script goes here +EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/ush/exglobal_prep_ocean_obs.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + ############################################## # End JOB SPECIFIC work diff --git a/parm/config/gfs/config.prepoceanobs b/parm/config/gfs/config.prepoceanobs index 051769b6ee..c5a9ddfd22 100644 --- a/parm/config/gfs/config.prepoceanobs +++ b/parm/config/gfs/config.prepoceanobs @@ -1,9 +1,13 @@ #!/bin/bash ########## config.prepoceanobs ########## -# Pre Ocn Analysis specific -echo "BEGIN: config.config.prepoceanobs" +echo "BEGIN: config.prepoceanobs" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST=@SOCA_OBS_LIST@ +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} # Get task specific resources . 
"${EXPDIR}/config.resources" prepoceanobs diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 0e2cdf5edd..d2328caba9 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -205,6 +205,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/jediinc2fv3.py" . ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ioda/bufr2ioda/run_bufr2ioda.py" . ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/build/bin/imsfv3_scf2ioda.py" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/scripts/exglobal_prep_ocean_obs.py" . fi From 247cdf8c7e10e6c98f782799fc51c069b6474e70 Mon Sep 17 00:00:00 2001 From: "Henry R. Winterbottom" <49202169+HenryWinterbottom-NOAA@users.noreply.github.com> Date: Thu, 2 Nov 2023 11:18:53 -0600 Subject: [PATCH 25/34] Corrects missing local variable reference (#2023) Fixes missing `$` in variable reference in archive job. Resolves #2022 --- ush/hpssarch_gen.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index 8d8ebf4fe9..2caf0040ef 100755 --- a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -165,8 +165,8 @@ if [[ ${type} = "gfs" ]]; then echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.f${fhr}" echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.f${fhr}.idx" if [[ -s "${COM_ATMOS_GRIB_1p00}/${head}pgrb2b.1p00.f${fhr}" ]]; then - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/{head}pgrb2b.1p00.f${fhr}" - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/{head}pgrb2b.1p00.f${fhr}.idx" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.f${fhr}" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.f${fhr}.idx" fi } >> "${DATA}/gfs_pgrb2b.txt" fi From 31b05a99b77d0f6eaa8b116485a41a43867d20e1 Mon Sep 17 00:00:00 2001 From: "Henry R. Winterbottom" <49202169+HenryWinterbottom-NOAA@users.noreply.github.com> Date: Thu, 2 Nov 2023 12:23:32 -0600 Subject: [PATCH 26/34] Move analysis post to new stand-alone task (#1979) This PR addresses issue #1130. A new task `postanl.sh` has been added such that the `anl` post-processing is not it's own task within the Rocoto workflow. This change prevents the GDAS experiment from breaking due to failures caused by the `anl` not being available for the first cycle. 
Resolves #1130 --- jobs/JGLOBAL_ATMOS_POST | 12 --------- jobs/rocoto/post.sh | 7 +---- jobs/rocoto/postanl.sh | 1 + workflow/applications/gfs_cycled.py | 2 +- workflow/hosts/awspw.yaml | 2 +- workflow/rocoto/gfs_tasks.py | 40 ++++++++++++++++++----------- 6 files changed, 29 insertions(+), 35 deletions(-) create mode 120000 jobs/rocoto/postanl.sh diff --git a/jobs/JGLOBAL_ATMOS_POST b/jobs/JGLOBAL_ATMOS_POST index a0cd8871e1..07890b9df5 100755 --- a/jobs/JGLOBAL_ATMOS_POST +++ b/jobs/JGLOBAL_ATMOS_POST @@ -17,18 +17,6 @@ export g2tmpl_ver=${g2tmpl_ver:-v1.5.0} ############################################## export CDUMP=${RUN/enkf} - -############################################## -# TODO: Remove this egregious HACK -############################################## -if [[ "${SDATE:-}" = "${PDY}${cyc}" ]]; then - if [[ ${post_times} = "anl" ]]; then - echo "No offline post-processing in the first half cycle for analysis" - exit 0 - fi -fi - - ############################################## # Begin JOB SPECIFIC work ############################################## diff --git a/jobs/rocoto/post.sh b/jobs/rocoto/post.sh index e84b2b7b71..c91b9d4679 100755 --- a/jobs/rocoto/post.sh +++ b/jobs/rocoto/post.sh @@ -4,7 +4,6 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### ## NCEP post driver script -## FHRGRP : forecast hour group to post-process (e.g. 0, 1, 2 ...) ## FHRLST : forecast hourlist to be post-process (e.g. anl, f000, f000_f001_f002, ...) ############################################################### @@ -16,11 +15,7 @@ status=$? export job="post" export jobid="${job}.$$" -if [ ${FHRGRP} = 'anl' ]; then - fhrlst="anl" -else - fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') -fi +fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') #--------------------------------------------------------------- for fhr in ${fhrlst}; do diff --git a/jobs/rocoto/postanl.sh b/jobs/rocoto/postanl.sh new file mode 120000 index 0000000000..29e1fc721f --- /dev/null +++ b/jobs/rocoto/postanl.sh @@ -0,0 +1 @@ +post.sh \ No newline at end of file diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 6eff929d5f..6edd6bc0d8 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -101,7 +101,7 @@ def get_task_names(self): """ gdas_gfs_common_tasks_before_fcst = ['prep'] - gdas_gfs_common_tasks_after_fcst = ['post'] + gdas_gfs_common_tasks_after_fcst = ['postanl', 'post'] # if self.do_ocean: # TODO: uncomment when ocnpost is fixed in cycled mode # gdas_gfs_common_tasks_after_fcst += ['ocnpost'] gdas_gfs_common_tasks_after_fcst += ['vrfy'] diff --git a/workflow/hosts/awspw.yaml b/workflow/hosts/awspw.yaml index ee4999d4f8..becb38e236 100644 --- a/workflow/hosts/awspw.yaml +++ b/workflow/hosts/awspw.yaml @@ -21,4 +21,4 @@ LOCALARCH: 'NO' ATARDIR: '/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}' # TODO: This will not yet work from AWS. MAKE_NSSTBUFR: 'NO' MAKE_ACFTBUFR: 'NO' -SUPPORTED_RESOLUTIONS: ['C48'] # TODO: Test and support all cubed-sphere resolutions. +SUPPORTED_RESOLUTIONS: ['C48', 'C96'] # TODO: Test and support all cubed-sphere resolutions. 
diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 55055a46ee..ca9af5af7d 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -549,24 +549,37 @@ def _fcst_cycled(self): return task def post(self): - add_anl_to_post = False - if self.app_config.mode in ['cycled']: - add_anl_to_post = True + return self._post_task('post') + + def postanl(self): + postenvars = self.envars.copy() + postenvar_dict = {'FHRLST': 'anl', + 'ROTDIR': self._base.get('ROTDIR')} + + for key, value in postenvar_dict.items(): + postenvars.append(rocoto.create_envar(name=key, value=str(value))) - return self._post_task('post', add_anl_to_post=add_anl_to_post) + deps = [] + atm_anl_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_ANALYSIS_TMPL"]) + data = f'{atm_anl_path}/{self.cdump}.t@Hz.loganl.txt' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + resources = self.get_resource('post') + task = create_wf_task('postanl', resources, cdump=self.cdump, envar=postenvars, dependency=dependencies, + cycledef=self.cdump) + + return task def ocnpost(self): if self.app_config.mode in ['forecast-only']: # TODO: fix ocnpost in cycled mode - return self._post_task('ocnpost', add_anl_to_post=False) + return self._post_task('ocnpost') - def _post_task(self, task_name, add_anl_to_post=False): + def _post_task(self, task_name): if task_name not in ['post', 'ocnpost']: raise KeyError(f'Invalid post-processing task: {task_name}') - if task_name in ['ocnpost']: - add_anl_to_post = False - - def _get_postgroups(cdump, config, add_anl=False): + def _get_postgroups(cdump, config): fhmin = config['FHMIN'] fhmax = config['FHMAX'] @@ -591,8 +604,6 @@ def _get_postgroups(cdump, config, add_anl=False): fhrs = [f'f{fhr:03d}' for fhr in fhrs] fhrs = np.array_split(fhrs, ngrps) fhrs = [fhr.tolist() for fhr in fhrs] - if add_anl: - fhrs.insert(0, ['anl']) grp = ' '.join(f'_{fhr[0]}-{fhr[-1]}' if len(fhr) > 1 else f'_{fhr[0]}' for fhr in fhrs) dep = ' '.join([fhr[-1] for fhr in fhrs]) @@ -610,14 +621,13 @@ def _get_postgroups(cdump, config, add_anl=False): dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) postenvars = self.envars.copy() - postenvar_dict = {'FHRGRP': '#grp#', - 'FHRLST': '#lst#', + postenvar_dict = {'FHRLST': '#lst#', 'ROTDIR': self._base.get('ROTDIR')} for key, value in postenvar_dict.items(): postenvars.append(rocoto.create_envar(name=key, value=str(value))) varname1, varname2, varname3 = 'grp', 'dep', 'lst' - varval1, varval2, varval3 = _get_postgroups(self.cdump, self._configs[task_name], add_anl=add_anl_to_post) + varval1, varval2, varval3 = _get_postgroups(self.cdump, self._configs[task_name]) vardict = {varname2: varval2, varname3: varval3} cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump From 93bc918cdabee771c182c6f33cc54a20b5bb86c6 Mon Sep 17 00:00:00 2001 From: Kate Friedman Date: Thu, 2 Nov 2023 16:01:18 -0400 Subject: [PATCH 27/34] Create separate GSI monitor jobs and remove from vrfy job (#1983) This PR will move the GSI monitor tasks from the `vrfy` job into their own separate jobs in the rocoto mesh. This takes care of part of issue #235. Changes in this PR: 1. Create new individual GSI monitor jobs in the rocoto mesh to match how it is done in operations. The new jobs are: `gdasverfozn`, `gdasverfrad`, `gdasvminmon`, and `gfsvminmon`. The names correspond to the names in operations and the ecf script names. 
The jobs are only added in cycled mode; none of the jobs run in forecast-only mode.
2. Updates to GSI monitor scripts:
* Update `ush/jjob_header.sh` arguments in JJOB scripts to call the new individual job names and configs.
* Introduce `COM_*` variables and use of the `generate_com` utility. This results in the outputs landing in `COM` subfolders, as occurs in operations, instead of landing in the online archive.
* Some variable cleanup in the JJOB and ush scripts, including cleaned-up usage of the `DATA` variable and its removal at the end of the jobs.
* Add missing exit statements to `JGFS_ATMOS_VMINMON` and `JGDAS_ATMOS_VERFRAD`.
3. Create new rocoto job scripts for the new individual GSI monitor jobs.
4. Create new configs for the new individual GSI monitor jobs.
5. Add the new individual GSI monitor jobs to the resource configuration, using the same resources as in operations.
6. Move and rename the GSI monitor job switches from `config.vrfy` to `config.base`, and add the switches to the setup system. The new switches that control whether each job is added to the rocoto mesh are `DO_VERFOZN`, `DO_VERFRAD`, and `DO_VMINMON`.
7. Add GSI monitor job output to the archival job and into the resulting HPSS or local tarballs. This now matches what is done in operations.
8. Remove the GSI monitor tasks from the rocoto `vrfy` job and the associated `config.vrfy`.

Example output locations and files (not showing the contents of oznmon/horiz or oznmon/time for brevity; see the archival script for the full list of output files):
```
kate.friedman@dlogin09:/lfs/h2/emc/ptmp/kate.friedman/comrot/testmonitor2> ll gdas.20211221/00/products/atmos/*mon
gdas.20211221/00/products/atmos/minmon:
total 112
-rw-r--r-- 1 kate.friedman emc 14030 Oct 25 15:00 2021122100.costs.txt
-rw-r--r-- 1 kate.friedman emc 84932 Oct 25 15:00 2021122100.cost_terms.txt
-rw-r--r-- 1 kate.friedman emc   808 Oct 25 15:00 2021122100.gnorms.ieee_d
-rw-r--r-- 1 kate.friedman emc   808 Oct 25 15:00 2021122100.reduction.ieee_d
-rw-r--r-- 1 kate.friedman emc    80 Oct 25 15:00 gnorm_data.txt

gdas.20211221/00/products/atmos/oznmon:
total 8
drwxr-sr-x 2 kate.friedman emc 4096 Oct 25 15:05 horiz
drwxr-sr-x 2 kate.friedman emc 4096 Oct 25 15:05 time

gdas.20211221/00/products/atmos/radmon:
total 21036
-rw-r--r-- 1 kate.friedman emc      231 Oct 25 15:12 bad_diag.2021122100
-rw-r--r-- 1 kate.friedman emc     9035 Oct 25 15:12 bad_pen.2021122100
-rw-r--r-- 1 kate.friedman emc     1449 Oct 25 15:12 low_count.2021122100
-rw-r--r-- 1 kate.friedman emc 20523403 Oct 25 15:07 radmon_angle.tar.gz
-rw-r--r-- 1 kate.friedman emc   217272 Oct 25 15:08 radmon_bcoef.tar.gz
-rw-r--r-- 1 kate.friedman emc   502151 Oct 25 15:10 radmon_bcor.tar.gz
-rw-r--r-- 1 kate.friedman emc   264480 Oct 25 15:12 radmon_time.tar.gz
-rw-r--r-- 1 kate.friedman emc      684 Oct 25 15:12 warning.2021122100

kate.friedman@dlogin09:/lfs/h2/emc/ptmp/kate.friedman/comrot/testmonitor2> ll gfs.20211221/00/products/atmos/minmon/
total 88
-rw-r--r-- 1 kate.friedman emc 10530 Oct 25 14:55 2021122100.costs.txt
-rw-r--r-- 1 kate.friedman emc 63882 Oct 25 14:55 2021122100.cost_terms.txt
-rw-r--r-- 1 kate.friedman emc   808 Oct 25 14:55 2021122100.gnorms.ieee_d
-rw-r--r-- 1 kate.friedman emc   608 Oct 25 14:55 2021122100.reduction.ieee_d
-rw-r--r-- 1 kate.friedman emc    80 Oct 25 14:55 gnorm_data.txt
```
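For reference, the three new switches live in `config.base`, so any of the monitor jobs can be dropped from a given experiment without touching task code; a minimal sketch (the variable names are the ones added by this PR; the comments are illustrative):
```
# In ${EXPDIR}/config.base, e.g. to run only the minimization monitor:
export DO_VERFOZN="NO"    # drops gdasverfozn from the generated mesh
export DO_VERFRAD="NO"    # drops gdasverfrad from the generated mesh
export DO_VMINMON="YES"   # keeps gdasvminmon and gfsvminmon
```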
Lingering work and considerations:
1. Issue #1925
2. Consider not hardcoding the list of types in the archival script, perhaps tying it to something reliable or configurable (maybe in `config.verfozn`?): `subtyplist="gome_metop-b omi_aura ompslp_npp ompsnp_n20 ompsnp_npp ompstc8_n20 ompstc8_npp sbuv2_n19"`

Resolves #1908
--- jobs/JGDAS_ATMOS_VERFOZN | 19 +++------ jobs/JGDAS_ATMOS_VERFRAD | 26 +++++------- jobs/JGDAS_ATMOS_VMINMON | 19 +++++---- jobs/JGFS_ATMOS_VMINMON | 22 +++++------ jobs/JGLOBAL_ARCHIVE | 3 +- jobs/rocoto/verfozn.sh | 22 +++++++++++ jobs/rocoto/verfrad.sh | 22 +++++++++++ jobs/rocoto/vminmon.sh | 22 +++++++++++ jobs/rocoto/vrfy.sh | 50 ----------------------- parm/config/gfs/config.base.emc.dyn | 3 ++ parm/config/gfs/config.com | 2 + parm/config/gfs/config.resources | 31 ++++++++++++++- parm/config/gfs/config.verfozn | 9 +++++ parm/config/gfs/config.verfrad | 9 +++++ parm/config/gfs/config.vminmon | 9 +++++ parm/config/gfs/config.vrfy | 47 ---------------------- ush/hpssarch_gen.sh | 56 +++++++++++++++++++++++++- ush/minmon_xtrct_costs.pl | 2 +- ush/minmon_xtrct_gnorms.pl | 2 +- ush/minmon_xtrct_reduct.pl | 2 +- workflow/applications/applications.py | 3 ++ workflow/applications/gfs_cycled.py | 21 ++++++++++ workflow/rocoto/gfs_tasks.py | 57 ++++++++++++++++++++++++++- workflow/rocoto/tasks.py | 3 +- 24 files changed, 305 insertions(+), 156 deletions(-) create mode 100755 jobs/rocoto/verfozn.sh create mode 100755 jobs/rocoto/verfrad.sh create mode 100755 jobs/rocoto/vminmon.sh create mode 100644 parm/config/gfs/config.verfozn create mode 100644 parm/config/gfs/config.verfrad create mode 100644 parm/config/gfs/config.vminmon diff --git a/jobs/JGDAS_ATMOS_VERFOZN b/jobs/JGDAS_ATMOS_VERFOZN index deccc0b28e..3b75359e6a 100755 --- a/jobs/JGDAS_ATMOS_VERFOZN +++ b/jobs/JGDAS_ATMOS_VERFOZN @@ -4,7 +4,7 @@ # Set up environment for GDAS Ozone Monitor job ############################################################# source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" +source "${HOMEgfs}/ush/jjob_header.sh" -e "verfozn" -c "base verfozn" export OZNMON_SUFFIX=${OZNMON_SUFFIX:-${NET}} @@ -42,15 +42,12 @@ export p_cyc=${pdate:8:2} #--------------------------------------------- # OZN_TANKDIR - WHERE OUTPUT DATA WILL RESIDE # -export OZN_TANKDIR=${OZN_TANKDIR:-$(compath.py ${envir}/${NET}/${gfs_ver})} -export TANKverf_ozn=${TANKverf_ozn:-${OZN_TANKDIR}/${RUN}.${PDY}/${cyc}/atmos/oznmon} -export TANKverf_oznM1=${TANKverf_oznM1:-${OZN_TANKDIR}/${RUN}.${P_PDY}/${p_cyc}/atmos/oznmon} - YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_OZNMON -if [[ ! -d ${TANKverf_ozn} ]]; then - mkdir -p -m 775 ${TANKverf_ozn} -fi +export TANKverf_ozn=${TANKverf_ozn:-${COM_ATMOS_OZNMON}} + +if [[ ! -d ${TANKverf_ozn} ]]; then mkdir -p -m 775 ${TANKverf_ozn} ; fi #--------------------------------------- # set up validation file @@ -77,10 +74,6 @@ err=$?
################################ # Remove the Working Directory ################################ -KEEPDATA=${KEEPDATA:-NO} -cd ${DATAROOT} -if [ ${KEEPDATA} = NO ] ; then - rm -rf ${DATA} -fi +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" exit 0 diff --git a/jobs/JGDAS_ATMOS_VERFRAD b/jobs/JGDAS_ATMOS_VERFRAD index 42e112c74f..301af7d06e 100755 --- a/jobs/JGDAS_ATMOS_VERFRAD +++ b/jobs/JGDAS_ATMOS_VERFRAD @@ -4,18 +4,13 @@ # Set up environment for GDAS Radiance Monitor job ############################################################# source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" +source "${HOMEgfs}/ush/jjob_header.sh" -e "verfrad" -c "base verfrad" export COMPONENT="atmos" -export RAD_DATA_IN=${DATA} - export RADMON_SUFFIX=${RADMON_SUFFIX:-${RUN}} export CYCLE_INTERVAL=${CYCLE_INTERVAL:-6} -mkdir -p ${RAD_DATA_IN} -cd ${RAD_DATA_IN} - ############################################## # Specify Execution Areas ############################################## @@ -52,13 +47,15 @@ export p_cyc=${pdate:8:2} # COMOUT - WHERE GSI OUTPUT RESIDES # TANKverf - WHERE OUTPUT DATA WILL RESIDE ############################################# -export TANKverf=${TANKverf:-$(compath.py ${envir}/${NET}/${gfs_ver})} -export TANKverf_rad=${TANKverf_rad:-${TANKverf}/${RUN}.${PDY}/${cyc}/atmos/radmon} -export TANKverf_radM1=${TANKverf_radM1:-${TANKverf}/${RUN}.${P_PDY}/${p_cyc}/atmos/radmon} - YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_RADMON +YMD=${P_PDY} HH=${p_cyc} generate_com -rx COM_ATMOS_RADMON_PREV:COM_ATMOS_RADMON_TMPL -mkdir -p -m 775 ${TANKverf_rad} +export TANKverf_rad=${TANKverf_rad:-${COM_ATMOS_RADMON}} +export TANKverf_radM1=${TANKverf_radM1:-${COM_ATMOS_RADMON_PREV}} + +if [[ ! -d ${TANKverf_rad} ]]; then mkdir -p -m 775 ${TANKverf_rad} ; fi +if [[ ! 
-d ${TANKverf_radM1} ]]; then mkdir -p -m 775 ${TANKverf_radM1} ; fi ######################################## # Set necessary environment variables @@ -89,9 +86,6 @@ fi ################################ # Remove the Working Directory ################################ -KEEPDATA=${KEEPDATA:-YES} -cd ${DATAROOT} -if [ ${KEEPDATA} = NO ] ; then - rm -rf ${RAD_DATA_IN} -fi +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" +exit 0 diff --git a/jobs/JGDAS_ATMOS_VMINMON b/jobs/JGDAS_ATMOS_VMINMON index 3f9c0d856f..c7c6d12f9a 100755 --- a/jobs/JGDAS_ATMOS_VMINMON +++ b/jobs/JGDAS_ATMOS_VMINMON @@ -4,7 +4,7 @@ # GDAS Minimization Monitor (MinMon) job ########################################################### source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vminmon" -c "base vminmon" ########################################################### # obtain unique process id (pid) and make temp directories @@ -40,12 +40,15 @@ export p_cyc=${pdate:8:2} ############################################# # TANKverf - WHERE OUTPUT DATA WILL RESIDE ############################################# -export M_TANKverf=${M_TANKverf:-${COM_IN}/${RUN}.${PDY}/${cyc}/atmos/minmon} -export M_TANKverfM1=${M_TANKverfM1:-${COM_IN}/${RUN}.${P_PDY}/${p_cyc}/atmos/minmon} - YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_MINMON +YMD=${P_PDY} HH=${p_cyc} generate_com -rx COM_ATMOS_MINMON_PREV:COM_ATMOS_MINMON_TMPL + +export M_TANKverf=${M_TANKverf:-${COM_ATMOS_MINMON}} +export M_TANKverfM1=${M_TANKverfM1:-${COM_ATMOS_MINMON_PREV}} -mkdir -p -m 775 ${M_TANKverf} +if [[ ! -d ${M_TANKverf} ]]; then mkdir -p -m 775 ${M_TANKverf} ; fi +if [[ ! -d ${M_TANKverfM1} ]]; then mkdir -p -m 775 ${M_TANKverfM1} ; fi ######################################## @@ -65,10 +68,6 @@ err=$? ################################ # Remove the Working Directory ################################ -KEEPDATA=${KEEPDATA:-NO} -cd ${DATAROOT} -if [ ${KEEPDATA} = NO ] ; then - rm -rf ${DATA} -fi +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" exit 0 diff --git a/jobs/JGFS_ATMOS_VMINMON b/jobs/JGFS_ATMOS_VMINMON index a7300b4dd3..c548d806f5 100755 --- a/jobs/JGFS_ATMOS_VMINMON +++ b/jobs/JGFS_ATMOS_VMINMON @@ -4,7 +4,7 @@ # GFS Minimization Monitor (MinMon) job ########################################################### source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vminmon" -c "base vminmon" ########################################################### # obtain unique process id (pid) and make temp directories @@ -39,12 +39,15 @@ export p_cyc=${pdate:8:2} # TANKverf - WHERE OUTPUT DATA WILL RESIDE ############################################# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS -YMD=${P_PDY} HH=${p_cyc} generate_com -rx COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_MINMON +YMD=${P_PDY} HH=${p_cyc} generate_com -rx COM_ATMOS_MINMON_PREV:COM_ATMOS_MINMON_TMPL -M_TANKverf=${M_TANKverf:-${COM_ATMOS_ANALYSIS}/minmon} -export M_TANKverfM1=${M_TANKverfM1:-${COM_ATMOS_ANALYSIS_PREV}/minmon} +export M_TANKverf=${M_TANKverf:-${COM_ATMOS_MINMON}} +export M_TANKverfM1=${M_TANKverfM1:-${COM_ATMOS_MINMON_PREV}} + +if [[ ! -d ${M_TANKverf} ]]; then mkdir -p -m 775 ${M_TANKverf} ; fi +if [[ ! 
-d ${M_TANKverfM1} ]]; then mkdir -p -m 775 ${M_TANKverfM1} ; fi -mkdir -p -m 775 ${M_TANKverf} ######################################## # Set necessary environment variables @@ -63,11 +66,6 @@ err=$? ################################ # Remove the Working Directory ################################ -KEEPDATA=${KEEPDATA:-NO} -cd ${DATAROOT} - -if [ ${KEEPDATA} = NO ] ; then - rm -rf ${DATA} -fi - +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" +exit 0 diff --git a/jobs/JGLOBAL_ARCHIVE b/jobs/JGLOBAL_ARCHIVE index f2828a9fdd..e6c016e703 100755 --- a/jobs/JGLOBAL_ARCHIVE +++ b/jobs/JGLOBAL_ARCHIVE @@ -18,7 +18,8 @@ YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_BUFR COM_ATMO COM_OBS COM_TOP \ COM_OCEAN_HISTORY COM_OCEAN_INPUT COM_OCEAN_RESTART COM_OCEAN_XSECT COM_OCEAN_2D COM_OCEAN_3D \ COM_OCEAN_ANALYSIS \ - COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION + COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION \ + COM_ATMOS_OZNMON COM_ATMOS_RADMON COM_ATMOS_MINMON for grid in "0p25" "0p50" "1p00"; do YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL" diff --git a/jobs/rocoto/verfozn.sh b/jobs/rocoto/verfozn.sh new file mode 100755 index 0000000000..70a772fca6 --- /dev/null +++ b/jobs/rocoto/verfozn.sh @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="verfozn" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +echo +echo "=============== START TO RUN OZMON DATA EXTRACTION ===============" + +"${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/verfrad.sh b/jobs/rocoto/verfrad.sh new file mode 100755 index 0000000000..a687d3cf26 --- /dev/null +++ b/jobs/rocoto/verfrad.sh @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="verfrad" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +echo +echo "=============== START TO RUN RADMON DATA EXTRACTION ===============" + +"${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/vminmon.sh b/jobs/rocoto/vminmon.sh new file mode 100755 index 0000000000..2bbb7599ca --- /dev/null +++ b/jobs/rocoto/vminmon.sh @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="vminmon" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +echo +echo "=============== START TO RUN MINMON ===============" + +"${HOMEgfs}/jobs/J${RUN^^}_ATMOS_VMINMON" +status=$? 
+ +exit "${status}" diff --git a/jobs/rocoto/vrfy.sh b/jobs/rocoto/vrfy.sh index 7dc52351fc..67ac43e137 100755 --- a/jobs/rocoto/vrfy.sh +++ b/jobs/rocoto/vrfy.sh @@ -22,11 +22,6 @@ CDATEm1=$(${NDATE} -24 "${PDY}${cyc}") export CDATEm1 export PDYm1=${CDATEm1:0:8} -CDATEm1c=$(${NDATE} -06 "${PDY}${cyc}") -PDYm1c=${CDATEm1c:0:8} -pcyc=${CDATEm1c:8:2} - - ############################################################### # TODO: We can likely drop support for these dev-only grib1 precip files echo @@ -57,51 +52,6 @@ if [[ "${RUNMOS}" == "YES" && "${CDUMP}" == "gfs" ]]; then fi -############################################################### -echo -echo "=============== START TO RUN RADMON DATA EXTRACTION ===============" - -if [[ "${VRFYRAD}" == "YES" && "${CDUMP}" == "${CDFNL}" && "${PDY}${cyc}" != "${SDATE}" ]]; then - - export EXP=${PSLOT} - export TANKverf_rad="${TANKverf}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" - export TANKverf_radM1="${TANKverf}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" - export MY_MACHINE=${machine} - - ${VRFYRADSH} - -fi - - -############################################################### -echo -echo "=============== START TO RUN OZMON DATA EXTRACTION ===============" -if [[ "${VRFYOZN}" == "YES" && "${CDUMP}" == "${CDFNL}" && "${PDY}${cyc}" != "${SDATE}" ]]; then - - export EXP=${PSLOT} - export TANKverf_ozn="${TANKverf_ozn}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" - export TANKverf_oznM1="${TANKverf_ozn}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" - export MY_MACHINE=${machine} - - ${VRFYOZNSH} - -fi - - -############################################################### -echo -echo "=============== START TO RUN MINMON ===============" -if [[ "${VRFYMINMON}" == "YES" && "${PDY}${cyc}" != "${SDATE}" && "${MODE}" = "cycled" ]]; then - - export M_TANKverfM0="${M_TANKverf}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" - export M_TANKverfM1="${M_TANKverf}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" - export MY_MACHINE=${machine} - - ${VRFYMINSH} - -fi - - ################################################################################ echo echo "=============== START TO RUN CYCLONE TRACK VERIFICATION ===============" diff --git a/parm/config/gfs/config.base.emc.dyn b/parm/config/gfs/config.base.emc.dyn index b77787794c..8445f50400 100644 --- a/parm/config/gfs/config.base.emc.dyn +++ b/parm/config/gfs/config.base.emc.dyn @@ -57,6 +57,9 @@ export DO_BUFRSND="NO" # BUFR sounding products export DO_GEMPAK="NO" # GEMPAK products export DO_AWIPS="NO" # AWIPS products export DO_VRFY="YES" # VRFY step +export DO_VERFOZN="YES" # Ozone data assimilation monitoring +export DO_VERFRAD="YES" # Radiance data assimilation monitoring +export DO_VMINMON="YES" # GSI minimization monitoring # NO for retrospective parallel; YES for real-time parallel # arch.sh uses REALTIME for MOS. 
Need to set REALTIME=YES diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com index 87a5b74b6f..208b0ac096 100644 --- a/parm/config/gfs/config.com +++ b/parm/config/gfs/config.com @@ -63,6 +63,8 @@ declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_v declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_OZNMON_TMPL=${COM_BASE}'/products/atmos/oznmon' +declare -rx COM_ATMOS_RADMON_TMPL=${COM_BASE}'/products/atmos/radmon' declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 1c21048c26..037b98803d 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -14,7 +14,8 @@ if [[ $# -ne 1 ]]; then echo "atmensanlinit atmensanlrun atmensanlfinal" echo "landanl" echo "aeroanlinit aeroanlrun aeroanlfinal" - echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch cleanup echgres" + echo "anal sfcanl analcalc analdiag fcst post echgres" + echo "verfozn verfrad vminmon vrfy fit2obs metp arch cleanup" echo "eobs ediag eomg eupd ecen esfc efcs epos earc" echo "init_chem mom6ic ocnpost" echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" @@ -714,6 +715,34 @@ elif [[ ${step} = "post" ]]; then if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi export is_exclusive=True +elif [[ ${step} = "verfozn" ]]; then + + export wtime_verfozn="00:05:00" + export npe_verfozn=1 + export nth_verfozn=1 + export npe_node_verfozn=1 + export memory_verfozn="1G" + +elif [[ ${step} = "verfrad" ]]; then + + export wtime_verfrad="00:20:00" + export npe_verfrad=1 + export nth_verfrad=1 + export npe_node_verfrad=1 + export memory_verfrad="5G" + +elif [[ ${step} = "vminmon" ]]; then + + export wtime_vminmon="00:05:00" + export npe_vminmon=1 + export nth_vminmon=1 + export npe_node_vminmon=1 + export wtime_vminmon_gfs="00:05:00" + export npe_vminmon_gfs=1 + export nth_vminmon_gfs=1 + export npe_node_vminmon_gfs=1 + export memory_vminmon="1G" + elif [[ ${step} = "vrfy" ]]; then export wtime_vrfy="03:00:00" diff --git a/parm/config/gfs/config.verfozn b/parm/config/gfs/config.verfozn new file mode 100644 index 0000000000..4091db3a11 --- /dev/null +++ b/parm/config/gfs/config.verfozn @@ -0,0 +1,9 @@ +#! /usr/bin/env bash + +########## config.verfozn ########## +echo "BEGIN: config.verfozn" + +# Get task specific resources +. "${EXPDIR}/config.resources" verfozn + +echo "END: config.verfozn" diff --git a/parm/config/gfs/config.verfrad b/parm/config/gfs/config.verfrad new file mode 100644 index 0000000000..e6dffcaa45 --- /dev/null +++ b/parm/config/gfs/config.verfrad @@ -0,0 +1,9 @@ +#! /usr/bin/env bash + +########## config.verfrad ########## +echo "BEGIN: config.verfrad" + +# Get task specific resources +. "${EXPDIR}/config.resources" verfrad + +echo "END: config.verfrad" diff --git a/parm/config/gfs/config.vminmon b/parm/config/gfs/config.vminmon new file mode 100644 index 0000000000..d8888a7cb6 --- /dev/null +++ b/parm/config/gfs/config.vminmon @@ -0,0 +1,9 @@ +#! /usr/bin/env bash + +########## config.vminmon ########## +echo "BEGIN: config.vminmon" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vminmon + +echo "END: config.vminmon" diff --git a/parm/config/gfs/config.vrfy b/parm/config/gfs/config.vrfy index 8754609c50..8b8c393ee1 100644 --- a/parm/config/gfs/config.vrfy +++ b/parm/config/gfs/config.vrfy @@ -10,58 +10,11 @@ echo "BEGIN: config.vrfy" export CDFNL="gdas" # Scores verification against GDAS/GFS analysis export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification -export VRFYRAD="YES" # Radiance data assimilation monitoring -export VRFYOZN="YES" # Ozone data assimilation monitoring -export VRFYMINMON="YES" # GSI minimization monitoring export VRFYTRAK="YES" # Hurricane track verification export VRFYGENESIS="YES" # Cyclone genesis verification export VRFYFSU="NO" # Cyclone genesis verification (FSU) export RUNMOS="NO" # whether to run entire MOS package -#---------------------------------------------------------- -# Minimization, Radiance and Ozone Monitoring -#---------------------------------------------------------- - -if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then - - export envir="para" - export COM_IN=${ROTDIR} - - # Radiance Monitoring - if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then - - export RADMON_SUFFIX=${PSLOT} - export TANKverf="${NOSCRUB}/monitor/radmon" - export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" - - fi - - # Minimization Monitoring - if [[ "${VRFYMINMON}" = "YES" ]] ; then - - export MINMON_SUFFIX=${PSLOT} - export M_TANKverf="${NOSCRUB}/monitor/minmon" - if [[ "${RUN}" = "gdas" ]] ; then - export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" - elif [[ "${RUN}" = "gfs" ]] ; then - export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" - fi - - fi - - # Ozone Monitoring - if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then - - export HOMEgfs_ozn="${HOMEgfs}" - export OZNMON_SUFFIX=${PSLOT} - export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" - export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" - - fi - -fi - - #------------------------------------------------- # Cyclone genesis and cyclone track verification #------------------------------------------------- diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index 2caf0040ef..07f3c0c8bd 100755 --- a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -130,6 +130,17 @@ if [[ ${type} = "gfs" ]]; then for file in "${genesis_files[@]}"; do [[ -s ${COM_ATMOS_GENESIS}/${file} ]] && echo "${COM_ATMOS_GENESIS/${ROTDIR}\//}/${file}" done + + # GSI Monitor job output + + if [[ ${DO_VMINMON} = "YES" ]]; then + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.costs.txt" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.cost_terms.txt" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.gnorms.ieee_d" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.reduction.ieee_d" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/gnorm_data.txt" + fi + } >> "${DATA}/gfsa.txt" { @@ -350,7 +361,7 @@ if [[ ${type} == "gdas" ]]; then if [[ -s "${COM_ATMOS_ANALYSIS}/${head}radstat" ]]; then echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" fi - for fstep in prep anal fcst vrfy radmon minmon oznmon; do + for fstep in prep anal fcst vrfy verfozn verfrad vminmon; do if [[ -s "${ROTDIR}/logs/${PDY}${cyc}/gdas${fstep}.log" ]]; then echo "./logs/${PDY}${cyc}/gdas${fstep}.log" fi @@ -379,6 +390,49 @@ if [[ ${type} == "gdas" ]]; then echo "${file}.idx" fi done + + # GSI Monitor jobs output + + if [[ ${DO_VERFOZN} = "YES" ]]; then + for type in horiz time; do + if [[ ${type} = "horiz" ]]; then + 
suffix=".gz" + elif [[ ${type} = "time" ]]; then + suffix="" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/bad_cnt.${PDY}${cyc}" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/bad_diag.${PDY}${cyc}" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/bad_pen.${PDY}${cyc}" + fi + subtyplist="gome_metop-b omi_aura ompslp_npp ompsnp_n20 ompsnp_npp ompstc8_n20 ompstc8_npp sbuv2_n19" + for subtype in ${subtyplist}; do + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.anl.${PDY}${cyc}.ieee_d${suffix}" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.anl.ctl" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.ges.${PDY}${cyc}.ieee_d${suffix}" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.ges.ctl" + done + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/stdout.${type}.tar.gz" + done + fi + + if [[ ${DO_VERFRAD} = "YES" ]]; then + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/bad_diag.${PDY}${cyc}" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/bad_pen.${PDY}${cyc}" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/low_count.${PDY}${cyc}" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_angle.tar.gz" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_bcoef.tar.gz" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_bcor.tar.gz" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_time.tar.gz" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/warning.${PDY}${cyc}" + fi + + if [[ ${DO_VMINMON} = "YES" ]]; then + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.costs.txt" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.cost_terms.txt" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.gnorms.ieee_d" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.reduction.ieee_d" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/gnorm_data.txt" + fi + } >> "${DATA}/gdas.txt" #.................. diff --git a/ush/minmon_xtrct_costs.pl b/ush/minmon_xtrct_costs.pl index 1b5d490102..502032da80 100755 --- a/ush/minmon_xtrct_costs.pl +++ b/ush/minmon_xtrct_costs.pl @@ -208,7 +208,7 @@ #-------------------------- # move files to $M_TANKverf #-------------------------- - my $tankdir = $ENV{"M_TANKverfM0"}; + my $tankdir = $ENV{"M_TANKverf"}; if(! -d $tankdir) { system( "mkdir -p $tankdir" ); } diff --git a/ush/minmon_xtrct_gnorms.pl b/ush/minmon_xtrct_gnorms.pl index ecd44232da..0125c58ac8 100755 --- a/ush/minmon_xtrct_gnorms.pl +++ b/ush/minmon_xtrct_gnorms.pl @@ -414,7 +414,7 @@ sub updateGnormData { #-------------------------- # move files to $M_TANKverf #-------------------------- - my $tankdir = $ENV{"M_TANKverfM0"}; + my $tankdir = $ENV{"M_TANKverf"}; if(! -d $tankdir) { system( "mkdir -p $tankdir" ); } diff --git a/ush/minmon_xtrct_reduct.pl b/ush/minmon_xtrct_reduct.pl index f6037d3f32..1b8186b6ad 100755 --- a/ush/minmon_xtrct_reduct.pl +++ b/ush/minmon_xtrct_reduct.pl @@ -72,7 +72,7 @@ #---------------------------- # copy outfile to $M_TANKverf #---------------------------- - my $tankdir = $ENV{"M_TANKverfM0"}; + my $tankdir = $ENV{"M_TANKverf"}; if(! 
-d $tankdir) { system( "mkdir -p $tankdir" ); } diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py index 19c2082dd3..138a5ef43f 100644 --- a/workflow/applications/applications.py +++ b/workflow/applications/applications.py @@ -55,6 +55,9 @@ def __init__(self, conf: Configuration) -> None: self.do_awips = _base.get('DO_AWIPS', False) self.do_wafs = _base.get('WAFSF', False) self.do_vrfy = _base.get('DO_VRFY', True) + self.do_verfozn = _base.get('DO_VERFOZN', True) + self.do_verfrad = _base.get('DO_VERFRAD', True) + self.do_vminmon = _base.get('DO_VMINMON', True) self.do_metp = _base.get('DO_METP', False) self.do_hpssarch = _base.get('HPSSARCH', False) diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 6edd6bc0d8..cdb5e18f3e 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -56,6 +56,15 @@ def _get_app_configs(self): configs += ['eobs', 'eomg', 'ediag', 'eupd'] configs += ['ecen', 'esfc', 'efcs', 'echgres', 'epos', 'earc'] + if self.do_verfozn: + configs += ['verfozn'] + + if self.do_verfrad: + configs += ['verfrad'] + + if self.do_vminmon: + configs += ['vminmon'] + if self.do_metp: configs += ['metp'] @@ -159,6 +168,15 @@ def get_task_names(self): if self.do_fit2obs: gdas_tasks += ['fit2obs'] + if self.do_verfozn: + gdas_tasks += ['verfozn'] + + if self.do_verfrad: + gdas_tasks += ['verfrad'] + + if self.do_vminmon: + gdas_tasks += ['vminmon'] + gdas_tasks += gdas_gfs_common_cleanup_tasks # Collect "gfs" cycle tasks @@ -171,6 +189,9 @@ def get_task_names(self): gfs_tasks += gdas_gfs_common_tasks_after_fcst + if self.do_vminmon: + gfs_tasks += ['vminmon'] + if self.do_metp: gfs_tasks += ['metp'] diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index ca9af5af7d..5e2ed8cd03 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -864,6 +864,39 @@ def gempak(self): return task + def verfozn(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}analdiag'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('verfozn') + task = create_wf_task('verfozn', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def verfrad(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}analdiag'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('verfrad') + task = create_wf_task('verfrad', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def vminmon(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('vminmon') + task = create_wf_task('vminmon', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + def vrfy(self): deps = [] dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} @@ -918,6 +951,28 @@ def metp(self): def arch(self): deps = [] + dependencies = [] + if self.app_config.do_verfozn or self.app_config.do_verfrad or self.app_config.do_vminmon: + if self.app_config.mode in ['cycled']: + if self.cdump in ['gfs']: + if self.app_config.do_vminmon: + dep_dict = {'type': 'task', 'name': f'{self.cdump}vminmon'} + deps.append(rocoto.add_dependency(dep_dict)) + elif 
self.cdump in ['gdas']: + deps2 = [] + if self.app_config.do_verfozn: + dep_dict = {'type': 'task', 'name': f'{self.cdump}verfozn'} + deps2.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_verfrad: + dep_dict = {'type': 'task', 'name': f'{self.cdump}verfrad'} + deps2.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_vminmon: + dep_dict = {'type': 'task', 'name': f'{self.cdump}vminmon'} + deps2.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps2) + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': '-06:00:00'} + dependencies.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies) if self.app_config.do_vrfy: dep_dict = {'type': 'task', 'name': f'{self.cdump}vrfy'} deps.append(rocoto.add_dependency(dep_dict)) @@ -941,7 +996,7 @@ def arch(self): dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps + dependencies) cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 29ed57daf2..b1dd1b0d92 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -19,7 +19,8 @@ class Tasks: 'atmensanlinit', 'atmensanlrun', 'atmensanlfinal', 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', 'preplandobs', 'landanl', - 'fcst', 'post', 'ocnpost', 'vrfy', 'metp', + 'fcst', 'post', 'ocnpost', + 'verfozn', 'verfrad', 'vminmon', 'vrfy', 'metp', 'postsnd', 'awips', 'gempak', 'waveawipsbulls', 'waveawipsgridded', 'wavegempak', 'waveinit', 'wavepostbndpnt', 'wavepostbndpntbll', 'wavepostpnt', 'wavepostsbs', 'waveprep'] From bd4c56d99c23c5c41f0bd5f8a980ef99ee160888 Mon Sep 17 00:00:00 2001 From: AndrewEichmann-NOAA <58948505+AndrewEichmann-NOAA@users.noreply.github.com> Date: Thu, 2 Nov 2023 16:09:59 -0400 Subject: [PATCH 28/34] changed optional script name vars for ocean analysis (#2025) --- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT | 2 +- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY | 2 +- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT | 2 +- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST | 2 +- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP | 2 +- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN | 2 +- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY | 2 +- jobs/JGLOBAL_PREP_OCEAN_OBS | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT index a9366f7d80..a1ecc116ea 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT @@ -20,7 +20,7 @@ export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean} ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat.sh} +EXSCRIPT=${GDASOCNBMATSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat.sh} ${EXSCRIPT} status=$? 
[[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY index 944181089c..08e7da60c0 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY @@ -19,7 +19,7 @@ export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean} ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat_vrfy.sh} +EXSCRIPT=${GDASOCNMBATVRFYSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat_vrfy.sh} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT index 6eab956ad9..afac9fbc25 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT @@ -34,7 +34,7 @@ RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx COM_ATMOS_HISTORY_PREV:COM_ ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_chkpt.sh} +EXSCRIPT=${GDASOCNCHKPTSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_chkpt.sh} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST index ab86b66f20..6034fc5425 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST @@ -31,7 +31,7 @@ export PYTHONPATH # Run relevant script ############################################################### -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_post.py} +EXSCRIPT=${GDASOCNPOSTPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_post.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP index 3aecf1fb2e..2e49a9f14d 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP @@ -41,7 +41,7 @@ export PYTHONPATH ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_prep.py} +EXSCRIPT=${GDASOCNPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_prep.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN index 7c584b74d8..5871497223 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN @@ -17,7 +17,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalrun" -c "base ocnanal ocnanalr ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_run.sh} +EXSCRIPT=${GDASOCNRUNSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_run.sh} ${EXSCRIPT} status=$? 
[[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY index ec154af7f9..aba76d7d1a 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY @@ -32,7 +32,7 @@ export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/soca:${PYTHONPATH} ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_vrfy.py} +EXSCRIPT=${GDASOCNVRFYPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_vrfy.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGLOBAL_PREP_OCEAN_OBS b/jobs/JGLOBAL_PREP_OCEAN_OBS index d5064859d2..44cbbf1c08 100755 --- a/jobs/JGLOBAL_PREP_OCEAN_OBS +++ b/jobs/JGLOBAL_PREP_OCEAN_OBS @@ -19,7 +19,7 @@ export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/soca:${PYTHONPATH} ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/ush/exglobal_prep_ocean_obs.py} +EXSCRIPT=${GDASPREPOCNOBSPY:-${HOMEgfs}/ush/exglobal_prep_ocean_obs.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" From cbdc09711a83493f363a737d1e3e46c48a09719d Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Fri, 3 Nov 2023 04:09:33 +0000 Subject: [PATCH 29/34] Fix staging of MOM data (#2028) During the update to stage_ic, the copying of the additional res_N files for 0p25 was omitted. These are now properly copied. Resolves #2027 --- scripts/exglobal_stage_ic.sh | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/scripts/exglobal_stage_ic.sh b/scripts/exglobal_stage_ic.sh index 43812adc89..53042c7e45 100755 --- a/scripts/exglobal_stage_ic.sh +++ b/scripts/exglobal_stage_ic.sh @@ -60,6 +60,26 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do rc=$? ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" err=$((err + rc)) + case "${OCNRES}" in + "500" | "100") + # Nothing more to do for these resolutions + ;; + "025" ) + for nn in $(seq 1 3); do + src="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.MOM.res_${nn}.nc" + tgt="${COM_OCEAN_RESTART}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" + ${NCP} "${src}" "${tgt}" + rc=$? 
+ ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" + err=$((err + rc)) + done + ;; + *) + echo "FATAL ERROR: Unsupported ocean resolution ${OCNRES}" + rc=1 + err=$((err + rc)) + ;; + esac fi # Stage ice initial conditions to ROTDIR (warm start) if [[ "${DO_ICE:-}" = "YES" ]]; then From 2563806821d4ae3b57120bc61aa0575139a708f0 Mon Sep 17 00:00:00 2001 From: jiandewang Date: Fri, 3 Nov 2023 11:49:37 -0400 Subject: [PATCH 30/34] Add two ucx modules in load_ufswm_modules.sh to solve C768 and C1152 S2SW job hanging issue on WCOSS2 (#2021) --- ush/load_ufswm_modules.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ush/load_ufswm_modules.sh b/ush/load_ufswm_modules.sh index 2d6e6a02c1..9fea63f402 100755 --- a/ush/load_ufswm_modules.sh +++ b/ush/load_ufswm_modules.sh @@ -19,6 +19,8 @@ if [[ "${MACHINE_ID}" != "noaacloud" ]]; then module load cray-pals module load cfp module load libjpeg + module load craype-network-ucx + module load cray-mpich-ucx else module load prod-util export UTILROOT=${prod_util_ROOT} From 8d55126bb0b27c79c8bee7f72b014abd34950ff9 Mon Sep 17 00:00:00 2001 From: Guillaume Vernieres Date: Fri, 3 Nov 2023 17:06:09 -0400 Subject: [PATCH 31/34] Implement IAU Cycling Type with Marine Data Assimilation Enabled (#1944) Allows cycling in IAU mode with the S2S UFS model, soca and the GSI. Most of the work related to making IAU work was done by @JessicaMeixner-NOAA . Thanks @JessicaMeixner-NOAA ! Resolves #1943 Refs #1776 --- parm/config/gfs/config.resources | 12 +++++++--- parm/ufs/fv3/diag_table_da | 18 +++++++-------- parm/ufs/mom6/MOM_input_template_025 | 4 ++-- parm/ufs/mom6/MOM_input_template_100 | 2 +- parm/ufs/mom6/MOM_input_template_500 | 9 ++++++++ ush/forecast_postdet.sh | 15 ++++++------ ush/forecast_predet.sh | 34 +++++++++++++++------------- 7 files changed, 55 insertions(+), 39 deletions(-) diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 037b98803d..1f89994b69 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -394,13 +394,14 @@ elif [[ "${step}" = "ocnanalrun" ]]; then case ${CASE} in C384) npes=480 - export memory_ocnanalchkpt="2.8TB" + memory_ocnanalrun="128GB" ;; C96) npes=16 ;; C48) npes=16 + memory_ocnanalrun="64GB" ;; *) echo "FATAL: Resolution not supported'" @@ -409,10 +410,11 @@ elif [[ "${step}" = "ocnanalrun" ]]; then export wtime_ocnanalrun="00:15:00" export npe_ocnanalrun=${npes} - export nth_ocnanalrun=1 + export nth_ocnanalrun=2 export is_exclusive=True npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) export npe_node_ocnanalrun + export memory_ocnanalrun elif [[ "${step}" = "ocnanalchkpt" ]]; then @@ -648,7 +650,11 @@ elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then declare -x "wtime_${step}"="00:30:00" declare -x "wtime_${step}_gfs"="03:00:00" ;; - "C384" | "C768" | "C1152") + "C384") + declare -x "wtime_${step}"="00:20:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + "C768" | "C1152") declare -x "wtime_${step}"="01:00:00" declare -x "wtime_${step}_gfs"="06:00:00" ;; diff --git a/parm/ufs/fv3/diag_table_da b/parm/ufs/fv3/diag_table_da index cdcc36ee57..40824caee9 100644 --- a/parm/ufs/fv3/diag_table_da +++ b/parm/ufs/fv3/diag_table_da @@ -2,15 +2,15 @@ "fv3_history2d", 0, "hours", 1, "hours", "time" "ocn_da%4yr%2mo%2dy%2hr", 1, "hours", 1, "hours", "time", 1, "hours" -"ocean_model", "geolon", "geolon", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "geolat", "geolat", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", 
"none", 2 -"ocean_model", "SSH", "ave_ssh", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "MLD_0125", "MLD", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "u", "u", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "v", "v", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "h", "h", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "salt", "Salt", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "temp", "Temp", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 +"ocean_model", "geolon", "geolon", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolat", "geolat", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "SSH", "ave_ssh", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "MLD_0125", "MLD", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "u", "u", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "v", "v", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "h", "h", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "salt", "Salt", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "temp", "Temp", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 "gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2 diff --git a/parm/ufs/mom6/MOM_input_template_025 b/parm/ufs/mom6/MOM_input_template_025 index 604689c376..df56a3f486 100644 --- a/parm/ufs/mom6/MOM_input_template_025 +++ b/parm/ufs/mom6/MOM_input_template_025 @@ -341,8 +341,8 @@ DIAG_COORDS = "z Z ZSTAR" ! A list of string tuples associating diag_table modules to ! a coordinate definition used for diagnostics. Each string ! is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME". -DIAG_COORD_DEF_Z="FILE:interpolate_zgrid_40L.nc,interfaces=zw" -DIAG_MISVAL = -1e34 +DIAG_COORD_DEF_Z="FILE:@[MOM6_DIAG_COORD_DEF_Z_FILE],interfaces=zw" +DIAG_MISVAL = @[MOM6_DIAG_MISVAL] !DIAG_COORD_DEF_RHO2 = "FILE:diag_rho2.nc,interfaces=rho2" ! default = "WOA09" ! Determines how to specify the coordinate resolution. Valid options are: ! PARAM - use the vector-parameter DIAG_COORD_RES_RHO2 diff --git a/parm/ufs/mom6/MOM_input_template_100 b/parm/ufs/mom6/MOM_input_template_100 index 5c671fe9d3..f26d6e4bfb 100644 --- a/parm/ufs/mom6/MOM_input_template_100 +++ b/parm/ufs/mom6/MOM_input_template_100 @@ -322,7 +322,7 @@ DIAG_COORDS = "z Z ZSTAR" ! a coordinate definition used for diagnostics. Each string ! is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME". DIAG_COORD_DEF_Z="FILE:@[MOM6_DIAG_COORD_DEF_Z_FILE],interfaces=zw" -DIAG_MISVAL = -1e34 +DIAG_MISVAL = @[MOM6_DIAG_MISVAL] !AVAILABLE_DIAGS_FILE = "available_diags.002160" ! default = "available_diags.000000" ! A file into which to write a list of all available ocean diagnostics that can ! be included in a diag_table. diff --git a/parm/ufs/mom6/MOM_input_template_500 b/parm/ufs/mom6/MOM_input_template_500 index 1d75ba1e71..dde805d247 100644 --- a/parm/ufs/mom6/MOM_input_template_500 +++ b/parm/ufs/mom6/MOM_input_template_500 @@ -258,6 +258,15 @@ Z_INIT_ALE_REMAPPING = True ! [Boolean] default = False ! If True, then remap straight to model coordinate from file. ! === module MOM_diag_mediator === +NUM_DIAG_COORDS = 1 + ! The number of diagnostic vertical coordinates to use. + ! For each coordinate, an entry in DIAG_COORDS must be provided. 
+DIAG_COORDS = "z Z ZSTAR" + ! A list of string tuples associating diag_table modules to + ! a coordinate definition used for diagnostics. Each string + ! is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME". +DIAG_COORD_DEF_Z="FILE:@[MOM6_DIAG_COORD_DEF_Z_FILE],interfaces=zw" +DIAG_MISVAL = @[MOM6_DIAG_MISVAL] ! === module MOM_MEKE === USE_MEKE = True ! [Boolean] default = False diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index e3166cd72f..bafa61dd0e 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -678,12 +678,12 @@ MOM6_postdet() { echo "SUB ${FUNCNAME[0]}: MOM6 after run type determination" # Copy MOM6 ICs - ${NLN} "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res.nc" "${DATA}/INPUT/MOM.res.nc" + ${NLN} "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res.nc" "${DATA}/INPUT/MOM.res.nc" case ${OCNRES} in "025") for nn in $(seq 1 4); do - if [[ -f "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" ]]; then - ${NLN} "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" "${DATA}/INPUT/MOM.res_${nn}.nc" + if [[ -f "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res_${nn}.nc" ]]; then + ${NLN} "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res_${nn}.nc" "${DATA}/INPUT/MOM.res_${nn}.nc" fi done ;; @@ -904,7 +904,7 @@ CICE_postdet() { # Copy CICE ICs echo "Link CICE ICs" - cice_restart_file="${COM_ICE_RESTART_PREV}/${PDY}.${cyc}0000.cice_model.res.nc" + cice_restart_file="${COM_ICE_RESTART_PREV}/${sPDY}.${scyc}0000.cice_model.res.nc" if [[ ! -f "${cice_restart_file}" ]]; then echo "FATAL ERROR: CICE restart file not found at '${cice_restart_file}', ABORT!" exit 112 @@ -1038,7 +1038,7 @@ GOCART_postdet() { rm -f "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" fi - #To Do: Temporarily removing this as this will crash gocart, adding copy statement at the end + #To Do: Temporarily removing this as this will crash gocart, adding copy statement at the end #${NLN} "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \ # "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" done @@ -1049,8 +1049,8 @@ GOCART_out() { # Copy gocart.inst_aod after the forecast is run (and successfull) # TO DO: this should be linked but there were issues where gocart was crashing if it was linked - local fhr - local vdate + local fhr + local vdate for fhr in ${FV3_OUTPUT_FH}; do if (( fhr == 0 )); then continue; fi vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) @@ -1060,4 +1060,3 @@ GOCART_out() { } - diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index d6f5cc7cc3..ce0b50f818 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -71,6 +71,24 @@ common_predet(){ next_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${assim_freq} hours" +%Y%m%d%H) forecast_end_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHMAX} hours" +%Y%m%d%H) + # IAU options + IAU_OFFSET=${IAU_OFFSET:-0} + DOIAU=${DOIAU:-"NO"} + if [[ "${DOIAU}" = "YES" ]]; then + sCDATE=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - 3 hours" +%Y%m%d%H) + sPDY="${sCDATE:0:8}" + scyc="${sCDATE:8:2}" + tPDY=${previous_cycle:0:8} + tcyc=${previous_cycle:8:2} + else + sCDATE=${current_cycle} + sPDY=${current_cycle:0:8} + scyc=${current_cycle:8:2} + tPDY=${sPDY} + tcyc=${scyc} + fi + + cd "${DATA}" || ( echo "FATAL ERROR: Unable to 'cd ${DATA}', ABORT!"; exit 8 ) } @@ -121,10 +139,8 @@ FV3_predet(){ 
PREFIX_ATMINC=${PREFIX_ATMINC:-""} # allow ensemble to use recentered increment # IAU options - DOIAU=${DOIAU:-"NO"} IAUFHRS=${IAUFHRS:-0} IAU_DELTHRS=${IAU_DELTHRS:-0} - IAU_OFFSET=${IAU_OFFSET:-0} # Model config options ntiles=6 @@ -216,20 +232,6 @@ FV3_predet(){ mkdir -p "${DATA}/RESTART" fi - if [[ "${DOIAU}" = "YES" ]]; then - sCDATE=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - 3 hours" +%Y%m%d%H) - sPDY="${sCDATE:0:8}" - scyc="${sCDATE:8:2}" - tPDY=${previous_cycle:0:8} - tcyc=${previous_cycle:8:2} - else - sCDATE=${current_cycle} - sPDY=${current_cycle:0:8} - scyc=${current_cycle:8:2} - tPDY=${sPDY} - tcyc=${scyc} - fi - echo "SUB ${FUNCNAME[0]}: pre-determination variables set" }

From 75269e4bb9764b81589efc7d703825b80e74c8f5 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA
Date: Mon, 6 Nov 2023 17:34:47 +0000
Subject: [PATCH 32/34] Streamline CI reporting to PRs (#2026)

Cleaned up the CI logs posted to PRs as GitHub messages:
- Eliminates redundant, cumulative reporting
- Reports all created or skipped experiment instantiations in a single message
- Gives a single one-line report for each experiment completion
- Gives a single final report on completion of CI

See some examples in the forked [PR 195](https://github.com/TerrenceMcGuinness-NOAA/global-workflow/pull/195)

Resolves #2007
--- ci/platforms/config.hera | 4 ++-- ci/platforms/config.orion | 4 ++-- ci/scripts/check_ci.sh | 37 ++++++++++++++++---------------- ci/scripts/clone-build_ci.sh | 23 +++++++++----------- ci/scripts/driver.sh | 40 ++++++++++++++++++++++------------- workflow/create_experiment.py | 6 +++--- 6 files changed, 61 insertions(+), 53 deletions(-) diff --git a/ci/platforms/config.hera b/ci/platforms/config.hera index c4c87bc197..76a6a08670 100644 --- a/ci/platforms/config.hera +++ b/ci/platforms/config.hera @@ -4,5 +4,5 @@ export GFS_CI_ROOT=/scratch1/NCEPDEV/global/Terry.McGuinness/GFS_CI_ROOT export ICSDIR_ROOT=/scratch1/NCEPDEV/global/glopara/data/ICSDIR export STMP="/scratch1/NCEPDEV/stmp2/${USER}" export SLURM_ACCOUNT=nems -export max_concurrent_cases=2 -export max_concurrent_pr=2 +export max_concurrent_cases=5 +export max_concurrent_pr=4 diff --git a/ci/platforms/config.orion b/ci/platforms/config.orion index 0cbbd5fe47..886a6e63b2 100644 --- a/ci/platforms/config.orion +++ b/ci/platforms/config.orion @@ -4,5 +4,5 @@ export GFS_CI_ROOT=/work2/noaa/stmp/GFS_CI_ROOT export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR export STMP="/work/noaa/stmp/${USER}" export SLURM_ACCOUNT=nems -export max_concurrent_cases=2 -export max_concurrent_pr=2 +export max_concurrent_cases=5 +export max_concurrent_pr=4 diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh index 097e20ced4..a5d7c77e66 100755 --- a/ci/scripts/check_ci.sh +++ b/ci/scripts/check_ci.sh @@ -70,6 +70,8 @@ fi for pr in ${pr_list}; do id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id') + output_ci="${GFS_CI_ROOT}/PR/${pr}/output_runtime_${id}" + output_ci_single="${GFS_CI_ROOT}/PR/${pr}/output_runtime_single.log" echo "Processing Pull Request #${pr} and looking for cases" pr_dir="${GFS_CI_ROOT}/PR/${pr}" @@ -83,8 +85,9 @@ for pr in ${pr_list}; do # shellcheck disable=SC2312 if [[ -z $(ls -A "${pr_dir}/RUNTESTS/EXPDIR") ]] ; then "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Passed" - sed -i "s/\`\`\`//2g" "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file
"${GFS_CI_ROOT}/PR/${pr}/output_${id}" + sed -i "1 i\`\`\`" "${output_ci}" + sed -i "1 i\All CI Test Cases Passed:" "${output_ci}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}" "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}" # Check to see if this PR that was opened by the weekly tests and if so close it if it passed on all platforms weekly_labels=$(${GH} pr view "${pr}" --repo "${REPO_URL}" --json headRefName,labels,author --jq 'select(.author.login | contains("emcbot")) | select(.headRefName | contains("weekly_ci")) | .labels[].name ') || true @@ -121,22 +124,20 @@ for pr in ${pr_list}; do rocoto_stat_output=$("${rocotostat}" -w "${xml}" -d "${db}" -s | grep -v CYCLE) || true num_cycles=$(echo "${rocoto_stat_output}" | wc -l) || true num_done=$(echo "${rocoto_stat_output}" | grep -c Done) || true - num_succeeded=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c SUCCEEDED) || true + # num_succeeded=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c SUCCEEDED) || true echo "${pslot} Total Cycles: ${num_cycles} number done: ${num_done}" || true num_failed=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c -E 'FAIL|DEAD') || true if [[ ${num_failed} -ne 0 ]]; then - { - echo "Experiment ${pslot} Terminated: *FAILED*" - echo "Experiment ${pslot} Terminated with ${num_failed} tasks failed at $(date)" || true - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - error_logs=$("${rocotostat}" -d "${db}" -w "${xml}" | grep -E 'FAIL|DEAD' | awk '{print "-c", $1, "-t", $2}' | xargs "${rocotocheck}" -d "${db}" -w "${xml}" | grep join | awk '{print $2}') || true "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Failed" + error_logs=$("${rocotostat}" -d "${db}" -w "${xml}" | grep -E 'FAIL|DEAD' | awk '{print "-c", $1, "-t", $2}' | xargs "${rocotocheck}" -d "${db}" -w "${xml}" | grep join | awk '{print $2}') || true { + echo "Experiment ${pslot} Terminated: *** FAILED ***" + echo "Experiment ${pslot} Terminated with ${num_failed} tasks failed at $(date)" || true echo "Error logs:" echo "${error_logs}" - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - sed -i "s/\`\`\`//2g" "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + } >> "${output_ci}" + sed -i "1 i\`\`\`" "${output_ci}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}" "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}" for kill_cases in "${pr_dir}/RUNTESTS/"*; do pslot=$(basename "${kill_cases}") @@ -145,16 +146,16 @@ for pr in ${pr_list}; do break fi if [[ "${num_done}" -eq "${num_cycles}" ]]; then - { - echo "Experiment ${pslot} completed: *SUCCESS*" - echo "Experiment ${pslot} Completed at $(date)" || true - echo "with ${num_succeeded} successfully completed jobs" || true - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - sed -i "s/\`\`\`//2g" "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" #Remove Experment cases that completed successfully rm -Rf "${pslot_dir}" rm -Rf "${pr_dir}/RUNTESTS/COMROT/${pslot}" + rm -f "${output_ci_single}" + # echo "\`\`\`" > "${output_ci_single}" + DATE=$(date) + echo "Experiment ${pslot} **SUCCESS** ${DATE}" >> "${output_ci_single}" + echo "Experiment ${pslot} **SUCCESS** at ${DATE}" >> "${output_ci}" + "${GH}" pr comment 
"${pr}" --repo "${REPO_URL}" --body-file "${output_ci_single}" + fi done done diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh index 03eff13158..4b77d38ab8 100755 --- a/ci/scripts/clone-build_ci.sh +++ b/ci/scripts/clone-build_ci.sh @@ -40,7 +40,6 @@ while getopts "p:d:o:h" opt; do done cd "${repodir}" || exit 1 -# clone copy of repo if [[ -d global-workflow ]]; then rm -Rf global-workflow fi @@ -48,13 +47,6 @@ fi git clone "${REPO_URL}" cd global-workflow || exit 1 -pr_state=$("${GH}" pr view "${PR}" --json state --jq '.state') -if [[ "${pr_state}" != "OPEN" ]]; then - title=$("${GH}" pr view "${PR}" --json title --jq '.title') - echo "PR ${title} is no longer open, state is ${pr_state} ... quitting" - exit 1 -fi - # checkout pull request "${GH}" pr checkout "${PR}" --repo "${REPO_URL}" HOMEgfs="${PWD}" @@ -78,19 +70,17 @@ echo "${commit}" > "../commit" # run checkout script cd sorc || exit 1 set +e -# TODO enable -u later when GDASApp tests are added ./checkout.sh -c -g -u >> log.checkout 2>&1 checkout_status=$? if [[ ${checkout_status} != 0 ]]; then { - echo "Checkout: *FAILED*" + echo "Checkout: *** FAILED ***" echo "Checkout: Failed at $(date)" || true echo "Checkout: see output at ${PWD}/log.checkout" } >> "${outfile}" exit "${checkout_status}" else { - echo "Checkout: *SUCCESS*" echo "Checkout: Completed at $(date)" || true } >> "${outfile}" fi @@ -104,19 +94,26 @@ build_status=$? if [[ ${build_status} != 0 ]]; then { - echo "Build: *FAILED*" + echo "Build: *** FAILED ***" echo "Build: Failed at $(date)" || true echo "Build: see output at ${PWD}/log.build" } >> "${outfile}" exit "${build_status}" else { - echo "Build: *SUCCESS*" echo "Build: Completed at $(date)" || true } >> "${outfile}" fi ./link_workflow.sh +link_status=$? +if [[ ${link_status} != 0 ]]; then + { + echo "Link: *** FAILED ***" + echo "Link: Failed at $(date)" || true + } >> "${outfile}" + exit "${link_status}" +fi echo "check/build/link test completed" exit "${build_status}" diff --git a/ci/scripts/driver.sh b/ci/scripts/driver.sh index 00143fa049..7988ff17a1 100755 --- a/ci/scripts/driver.sh +++ b/ci/scripts/driver.sh @@ -119,7 +119,10 @@ for pr in ${pr_list}; do # call clone-build_ci to clone and build PR id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id') set +e - "${ROOT_DIR}/ci/scripts/clone-build_ci.sh" -p "${pr}" -d "${pr_dir}" -o "${pr_dir}/output_${id}" + output_ci="${pr_dir}/output_build_${id}" + rm -f "${output_ci}" + "${ROOT_DIR}/ci/scripts/clone-build_ci.sh" -p "${pr}" -d "${pr_dir}" -o "${output_ci}" + #echo "SKIPPING: ${ROOT_DIR}/ci/scripts/clone-build_ci.sh" ci_status=$? ################################################################## # Checking for special case when Ready label was updated @@ -138,7 +141,7 @@ for pr in ${pr_list}; do #setup space to put an experiment # export RUNTESTS for yaml case files to pickup export RUNTESTS="${pr_dir}/RUNTESTS" - #rm -Rf "${pr_dir:?}/RUNTESTS/"* + rm -Rf "${pr_dir:?}/RUNTESTS/"* ############################################################# # loop over every yaml file in the PR's ci/cases @@ -155,39 +158,46 @@ for pr in ${pr_list}; do rm -Rf "${STMP}/RUNDIRS/${pslot}" set +e export LOGFILE_PATH="${HOMEgfs}/ci/scripts/create_experiment.log" - "${HOMEgfs}/workflow/create_experiment.py" --yaml "${HOMEgfs}/ci/cases/pr/${case}.yaml" + rm -f "${LOGFILE_PATH}" + "${HOMEgfs}/workflow/create_experiment.py" --yaml "${HOMEgfs}/ci/cases/pr/${case}.yaml" 2>&1 "${LOGFILE_PATH}" ci_status=$? 
       set -e
       if [[ ${ci_status} -eq 0 ]]; then
+        last_line=$(tail -1 "${LOGFILE_PATH}")
+        if [[ "${last_line}" == *"Skipping creation"* ]]; then
+          action="Skipped"
+        else
+          action="Completed"
+        fi
         {
-          echo "Created experiment: *SUCCESS*"
-          echo "Case setup: Completed at $(date) for experiment ${pslot}" || true
-        } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
-        "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Running"
-        "${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --update_pr "${pr}" Open Running
+          echo "Case setup: ${action} for experiment ${pslot}" || true
+        } >> "${output_ci}"
       else
         {
-          echo "Failed to create experiment: *FAIL* ${pslot}"
-          echo "Experiment setup: failed at $(date) for experiment ${pslot}" || true
+          echo "*** Failed *** to create experiment: ${pslot}"
           echo ""
           cat "${LOGFILE_PATH}"
-        } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
+        } >> "${output_ci}"
         "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed"
         "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
+        "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}"
+        exit 1
       fi
     done
+    "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Running"
+    "${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --update_pr "${pr}" Open Running
+    "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}"
+
   else
     {
-      echo '```'
       echo "Failed on cloning and building global-workflow PR: ${pr}"
       echo "CI on ${MACHINE_ID^} failed to build on $(date) for repo ${REPO_URL}" || true
-    } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
+    } >> "${output_ci}"
     "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed"
     "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
+    "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}"
   fi
-  sed -i "s/\`\`\`//2g" "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
-  "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
 done # looping over each open and labeled PR
diff --git a/workflow/create_experiment.py b/workflow/create_experiment.py
index bfc87672f4..cfa49e0d38 100755
--- a/workflow/create_experiment.py
+++ b/workflow/create_experiment.py
@@ -34,7 +34,7 @@ _top = os.path.abspath(os.path.join(os.path.abspath(_here), '..'))
 # Setup the logger
-logger = Logger(logfile_path=os.environ.get("LOGFILE_PATH"), level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True)
+logger = Logger(logfile_path=os.environ.get("LOGFILE_PATH"), level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=False)

 @logit(logger)
@@ -77,9 +77,9 @@ def input_args():
     data.update(os.environ)
     testconf = parse_j2yaml(path=user_inputs.yaml, data=data)

-    if 'exclude' in testconf:
+    if 'skip_ci_on_hosts' in testconf:
         host = Host()
-        if host.machine.lower() in [excluded_host.lower() for excluded_host in testconf.exclude]:
+        if host.machine.lower() in [machine.lower() for machine in testconf.skip_ci_on_hosts]:
             logger.info(f'Skipping creation of case: {testconf.arguments.pslot} on {host.machine.capitalize()}')
             sys.exit(0)

From 34a73cf2ed5afefc776d9cc73b6ebf439122a1bb Mon Sep 17 00:00:00 2001
From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com>
Date: Mon, 6 Nov 2023 14:00:51 -0500
Subject: [PATCH 33/34] Reduce gdas builds to 8 #2029 (#2036)

Reduce the default number of build jobs for the GDASApp from 16 to 8.
This is needed on Orion, where the build crashes during a linking step.
Though not verified, this appears to be caused by using too much memory
with 16 build jobs; the issue disappears with 8.

Resolves #2029
---
 sorc/build_all.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sorc/build_all.sh b/sorc/build_all.sh
index 95183f9065..4ba0b92888 100755
--- a/sorc/build_all.sh
+++ b/sorc/build_all.sh
@@ -141,7 +141,7 @@ fi
 # Optional DA builds
 if [[ -d gdas.cd ]]; then
-  build_jobs["gdas"]=16
+  build_jobs["gdas"]=8
   big_jobs=$((big_jobs+1))
   build_opts["gdas"]="${_verbose_opt}"
 fi

From 5183c43bbbd07285869feacb49d8680afe85929a Mon Sep 17 00:00:00 2001
From: Barry Baker
Date: Tue, 7 Nov 2023 01:14:40 -0500
Subject: [PATCH 34/34] Update UFS for dust fix and remove nitrates by default
 (#1989)

This PR updates the `ufs-weather-model` to the latest hash, which includes
the dust fix (PR #1922). Along with this, nitrates are removed by default
in support of the GEFSv13 EP4 and EP5 experiments. Unneeded inputs were
also removed, which should help improve speed.
---
 Externals.cfg                      |   2 +-
 parm/ufs/chem/AERO_HISTORY.rc      | 413 ++++++++++++++---------------
 parm/ufs/chem/CAP.rc               |  11 +-
 parm/ufs/chem/DU2G_instance_DU.rc  |   4 +-
 parm/ufs/chem/ExtData.gbbepx       |   8 +-
 parm/ufs/chem/ExtData.other        |  39 ++-
 parm/ufs/chem/ExtData.qfed         |   8 +-
 parm/ufs/chem/GOCART2G_GridComp.rc |   2 +-
 parm/ufs/chem/gocart_tracer.list   |   5 -
 parm/ufs/fv3/diag_table.aero       |  10 +-
 parm/ufs/fv3/field_table.aero      |  50 ++--
 sorc/checkout.sh                   |   2 +-
 12 files changed, 274 insertions(+), 280 deletions(-)

diff --git a/Externals.cfg b/Externals.cfg
index abe5f30aaf..e5d7c0d8c9 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -8,7 +8,7 @@ protocol = git
 required = True

 [UFS]
-tag = 68050e5
+tag = 63a43d9
 local_path = sorc/ufs_model.fd
 repo_url = https://github.com/ufs-community/ufs-weather-model.git
 protocol = git
diff --git a/parm/ufs/chem/AERO_HISTORY.rc b/parm/ufs/chem/AERO_HISTORY.rc
index 19f572bb8c..4c7df15b2a 100644
--- a/parm/ufs/chem/AERO_HISTORY.rc
+++ b/parm/ufs/chem/AERO_HISTORY.rc
@@ -6,7 +6,7 @@ VERSION: 1
 EXPID: gocart
 EXPDSC: GOCART2g_diagnostics_at_c360
 EXPSRC: GEOSgcm-v10.16.0
-
+Allow_Overwrite: .true.
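+# Allow_Overwrite permits HISTORY to overwrite existing output files (as on a
+# forecast rerun) instead of aborting when a file already exists.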
COLLECTIONS: 'inst_aod' # 'inst_du_ss' @@ -25,7 +25,7 @@ COLLECTIONS: 'inst_aod' # 'tavg_du_bin' # 'tavg_2d_rad' # 'tavg_3d_rad' - :: + :: ################################################## # The GRID_LABELS section must be after the main # @@ -63,8 +63,8 @@ PC720x361-DC.LM: 72 inst_du_ss.ref_time: 000000 , inst_du_ss.nbits: 10, inst_du_ss.fields: 'DU' , 'DU' , - 'SS' , 'SS' , - :: + 'SS' , 'SS' , + :: tavg_du_ss.format: 'CFIO' , tavg_du_ss.descr: '3d,Hourly,Instantaneous,Model-Level,Aerosol Concentrations', @@ -77,8 +77,8 @@ PC720x361-DC.LM: 72 tavg_du_ss.ref_time: 000000 , tavg_du_ss.nbits: 10, tavg_du_ss.fields: 'DU' , 'DU' , - 'SS' , 'SS' , - :: + 'SS' , 'SS' , + :: inst_ca.format: 'CFIO' , inst_ca.descr: '3d,Hourly,Instantaneous,Model-Level,Aerosol Concentrations', @@ -90,10 +90,10 @@ PC720x361-DC.LM: 72 inst_ca.ref_time: 000000 , inst_ca.nbits: 10, inst_ca.fields: 'CAphilicCA.bc' , 'CA.bc' , - 'CAphobicCA.bc' , 'CA.bc' , - 'CAphilicCA.oc' , 'CA.oc' , - 'CAphobicCA.oc' , 'CA.oc' , - :: + 'CAphobicCA.bc' , 'CA.bc' , + 'CAphilicCA.oc' , 'CA.oc' , + 'CAphobicCA.oc' , 'CA.oc' , + :: inst_ni.format: 'CFIO' , inst_ni.descr: '3d,Hourly,Instantaneous,Model-Level,Aerosol Concentrations', @@ -105,11 +105,11 @@ PC720x361-DC.LM: 72 inst_ni.ref_time: 000000 , inst_ni.nbits: 10, inst_ni.fields: 'NH3' , 'NI' , - 'NH4a' , 'NI' , - 'NO3an1' , 'NI' , - 'NO3an2' , 'NI' , - 'NO3an3' , 'NI' , - :: + 'NH4a' , 'NI' , + 'NO3an1' , 'NI' , + 'NO3an2' , 'NI' , + 'NO3an3' , 'NI' , + :: inst_su.format: 'CFIO' , inst_su.descr: '3d,Hourly,Instantaneous,Model-Level,Aerosol Concentrations', @@ -121,10 +121,10 @@ PC720x361-DC.LM: 72 inst_su.ref_time: 000000 , inst_su.nbits: 10, inst_su.fields: 'DMS' , 'SU' , - 'SO2' , 'SU' , - 'SO4' , 'SU' , - 'MSA' , 'SU' , - :: + 'SO2' , 'SU' , + 'SO4' , 'SU' , + 'MSA' , 'SU' , + :: # # Binned aerosols # @@ -140,11 +140,11 @@ PC720x361-DC.LM: 72 inst_du_bin.ref_time: 000000 , inst_du_bin.nbits: 10, inst_du_bin.fields: 'DUEM' , 'DU' , - 'DUSD' , 'DU' , - 'DUDP' , 'DU' , - 'DUWT' , 'DU' , - 'DUSV' , 'DU' , - :: + 'DUSD' , 'DU' , + 'DUDP' , 'DU' , + 'DUWT' , 'DU' , + 'DUSV' , 'DU' , + :: tavg_du_bin.format: 'CFIO' , tavg_du_bin.descr: '2d,Hourly,Instantaneous' @@ -157,11 +157,11 @@ PC720x361-DC.LM: 72 tavg_du_bin.ref_time: 000000 , tavg_du_bin.nbits: 10, tavg_du_bin.fields: 'DUEM' , 'DU' , - 'DUSD' , 'DU' , - 'DUDP' , 'DU' , - 'DUWT' , 'DU' , - 'DUSV' , 'DU' , - :: + 'DUSD' , 'DU' , + 'DUDP' , 'DU' , + 'DUWT' , 'DU' , + 'DUSV' , 'DU' , + :: inst_ss_bin.format: 'CFIO' , inst_ss_bin.descr: '2d,Hourly,Instantaneous' @@ -174,11 +174,11 @@ PC720x361-DC.LM: 72 inst_ss_bin.ref_time: 000000 , inst_ss_bin.nbits: 10, inst_ss_bin.fields: 'SSEM' , 'SS' , - 'SSSD' , 'SS' , - 'SSDP' , 'SS' , - 'SSWT' , 'SS' , - 'SSSV' , 'SS' , - :: + 'SSSD' , 'SS' , + 'SSDP' , 'SS' , + 'SSWT' , 'SS' , + 'SSSV' , 'SS' , + :: inst_ca_bin.format: 'CFIO' , inst_ca_bin.descr: '3d,Hourly,Instantaneous,Model-Level' @@ -191,16 +191,16 @@ PC720x361-DC.LM: 72 inst_ca_bin.ref_time: 000000 , inst_ca_bin.nbits: 10, inst_ca_bin.fields: 'CAEMCA.bc' , 'CA.bc' , - 'CAEMCA.oc' , 'CA.oc' , - 'CASDCA.bc' , 'CA.bc' , - 'CASDCA.oc' , 'CA.oc' , - 'CADPCA.bc' , 'CA.bc' , - 'CADPCA.oc' , 'CA.oc' , - 'CAWTCA.bc' , 'CA.bc' , - 'CAWTCA.oc' , 'CA.oc' , - 'CASVCA.bc' , 'CA.bc' , - 'CASVCA.oc' , 'CA.oc' , - :: + 'CAEMCA.oc' , 'CA.oc' , + 'CASDCA.bc' , 'CA.bc' , + 'CASDCA.oc' , 'CA.oc' , + 'CADPCA.bc' , 'CA.bc' , + 'CADPCA.oc' , 'CA.oc' , + 'CAWTCA.bc' , 'CA.bc' , + 'CAWTCA.oc' , 'CA.oc' , + 'CASVCA.bc' , 'CA.bc' , + 'CASVCA.oc' , 'CA.oc' , + :: 
inst_ni_bin.format: 'CFIO' , inst_ni_bin.descr: '3d,Hourly,Instantaneous,Model-Level' @@ -213,11 +213,11 @@ PC720x361-DC.LM: 72 inst_ni_bin.ref_time: 000000 , inst_ni_bin.nbits: 10, inst_ni_bin.fields: 'NIHT' , 'NI' , - 'NISD' , 'NI' , - 'NIDP' , 'NI' , - 'NIWT' , 'NI' , - 'NISV' , 'NI' , - :: + 'NISD' , 'NI' , + 'NIDP' , 'NI' , + 'NIWT' , 'NI' , + 'NISV' , 'NI' , + :: inst_su_bin.format: 'CFIO' , inst_su_bin.descr: '3d,Hourly,Instantaneous,Model-Level' @@ -230,11 +230,11 @@ PC720x361-DC.LM: 72 inst_su_bin.ref_time: 000000 , inst_su_bin.nbits: 10, inst_su_bin.fields: 'SUEM' , 'SU', - 'SUSD' , 'SU', - 'SUDP' , 'SU', - 'SUWT' , 'SU', - 'SUSV' , 'SU', - :: + 'SUSD' , 'SU', + 'SUDP' , 'SU', + 'SUWT' , 'SU', + 'SUSV' , 'SU', + :: # # Other 2d diagnostics @@ -249,92 +249,92 @@ PC720x361-DC.LM: 72 inst_2d.ref_time: 000000, inst_2d.grid_label: PC720x361-DC inst_2d.fields: 'DUSMASS' , 'DU' , - 'DUCMASS' , 'DU' , - 'DUSMASS25' , 'DU' , - 'DUCMASS25' , 'DU' , - 'DUAERIDX' , 'DU' , - 'DUFLUXU' , 'DU' , - 'DUFLUXV' , 'DU' , - 'DUANGSTR' , 'DU' , - 'SSSMASS' , 'SS' , - 'SSCMASS' , 'SS' , - 'SSSMASS25' , 'SS' , - 'SSCMASS25' , 'SS' , - 'SSAERIDX' , 'SS' , - 'SSANGSTR' , 'SS' , - 'SSFLUXU' , 'SS' , - 'SSFLUXV' , 'SS' , - 'CAEMANCA.bc' , 'CA.bc' , - 'CAEMANCA.oc' , 'CA.oc' , - 'CAEMBBCA.bc' , 'CA.bc' , - 'CAEMBBCA.oc' , 'CA.oc' , - 'CAEMBFCA.bc' , 'CA.bc' , - 'CAEMBFCA.oc' , 'CA.oc' , - 'CAEMBGCA.bc' , 'CA.bc' , - 'CAEMBGCA.oc' , 'CA.oc' , - 'CAHYPHILCA.bc' , 'CA.bc' , - 'CAHYPHILCA.oc' , 'CA.oc' , - 'CAPSOACA.bc' , 'CA.bc' , - 'CAPSOACA.oc' , 'CA.oc' , - 'CASMASSCA.bc' , 'CA.bc' , - 'CASMASSCA.oc' , 'CA.oc' , - 'CACMASSCA.bc' , 'CA.bc' , - 'CACMASSCA.oc' , 'CA.oc' , - 'CAANGSTRCA.bc' , 'CA.bc' , - 'CAANGSTRCA.oc' , 'CA.oc' , - 'CAFLUXUCA.bc' , 'CA.bc' , - 'CAFLUXUCA.oc' , 'CA.oc' , - 'CAFLUXVCA.bc' , 'CA.bc' , - 'CAFLUXVCA.oc' , 'CA.oc' , - 'CAAERIDXCA.bc' , 'CA.bc' , - 'CAAERIDXCA.oc' , 'CA.oc' , - 'NIPNO3AQ' , 'NI' , - 'NIPNH4AQ' , 'NI' , - 'NIPNH3AQ' , 'NI' , - 'NH3EM' , 'NI' , - 'NH3DP' , 'NI' , - 'NH3WT' , 'NI' , - 'NH3SV' , 'NI' , - 'NH4SD' , 'NI' , - 'NH4DP' , 'NI' , - 'NH4WT' , 'NI' , - 'NH4SV' , 'NI' , - 'HNO3SMASS' , 'NI' , - 'NH3SMASS' , 'NI' , - 'NH4SMASS' , 'NI' , - 'NISMASS' , 'NI' , - 'NISMASS25' , 'NI' , - 'HNO3CMASS' , 'NI' , - 'NH3CMASS' , 'NI' , - 'NH4CMASS' , 'NI' , - 'NICMASS' , 'NI' , - 'NICMASS25' , 'NI' , - 'NIANGSTR' , 'NI' , - 'NIFLUXU' , 'NI' , - 'NIFLUXV' , 'NI' , - 'SUPSO2' , 'SU' , - 'SUPSO4' , 'SU' , - 'SUPSO4G' , 'SU' , - 'SUPSO4AQ' , 'SU' , - 'SUPSO4WT' , 'SU' , - 'SUPMSA' , 'SU' , - 'SO2SMASS' , 'SU' , - 'SO2CMASS' , 'SU' , - 'SO4SMASS' , 'SU' , - 'SO4CMASS' , 'SU' , - 'DMSSMASS' , 'SU' , - 'DMSCMASS' , 'SU' , - 'MSASMASS' , 'SU' , - 'MSACMASS' , 'SU' , - 'SUANGSTR' , 'SU' , - 'SUFLUXU' , 'SU' , - 'SUFLUXV' , 'SU' , - 'SO4EMAN' , 'SU' , - 'SO2EMAN' , 'SU' , - 'SO2EMBB' , 'SU' , - 'SO2EMVN' , 'SU' , - 'SO2EMVE' , 'SU' , - :: + 'DUCMASS' , 'DU' , + 'DUSMASS25' , 'DU' , + 'DUCMASS25' , 'DU' , + 'DUAERIDX' , 'DU' , + 'DUFLUXU' , 'DU' , + 'DUFLUXV' , 'DU' , + 'DUANGSTR' , 'DU' , + 'SSSMASS' , 'SS' , + 'SSCMASS' , 'SS' , + 'SSSMASS25' , 'SS' , + 'SSCMASS25' , 'SS' , + 'SSAERIDX' , 'SS' , + 'SSANGSTR' , 'SS' , + 'SSFLUXU' , 'SS' , + 'SSFLUXV' , 'SS' , + 'CAEMANCA.bc' , 'CA.bc' , + 'CAEMANCA.oc' , 'CA.oc' , + 'CAEMBBCA.bc' , 'CA.bc' , + 'CAEMBBCA.oc' , 'CA.oc' , + 'CAEMBFCA.bc' , 'CA.bc' , + 'CAEMBFCA.oc' , 'CA.oc' , + 'CAEMBGCA.bc' , 'CA.bc' , + 'CAEMBGCA.oc' , 'CA.oc' , + 'CAHYPHILCA.bc' , 'CA.bc' , + 'CAHYPHILCA.oc' , 'CA.oc' , + 'CAPSOACA.bc' , 'CA.bc' , + 'CAPSOACA.oc' , 
'CA.oc' , + 'CASMASSCA.bc' , 'CA.bc' , + 'CASMASSCA.oc' , 'CA.oc' , + 'CACMASSCA.bc' , 'CA.bc' , + 'CACMASSCA.oc' , 'CA.oc' , + 'CAANGSTRCA.bc' , 'CA.bc' , + 'CAANGSTRCA.oc' , 'CA.oc' , + 'CAFLUXUCA.bc' , 'CA.bc' , + 'CAFLUXUCA.oc' , 'CA.oc' , + 'CAFLUXVCA.bc' , 'CA.bc' , + 'CAFLUXVCA.oc' , 'CA.oc' , + 'CAAERIDXCA.bc' , 'CA.bc' , + 'CAAERIDXCA.oc' , 'CA.oc' , + 'NIPNO3AQ' , 'NI' , + 'NIPNH4AQ' , 'NI' , + 'NIPNH3AQ' , 'NI' , + 'NH3EM' , 'NI' , + 'NH3DP' , 'NI' , + 'NH3WT' , 'NI' , + 'NH3SV' , 'NI' , + 'NH4SD' , 'NI' , + 'NH4DP' , 'NI' , + 'NH4WT' , 'NI' , + 'NH4SV' , 'NI' , + 'HNO3SMASS' , 'NI' , + 'NH3SMASS' , 'NI' , + 'NH4SMASS' , 'NI' , + 'NISMASS' , 'NI' , + 'NISMASS25' , 'NI' , + 'HNO3CMASS' , 'NI' , + 'NH3CMASS' , 'NI' , + 'NH4CMASS' , 'NI' , + 'NICMASS' , 'NI' , + 'NICMASS25' , 'NI' , + 'NIANGSTR' , 'NI' , + 'NIFLUXU' , 'NI' , + 'NIFLUXV' , 'NI' , + 'SUPSO2' , 'SU' , + 'SUPSO4' , 'SU' , + 'SUPSO4G' , 'SU' , + 'SUPSO4AQ' , 'SU' , + 'SUPSO4WT' , 'SU' , + 'SUPMSA' , 'SU' , + 'SO2SMASS' , 'SU' , + 'SO2CMASS' , 'SU' , + 'SO4SMASS' , 'SU' , + 'SO4CMASS' , 'SU' , + 'DMSSMASS' , 'SU' , + 'DMSCMASS' , 'SU' , + 'MSASMASS' , 'SU' , + 'MSACMASS' , 'SU' , + 'SUANGSTR' , 'SU' , + 'SUFLUXU' , 'SU' , + 'SUFLUXV' , 'SU' , + 'SO4EMAN' , 'SU' , + 'SO2EMAN' , 'SU' , + 'SO2EMBB' , 'SU' , + 'SO2EMVN' , 'SU' , + 'SO2EMVE' , 'SU' , + :: # # 3d diagnostics @@ -348,30 +348,30 @@ PC720x361-DC.LM: 72 inst_3d.ref_time: 000000, inst_3d.grid_label: PC720x361-DC inst_3d.fields: 'DUMASS' , 'DU', - 'DUMASS25' , 'DU', - 'DUCONC' , 'DU', - 'SSMASS' , 'SS', - 'SSMASS25' , 'SS', - 'SSCONC' , 'SS', - 'CAMASSCA.bc' , 'CA.bc' , - 'CACONCCA.bc' , 'CA.bc' , - 'CAMASSCA.oc' , 'CA.oc' , - 'CACONCCA.oc' , 'CA.oc' , - 'SO4MASS' , 'SU', - 'SO4SAREA' , 'SU', - 'SO4SNUM' , 'SU', - 'SUCONC' , 'SU', - 'PSO2' , 'SU', - 'PMSA' , 'SU', - 'PSO4' , 'SU', - 'PSO4G' , 'SU', - 'PSO4WET' , 'SU', - 'PSO4AQ' , 'SU', - 'DMS' , 'SU', - 'SO2' , 'SU', - 'SO4' , 'SU', - 'MSA' , 'SU', - :: + 'DUMASS25' , 'DU', + 'DUCONC' , 'DU', + 'SSMASS' , 'SS', + 'SSMASS25' , 'SS', + 'SSCONC' , 'SS', + 'CAMASSCA.bc' , 'CA.bc' , + 'CACONCCA.bc' , 'CA.bc' , + 'CAMASSCA.oc' , 'CA.oc' , + 'CACONCCA.oc' , 'CA.oc' , + 'SO4MASS' , 'SU', + 'SO4SAREA' , 'SU', + 'SO4SNUM' , 'SU', + 'SUCONC' , 'SU', + 'PSO2' , 'SU', + 'PMSA' , 'SU', + 'PSO4' , 'SU', + 'PSO4G' , 'SU', + 'PSO4WET' , 'SU', + 'PSO4AQ' , 'SU', + 'DMS' , 'SU', + 'SO2' , 'SU', + 'SO4' , 'SU', + 'MSA' , 'SU', + :: # @@ -386,13 +386,12 @@ PC720x361-DC.LM: 72 inst_aod.ref_time: 000000, inst_aod.grid_label: PC720x361-DC inst_aod.fields: 'CA.bcEXTTAU' , 'CA.bc' , 'AOD_BC', - 'CA.ocEXTTAU' , 'CA.oc' , 'AOD_OC', - 'DUEXTTAU' , 'DU' , 'AOD_DU', - 'NIEXTTAU' , 'NI' , 'AOD_NI', - 'SSEXTTAU' , 'SS' , 'AOD_SS', - 'SUEXTTAU' , 'SU' , 'AOD_SU', - 'TOTEXTTAU' , 'GOCART2G' , 'AOD' , - :: + 'CA.ocEXTTAU' , 'CA.oc' , 'AOD_OC', + 'DUEXTTAU' , 'DU' , 'AOD_DU', + 'SSEXTTAU' , 'SS' , 'AOD_SS', + 'SUEXTTAU' , 'SU' , 'AOD_SU', + 'TOTEXTTAU' , 'GOCART2G' , 'AOD' , + :: tavg_2d_rad.format: 'CFIO' , @@ -404,30 +403,30 @@ PC720x361-DC.LM: 72 tavg_2d_rad.ref_time: 000000, tavg_2d_rad.grid_label: PC720x361-DC tavg_2d_rad.fields: 'CA.bcEXTTAU' , 'CA.bc' , - 'CA.ocEXTTAU' , 'CA.oc' , - 'CASCATAUCA.bc' , 'CA.bc' , - 'CASCATAUCA.oc' , 'CA.oc' , - 'DUEXTTAU' , 'DU' , - 'DUSCATAU' , 'DU' , - 'DUEXTT25' , 'DU' , - 'DUSCAT25' , 'DU' , - 'DUEXTTFM' , 'DU' , - 'DUSCATFM' , 'DU' , - 'NIEXTTFM' , 'NI' , - 'NISCATFM' , 'NI' , - 'NIEXTT25' , 'NI' , - 'NISCAT25' , 'NI' , - 'NIEXTTAU' , 'NI' , - 'NISCATAU' , 'NI' , - 'SSEXTTAU' , 'SS' , - 'SSSCATAU' , 'SS' , - 
'SSEXTT25' , 'SS' , - 'SSSCAT25' , 'SS' , - 'SSEXTTFM' , 'SS' , - 'SSSCATFM' , 'SS' , - 'SUEXTTAU' , 'SU' , - 'SUSCATAU' , 'SU' , - :: + 'CA.ocEXTTAU' , 'CA.oc' , + 'CASCATAUCA.bc' , 'CA.bc' , + 'CASCATAUCA.oc' , 'CA.oc' , + 'DUEXTTAU' , 'DU' , + 'DUSCATAU' , 'DU' , + 'DUEXTT25' , 'DU' , + 'DUSCAT25' , 'DU' , + 'DUEXTTFM' , 'DU' , + 'DUSCATFM' , 'DU' , + 'NIEXTTFM' , 'NI' , + 'NISCATFM' , 'NI' , + 'NIEXTT25' , 'NI' , + 'NISCAT25' , 'NI' , + 'NIEXTTAU' , 'NI' , + 'NISCATAU' , 'NI' , + 'SSEXTTAU' , 'SS' , + 'SSSCATAU' , 'SS' , + 'SSEXTT25' , 'SS' , + 'SSSCAT25' , 'SS' , + 'SSEXTTFM' , 'SS' , + 'SSSCATFM' , 'SS' , + 'SUEXTTAU' , 'SU' , + 'SUSCATAU' , 'SU' , + :: tavg_3d_rad.format: 'CFIO' , tavg_3d_rad.template: '%y4%m2%d2_%h2%n2z.nc4', @@ -439,15 +438,15 @@ PC720x361-DC.LM: 72 tavg_3d_rad.grid_label: PC720x361-DC tavg_3d_rad.splitField: 1, tavg_3d_rad.fields: 'CAEXTCOEFCA.bc' , 'CA.bc' , - 'CAEXTCOEFCA.oc' , 'CA.oc' , - 'CASCACOEFCA.bc' , 'CA.bc' , - 'CASCACOEFCA.oc' , 'CA.oc' , - 'DUEXTCOEF' , 'DU' , - 'DUSCACOEF' , 'DU' , - 'NIEXTCOEF' , 'NI' , - 'NISCACOEF' , 'NI' , - 'SSEXTCOEF' , 'SS' , - 'SSSCACOEF' , 'SS' , - 'SUEXTCOEF' , 'SU' , - 'SUSCACOEF' , 'SU' , - :: + 'CAEXTCOEFCA.oc' , 'CA.oc' , + 'CASCACOEFCA.bc' , 'CA.bc' , + 'CASCACOEFCA.oc' , 'CA.oc' , + 'DUEXTCOEF' , 'DU' , + 'DUSCACOEF' , 'DU' , + 'NIEXTCOEF' , 'NI' , + 'NISCACOEF' , 'NI' , + 'SSEXTCOEF' , 'SS' , + 'SSSCACOEF' , 'SS' , + 'SUEXTCOEF' , 'SU' , + 'SUSCACOEF' , 'SU' , + :: diff --git a/parm/ufs/chem/CAP.rc b/parm/ufs/chem/CAP.rc index d40106ae81..2b8e71975b 100644 --- a/parm/ufs/chem/CAP.rc +++ b/parm/ufs/chem/CAP.rc @@ -64,12 +64,13 @@ CAP_EXPORTS: CA.bcphilic,CA.bc bc2 CA.ocphobic,CA.oc oc1 CA.ocphilic,CA.oc oc2 - NH3,NI nh3 - NH4a,NI nh4a - NO3an1,NI no3an1 - NO3an2,NI no3an2 - NO3an3,NI no3an3 :: +# NH3,NI nh3 +# NH4a,NI nh4a +# NO3an1,NI no3an1 +# NO3an2,NI no3an2 +# NO3an3,NI no3an3 +# :: # Diagnostic Tracers Table (only PM10 & PM25 available) diff --git a/parm/ufs/chem/DU2G_instance_DU.rc b/parm/ufs/chem/DU2G_instance_DU.rc index c701efb128..6c30cdf06b 100644 --- a/parm/ufs/chem/DU2G_instance_DU.rc +++ b/parm/ufs/chem/DU2G_instance_DU.rc @@ -41,6 +41,8 @@ pressure_lid_in_hPa: 0.01 emission_scheme: fengsha # choose among: fengsha, ginoux, k14 # FENGSHA settings -alpha: 0.04 +alpha: 0.1 gamma: 1.0 +soil_moisture_factor: 1 +soil_drylimit_factor: 1 vertical_to_horizontal_flux_ratio_limit: 2.e-04 diff --git a/parm/ufs/chem/ExtData.gbbepx b/parm/ufs/chem/ExtData.gbbepx index 0661e8412a..3bd516c772 100644 --- a/parm/ufs/chem/ExtData.gbbepx +++ b/parm/ufs/chem/ExtData.gbbepx @@ -2,7 +2,7 @@ # GBBEPx #-------------------------------------------------------------------------------------------------------------------------------- -SU_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none SO2 ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc -OC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none OC ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc -BC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none BC ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc -EMI_NH3_BB NA N Y %y4-%m2-%d2t12:00:00 none none NH3 ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc +SU_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 SO2 ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc +OC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 OC ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc +BC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 BC 
ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc +# EMI_NH3_BB NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 NH3 ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc diff --git a/parm/ufs/chem/ExtData.other b/parm/ufs/chem/ExtData.other index 5eb1e1dd0b..789576305e 100644 --- a/parm/ufs/chem/ExtData.other +++ b/parm/ufs/chem/ExtData.other @@ -7,16 +7,13 @@ TROPP 'Pa' Y N - 0.0 1.0 TROPP /dev/null:10000. #====== Dust Imports ================================================= -# Ginoux input files -DU_SRC NA N Y - none none du_src ExtData/Dust/gocart.dust_source.v5a.x1152_y721.nc - # FENGSHA input files. Note: regridding should be N or E - Use files with _FillValue != NaN -DU_CLAY '1' Y E - none none clayfrac ExtData/Dust/FENGSHA_p81_10km_inputs.nc -DU_SAND '1' Y E - none none sandfrac ExtData/Dust/FENGSHA_p81_10km_inputs.nc +DU_CLAY '1' Y E - none none clayfrac ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc +DU_SAND '1' Y E - none none sandfrac ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc DU_SILT '1' Y E - none none siltfrac /dev/null -DU_SSM '1' Y E - none none ssm /dev/null:1.0 -DU_RDRAG '1' Y E %y4-%m2-%d2t12:00:00 none none albedo_drag ExtData/Dust/FENGSHA_p81_10km_inputs.nc -DU_UTHRES '1' Y E - none none uthres ExtData/Dust/FENGSHA_p81_10km_inputs.nc +DU_SSM '1' Y E - none none sep ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc +DU_RDRAG '1' Y E %y4-%m2-%d2t12:00:00 none none albedo_drag ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc +DU_UTHRES '1' Y E - none none uthres ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc #====== Sulfate Sources ================================================= # Anthropogenic (BF & FF) emissions -- allowed to input as two layers @@ -135,16 +132,16 @@ BRC_AVIATION_CRS NA Y Y %y4-%m2-%d2t12:00:00 none none oc_aviation /dev/null # SOA production pSOA_BIOB_VOC NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null -# ======= Nitrate Sources ======== -EMI_NH3_AG 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_ag ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc -EMI_NH3_EN 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_en /dev/null -EMI_NH3_IN 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_in ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc -EMI_NH3_RE 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_re ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc -EMI_NH3_TR 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_tr ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc -EMI_NH3_OC 'kg m-2 s-1' Y Y %y4-%m2-%d2T12:00:00 none none emiss_ocn ExtData/PIESA/sfc/GEIA.emis_NH3.ocean.x576_y361.t12.20080715_12z.nc4 - -# -------------------------------------------------------------- -# If using 64 levels please replace this section with the correct values (ie replace 127 with 64) -NITRATE_HNO3 'mol mol-1' Y N %y4-%m2-%d2T12:00:00 none 0.20 hno3 ExtData/PIESA/L127/GMI.vmr_HNO3.x144_y91.t12.2006.nc4 -# -------------------------------------------------------------- -NI_regionMask NA Y V - none none REGION_MASK ExtData/PIESA/sfc/ARCTAS.region_mask.x540_y361.2008.nc +# # ======= Nitrate Sources ======== +# EMI_NH3_AG 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_ag ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +# EMI_NH3_EN 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_en /dev/null +# EMI_NH3_IN 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_in ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +# 
EMI_NH3_RE 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_re ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +# EMI_NH3_TR 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_tr ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +# EMI_NH3_OC 'kg m-2 s-1' Y Y %y4-%m2-%d2T12:00:00 none none emiss_ocn ExtData/PIESA/sfc/GEIA.emis_NH3.ocean.x576_y361.t12.20080715_12z.nc4 + +# # -------------------------------------------------------------- +# # If using 64 levels please replace this section with the correct values (ie replace 127 with 64) +# NITRATE_HNO3 'mol mol-1' Y N %y4-%m2-%d2T12:00:00 none 0.20 hno3 ExtData/PIESA/L127/GMI.vmr_HNO3.x144_y91.t12.2006.nc4 +# # -------------------------------------------------------------- +# NI_regionMask NA Y V - none none REGION_MASK ExtData/PIESA/sfc/ARCTAS.region_mask.x540_y361.2008.nc diff --git a/parm/ufs/chem/ExtData.qfed b/parm/ufs/chem/ExtData.qfed index 86ab3c86cc..b3a721211e 100644 --- a/parm/ufs/chem/ExtData.qfed +++ b/parm/ufs/chem/ExtData.qfed @@ -2,7 +2,7 @@ # QFED #-------------------------------------------------------------------------------------------------------------------------------- -SU_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_so2.006.%y4%m2%d2.nc4 -OC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_oc.006.%y4%m2%d2.nc4 -BC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_bc.006.%y4%m2%d2.nc4 -EMI_NH3_BB NA N Y %y4-%m2-%d2t12:00:00 none none biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_nh3.006.%y4%m2%d2.nc4 +SU_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_so2.006.%y4%m2%d2.nc4 +OC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_oc.006.%y4%m2%d2.nc4 +BC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_bc.006.%y4%m2%d2.nc4 +# EMI_NH3_BB NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_nh3.006.%y4%m2%d2.nc4 diff --git a/parm/ufs/chem/GOCART2G_GridComp.rc b/parm/ufs/chem/GOCART2G_GridComp.rc index 18954f8cdd..2dd63a6d17 100644 --- a/parm/ufs/chem/GOCART2G_GridComp.rc +++ b/parm/ufs/chem/GOCART2G_GridComp.rc @@ -31,7 +31,7 @@ PASSIVE_INSTANCES_SU: ACTIVE_INSTANCES_CA: CA.oc CA.bc # CA.oc.data CA.bc.data PASSIVE_INSTANCES_CA: -ACTIVE_INSTANCES_NI: NI # NI.data +ACTIVE_INSTANCES_NI: # NI # NI.data PASSIVE_INSTANCES_NI: # Set optics parameters diff --git a/parm/ufs/chem/gocart_tracer.list b/parm/ufs/chem/gocart_tracer.list index 8b0202e2c4..8742aff67c 100644 --- a/parm/ufs/chem/gocart_tracer.list +++ b/parm/ufs/chem/gocart_tracer.list @@ -16,10 +16,5 @@ seas2 seas3 seas4 seas5 -nh3 -nh4a -no3an1 -no3an2 -no3an3 pm25 pm10 diff --git a/parm/ufs/fv3/diag_table.aero b/parm/ufs/fv3/diag_table.aero index 683c50cc4a..6f96b462f1 100644 --- a/parm/ufs/fv3/diag_table.aero +++ b/parm/ufs/fv3/diag_table.aero @@ -19,10 +19,10 @@ "gfs_dyn", "seas3", "seas3", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "seas4", "seas4", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "seas5", "seas5", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "nh3", "nh3", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "nh4a", "nh4a", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "no3an1", "no3an1", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "no3an2", "no3an2", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", 
"no3an3", "no3an3", "fv3_history", "all", .false., "none", 2 +# "gfs_dyn", "nh3", "nh3", "fv3_history", "all", .false., "none", 2 +# "gfs_dyn", "nh4a", "nh4a", "fv3_history", "all", .false., "none", 2 +# "gfs_dyn", "no3an1", "no3an1", "fv3_history", "all", .false., "none", 2 +# "gfs_dyn", "no3an2", "no3an2", "fv3_history", "all", .false., "none", 2 +# "gfs_dyn", "no3an3", "no3an3", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "pm25", "pm25", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "pm10", "pm10", "fv3_history", "all", .false., "none", 2 diff --git a/parm/ufs/fv3/field_table.aero b/parm/ufs/fv3/field_table.aero index d917dd786c..385192803f 100644 --- a/parm/ufs/fv3/field_table.aero +++ b/parm/ufs/fv3/field_table.aero @@ -89,31 +89,31 @@ "units", "ug/kg" "tracer_usage", "chemistry" "profile_type", "fixed", "surface_value=0.0" / - "TRACER", "atmos_mod", "nh3" - "longname", "primary NH3 mixing ratio" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=0.0" / - "TRACER", "atmos_mod", "nh4a" - "longname", "primary NH4a mixing ratio" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=0.0" / - "TRACER", "atmos_mod", "no3an1" - "longname", "primary NO3an1 mixing ratio" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=0.0" / - "TRACER", "atmos_mod", "no3an2" - "longname", "primary NO3an2 mixing ratio" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=0.0" / - "TRACER", "atmos_mod", "no3an3" - "longname", "primary NO3an3 mixing ratio" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=0.0" / +# "TRACER", "atmos_mod", "nh3" +# "longname", "primary NH3 mixing ratio" +# "units", "ug/kg" +# "tracer_usage", "chemistry" +# "profile_type", "fixed", "surface_value=0.0" / +# "TRACER", "atmos_mod", "nh4a" +# "longname", "primary NH4a mixing ratio" +# "units", "ug/kg" +# "tracer_usage", "chemistry" +# "profile_type", "fixed", "surface_value=0.0" / +# "TRACER", "atmos_mod", "no3an1" +# "longname", "primary NO3an1 mixing ratio" +# "units", "ug/kg" +# "tracer_usage", "chemistry" +# "profile_type", "fixed", "surface_value=0.0" / +# "TRACER", "atmos_mod", "no3an2" +# "longname", "primary NO3an2 mixing ratio" +# "units", "ug/kg" +# "tracer_usage", "chemistry" +# "profile_type", "fixed", "surface_value=0.0" / +# "TRACER", "atmos_mod", "no3an3" +# "longname", "primary NO3an3 mixing ratio" +# "units", "ug/kg" +# "tracer_usage", "chemistry" +# "profile_type", "fixed", "surface_value=0.0" / # diagnostic PM tracers "TRACER", "atmos_mod", "pm25" "longname", "primary PM25 mixing ratio" diff --git a/sorc/checkout.sh b/sorc/checkout.sh index de4fcdf838..382e7b6f32 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -151,7 +151,7 @@ source "${topdir}/../workflow/gw_setup.sh" # The checkout version should always be a speciifc commit (hash or tag), not a branch errs=0 # Checkout UFS submodules in parallel -checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-68050e5}" "8" & +checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-63a43d9}" "8" ; errs=$((errs + $?)) # Run all other checkouts simultaneously with just 1 core each to handle submodules. checkout "wxflow" "https://github.com/NOAA-EMC/wxflow" "528f5ab" &