From b0656aa9073f7deb0e387c00435c8076d9c58261 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 9 May 2024 18:37:00 +0000 Subject: [PATCH 01/90] start to add new task --- workflow/applications/gfs_cycled.py | 4 ++++ workflow/rocoto/gfs_tasks.py | 29 +++++++++++++++++++++++++++++ workflow/rocoto/tasks.py | 2 +- 3 files changed, 34 insertions(+), 1 deletion(-) diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 4d785bc4da..464137d6a6 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -111,6 +111,8 @@ def _get_app_configs(self): if self.do_jedisnowda: configs += ['prepsnowobs', 'snowanl'] + if self.do_hybvar: + configs += ['esnowanl'] if self.do_mos: configs += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', @@ -169,6 +171,8 @@ def get_task_names(self): hybrid_tasks += ['eobs', 'eupd', 'echgres'] hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] hybrid_after_eupd_tasks += ['ecen', 'esfc', 'efcs', 'epos', 'earc', 'cleanup'] + if self.do_jedisnowda: + hybrid_after_eupd_tasks += ['esnowanl'] # Collect all "gdas" cycle tasks gdas_tasks = gdas_gfs_common_tasks_before_fcst.copy() diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 6125a33dec..c4c761049a 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -602,6 +602,32 @@ def snowanl(self): task = rocoto.create_task(task_dict) return task + def esnowanl(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prepsnowobs'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}snowanl'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('esnowanl') + task_name = f'{self.cdump}esnowanl' + task_dict = {'task_name': task_name, + 'resources': resources, + 'dependency': dependencies, + 'envars': self.envars, + 'cycledef': self.cdump.replace('enkf', ''), + 'command': f'{self.HOMEgfs}/jobs/rocoto/esnowanl.sh', + 'job_name': f'{self.pslot}_{task_name}_@H', + 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', + 'maxtries': '&MAXTRIES;' + } + + task = rocoto.create_task(task_dict) + return task + + def prepoceanobs(self): ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'}) @@ -2526,6 +2552,9 @@ def esfc(self): else: dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'} deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_jedisnowda: + dep_dict = {'type': 'task', 'name': f'{self.cdump}esnowanl'} + deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('esfc') diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index a8b4eb9fac..245bd1c83b 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -20,7 +20,7 @@ class Tasks: 'eobs', 'eomg', 'epos', 'esfc', 'eupd', 'atmensanlinit', 'atmensanlrun', 'atmensanlfinal', 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', - 'prepsnowobs', 'snowanl', + 'prepsnowobs', 'snowanl', 'esnowanl', 'fcst', 'atmanlupp', 'atmanlprod', 'atmupp', 'goesupp', 'atmosprod', 'oceanprod', 'iceprod', From d16119dc29ed36b443b71c13c74e1be82b5f53d0 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 9 May 2024 18:55:31 +0000 Subject: [PATCH 02/90] add new j-job --- jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS | 47 
+++++++++++++++++++++++++++++ jobs/rocoto/esnowanl.sh | 24 +++++++++++++++ 2 files changed, 71 insertions(+) create mode 100755 jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS create mode 100755 jobs/rocoto/esnowanl.sh diff --git a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS new file mode 100755 index 0000000000..b7d8c37060 --- /dev/null +++ b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS @@ -0,0 +1,47 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export DATA=${DATA:-${DATAROOT}/${RUN}snowanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "snowanl" -c "base snowanl" + +############################################## +# Set variables used in the script +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_SNOW_ANALYSIS COM_CONF + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +mkdir -m 775 -p "${COM_SNOW_ANALYSIS}" "${COM_CONF}" + +############################################################### +# Run relevant script + +EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exglobal_snow_analysis.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/rocoto/esnowanl.sh b/jobs/rocoto/esnowanl.sh new file mode 100755 index 0000000000..627dd860f4 --- /dev/null +++ b/jobs/rocoto/esnowanl.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="snowanl" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_SNOW_ANALYSIS" +status=$? 
+exit "${status}" From b4c97f84a5ea17a32dc8b19436bd3148b5996845 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 9 May 2024 19:13:53 +0000 Subject: [PATCH 03/90] Save to do some initial testing --- jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS | 6 ++--- jobs/rocoto/esnowanl.sh | 4 +-- parm/config/gfs/config.esnowanl | 14 ++++++++++ parm/config/gfs/config.resources | 31 +++++++++++++++++++++- scripts/exglobal_snow_ensemble_analysis.py | 24 +++++++++++++++++ 5 files changed, 73 insertions(+), 6 deletions(-) create mode 100644 parm/config/gfs/config.esnowanl create mode 100755 scripts/exglobal_snow_ensemble_analysis.py diff --git a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS index b7d8c37060..b8a0863a69 100755 --- a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS @@ -1,8 +1,8 @@ #! /usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" -export DATA=${DATA:-${DATAROOT}/${RUN}snowanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "snowanl" -c "base snowanl" +export DATA=${DATA:-${DATAROOT}/${RUN}esnowanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "esnowanl" -c "base esnowanl" ############################################## # Set variables used in the script @@ -28,7 +28,7 @@ mkdir -m 775 -p "${COM_SNOW_ANALYSIS}" "${COM_CONF}" ############################################################### # Run relevant script -EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exglobal_snow_analysis.py} +EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exglobal_snow_ensemble_analysis.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/rocoto/esnowanl.sh b/jobs/rocoto/esnowanl.sh index 627dd860f4..6aa640ee5e 100755 --- a/jobs/rocoto/esnowanl.sh +++ b/jobs/rocoto/esnowanl.sh @@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? [[ ${status} -ne 0 ]] && exit "${status}" -export job="snowanl" +export job="esnowanl" export jobid="${job}.$$" ############################################################### @@ -19,6 +19,6 @@ export PYTHONPATH ############################################################### # Execute the JJOB -"${HOMEgfs}/jobs/JGLOBAL_SNOW_ANALYSIS" +"${HOMEgfs}/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS" status=$? exit "${status}" diff --git a/parm/config/gfs/config.esnowanl b/parm/config/gfs/config.esnowanl new file mode 100644 index 0000000000..c340791d23 --- /dev/null +++ b/parm/config/gfs/config.esnowanl @@ -0,0 +1,14 @@ +#! 
/usr/bin/env bash + +########## config.esnowanl ########## +# configuration common to snow ensemble analysis tasks + +echo "BEGIN: config.esnowanl" + +# Get task specific resources +source "${EXPDIR}/config.resources" esnowanl + +export io_layout_x=@IO_LAYOUT_X@ +export io_layout_y=@IO_LAYOUT_Y@ + +echo "END: config.esnowanl" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 89953c7b84..d7c86078a1 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -12,7 +12,7 @@ if (( $# != 1 )); then echo "prep prepsnowobs prepatmiodaobs" echo "atmanlinit atmanlvar atmanlfv3inc atmanlfinal" echo "atmensanlinit atmensanlrun atmensanlfinal" - echo "snowanl" + echo "snowanl esnowanl" echo "aeroanlinit aeroanlrun aeroanlfinal" echo "anal sfcanl analcalc analdiag fcst echgres" echo "upp atmos_products" @@ -274,6 +274,35 @@ case ${step} in export npe_node_snowanl=$(( npe_node_max / nth_snowanl )) ;; + "esnowanl") + # below lines are for creating JEDI YAML + case ${CASE} in + "C768") + layout_x=6 + layout_y=6 + ;; + "C384") + layout_x=5 + layout_y=5 + ;; + "C192" | "C96" | "C48") + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + esac + + export layout_x + export layout_y + + export wtime_esnowanl="00:15:00" + export npe_esnowanl=$(( layout_x * layout_y * 6 )) + export nth_esnowanl=1 + export npe_node_esnowanl=$(( npe_node_max / nth_esnowanl )) + ;; + "aeroanlinit") # below lines are for creating JEDI YAML case ${CASE} in diff --git a/scripts/exglobal_snow_ensemble_analysis.py b/scripts/exglobal_snow_ensemble_analysis.py new file mode 100755 index 0000000000..67faab97fd --- /dev/null +++ b/scripts/exglobal_snow_ensemble_analysis.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python3 +# exglobal_snow_ensemble_analysis.py +# This script creates an SnowEnsAnalysis class +# and runs the initialize, execute and finalize methods +# for a global Snow Depth ensemble analysis +import os + +from wxflow import Logger, cast_strdict_as_dtypedict +from pygfs.task.snow_ens_analysis import SnowEnsAnalysis + +# Initialize root logger +logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the snow ensemble analysis task + anl = SnowEnsAnalysis(config) + anl.initialize() + anl.execute() + anl.finalize() From 3667349f20abc51fed1cae87473a8e18659f3fb8 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 9 May 2024 19:23:47 +0000 Subject: [PATCH 04/90] more updates --- scripts/exglobal_snow_ensemble_analysis.py | 2 +- ush/python/pygfs/task/snowens_analysis.py | 214 +++++++++++++++++++++ 2 files changed, 215 insertions(+), 1 deletion(-) create mode 100644 ush/python/pygfs/task/snowens_analysis.py diff --git a/scripts/exglobal_snow_ensemble_analysis.py b/scripts/exglobal_snow_ensemble_analysis.py index 67faab97fd..0fdd4de152 100755 --- a/scripts/exglobal_snow_ensemble_analysis.py +++ b/scripts/exglobal_snow_ensemble_analysis.py @@ -6,7 +6,7 @@ import os from wxflow import Logger, cast_strdict_as_dtypedict -from pygfs.task.snow_ens_analysis import SnowEnsAnalysis +from pygfs.task.snowens_analysis import SnowEnsAnalysis # Initialize root logger logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) diff --git a/ush/python/pygfs/task/snowens_analysis.py 
b/ush/python/pygfs/task/snowens_analysis.py new file mode 100644 index 0000000000..995c1a23fb --- /dev/null +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -0,0 +1,214 @@ +#!/usr/bin/env python3 + +import os +from logging import getLogger +from typing import Dict, List +from pprint import pformat +import numpy as np +from netCDF4 import Dataset + +from wxflow import (AttrDict, + FileHandler, + to_fv3time, to_YMD, to_YMDH, to_timedelta, add_to_datetime, + rm_p, + parse_j2yaml, save_as_yaml, + Jinja, + logit, + Executable, + WorkflowException) +from pygfs.task.analysis import Analysis + +logger = getLogger(__name__.split('.')[-1]) + + +class SnowEnsAnalysis(Analysis): + """ + Class for global ensemble snow analysis tasks + """ + + NMEM_SNOWENS = 2 + + @logit(logger, name="SnowEnsAnalysis") + def __init__(self, config): + super().__init__(config) + + _res = int(self.config['CASE'][1:]) + _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2) + _letkfoi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.RUN}.t{self.runtime_config['cyc']:02d}z.letkfoi.yaml") + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'npx_ges': _res + 1, + 'npy_ges': _res + 1, + 'npz_ges': self.config.LEVS - 1, + 'npz': self.config.LEVS - 1, + 'SNOW_WINDOW_BEGIN': _window_begin, + 'SNOW_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H", + 'OPREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", + 'APREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", + 'jedi_yaml': _letkfoi_yaml + } + ) + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + + + @logit(logger) + def initialize(self) -> None: + """Initialize method for snow ensemble analysis + This method: + + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + super().initialize() + + + @logit(logger) + def execute(self) -> None: + """Run a series of tasks to create snow ensemble analysis + This method: + + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + + @logit(logger) + def finalize(self) -> None: + """Performs closing actions of the snow ensemble analysis task + This method: + - + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + + @staticmethod + @logit(logger) + def get_bkg_dict(config: Dict) -> Dict[str, List[str]]: + """Compile a dictionary of model background files to copy + + This method constructs a dictionary of FV3 RESTART files (coupler, sfc_data) + that are needed for global snow DA and returns said dictionary for use by the FileHandler class. + + Parameters + ---------- + config: Dict + Dictionary of key-value pairs needed in this method + Should contain the following keys: + COM_ATMOS_RESTART_PREV + DATA + current_cycle + ntiles + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + # NOTE for now this is FV3 RESTART files and just assumed to be fh006 + + # get FV3 sfc_data RESTART files, this will be a lot simpler when using history files + rst_dir = os.path.join(config.COM_ATMOS_RESTART_PREV) # for now, option later? 
+ run_dir = os.path.join(config.DATA, 'bkg') + + # Start accumulating list of background files to copy + bkglist = [] + + # snow DA needs coupler + basename = f'{to_fv3time(config.current_cycle)}.coupler.res' + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + # snow DA only needs sfc_data + for ftype in ['sfc_data']: + template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' + for itile in range(1, config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + bkg_dict = { + 'mkdir': [run_dir], + 'copy': bkglist + } + return bkg_dict + + + @staticmethod + @logit(logger) + def add_increments(config: Dict) -> None: + """Executes the program "apply_incr.exe" to create analysis "sfc_data" files by adding increments to backgrounds + + Parameters + ---------- + config: Dict + Dictionary of key-value pairs needed in this method + Should contain the following keys: + HOMEgfs + COM_ATMOS_RESTART_PREV + DATA + current_cycle + CASE + OCNRES + ntiles + APPLY_INCR_NML_TMPL + APPLY_INCR_EXE + APRUN_APPLY_INCR + + Raises + ------ + OSError + Failure due to OS issues + WorkflowException + All other exceptions + """ + + # need backgrounds to create analysis from increments after LETKF + logger.info("Copy backgrounds into anl/ directory for creating analysis from increments") + template = f'{to_fv3time(config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + anllist = [] + for itile in range(1, config.ntiles + 1): + filename = template.format(tilenum=itile) + src = os.path.join(config.COM_ATMOS_RESTART_PREV, filename) + dest = os.path.join(config.DATA, "anl", filename) + anllist.append([src, dest]) + FileHandler({'copy': anllist}).sync() + + logger.info("Create namelist for APPLY_INCR_EXE") + nml_template = config.APPLY_INCR_NML_TMPL + nml_data = Jinja(nml_template, config).render + logger.debug(f"apply_incr_nml:\n{nml_data}") + + nml_file = os.path.join(config.DATA, "apply_incr_nml") + with open(nml_file, "w") as fho: + fho.write(nml_data) + + logger.info("Link APPLY_INCR_EXE into DATA/") + exe_src = config.APPLY_INCR_EXE + exe_dest = os.path.join(config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + # execute APPLY_INCR_EXE to create analysis files + exe = Executable(config.APRUN_APPLY_INCR) + exe.add_default_arg(os.path.join(config.DATA, os.path.basename(exe_src))) + logger.info(f"Executing {exe}") + try: + exe() + except OSError: + raise OSError(f"Failed to execute {exe}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exe}") From 81b7b6ec5d65b7048844e9367d60c7892af1e200 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 16 May 2024 20:55:25 +0000 Subject: [PATCH 05/90] end of day commit --- ush/python/pygfs/task/snowens_analysis.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 995c1a23fb..378e230b7d 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -26,8 +26,6 @@ class SnowEnsAnalysis(Analysis): Class for global ensemble snow analysis tasks """ - NMEM_SNOWENS = 2 - @logit(logger, name="SnowEnsAnalysis") def __init__(self, config): super().__init__(config) @@ -54,7 +52,6 @@ def __init__(self, config): # task_config is everything that this task should need self.task_config = 
AttrDict(**self.config, **self.runtime_config, **local_dict) - @logit(logger) def initialize(self) -> None: """Initialize method for snow ensemble analysis @@ -69,7 +66,6 @@ def initialize(self) -> None: super().initialize() - @logit(logger) def execute(self) -> None: """Run a series of tasks to create snow ensemble analysis @@ -82,7 +78,6 @@ def execute(self) -> None: Instance of the SnowEnsAnalysis object """ - @logit(logger) def finalize(self) -> None: """Performs closing actions of the snow ensemble analysis task @@ -145,7 +140,6 @@ def get_bkg_dict(config: Dict) -> Dict[str, List[str]]: } return bkg_dict - @staticmethod @logit(logger) def add_increments(config: Dict) -> None: From 3aaa8f65bc8e14f5186ea2c731f748040bd74621 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 17 May 2024 20:46:31 +0000 Subject: [PATCH 06/90] End of day/week commit --- jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS | 13 ++++++--- parm/config/gfs/config.esnowanl | 5 ++++ ush/python/pygfs/task/snow_analysis.py | 32 ++++++++++++++++------- ush/python/pygfs/task/snowens_analysis.py | 25 +++++++++++++++--- workflow/applications/gfs_cycled.py | 4 +-- 5 files changed, 61 insertions(+), 18 deletions(-) diff --git a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS index b8a0863a69..ab6232555f 100755 --- a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS @@ -13,18 +13,23 @@ GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") gPDY=${GDATE:0:8} gcyc=${GDATE:8:2} GDUMP="gdas" +GDUMP_ENS="enkf${GDUMP}" ############################################## # Begin JOB SPECIFIC work ############################################## # Generate COM variables from templates -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_SNOW_ANALYSIS COM_CONF - -RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ - COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_ATMOS_ANALYSIS COM_CONF +YMD=${PDY} HH=${cyc} declare_from_tmpl COM_SNOW_ANALYSIS mkdir -m 775 -p "${COM_SNOW_ANALYSIS}" "${COM_CONF}" +for imem in $(seq 1 ${NMEM_ENS}); do + memchar="mem$(printf %03i "${imem}")" + MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${PDY} HH=${cyc} declare_from_tmpl COM_SNOW_ANALYSIS + mkdir -m 775 -p "${COM_SNOW_ANALYSIS}" +done + ############################################################### # Run relevant script diff --git a/parm/config/gfs/config.esnowanl b/parm/config/gfs/config.esnowanl index c340791d23..cca9ac3d89 100644 --- a/parm/config/gfs/config.esnowanl +++ b/parm/config/gfs/config.esnowanl @@ -8,7 +8,12 @@ echo "BEGIN: config.esnowanl" # Get task specific resources source "${EXPDIR}/config.resources" esnowanl +export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2" +export JCB_ALGO_YAML="${PARMgfs}/gdas/snow/fv3jedi_land_ensrecenter.yaml.j2" + export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ +export JEDIEXE=${EXECgfs}/gdasapp_land_ensrecenter.x + echo "END: config.esnowanl" diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index fe21a67536..a11fdcb936 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -299,7 +299,7 @@ def execute(self) -> None: keys = ['HOMEgfs', 'DATA', 'current_cycle', 'COM_ATMOS_RESTART_PREV', 'COM_SNOW_ANALYSIS', 'APREFIX', 'SNOWDEPTHVAR', 'BESTDDEV', 'CASE', 'OCNRES', 'ntiles', - 'APRUN_SNOWANL', 'JEDIEXE', 'jedi_yaml', + 'APRUN_SNOWANL', 'JEDIEXE', 'jedi_yaml', 'DOIAU', 
'SNOW_WINDOW_BEGIN', 'APPLY_INCR_NML_TMPL', 'APPLY_INCR_EXE', 'APRUN_APPLY_INCR'] for key in keys: localconf[key] = self.task_config[key] @@ -357,12 +357,18 @@ def finalize(self) -> None: FileHandler(yaml_copy).sync() logger.info("Copy analysis to COM") - template = f'{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + if self.task_config.DOIAU: + bkgtime = self.task_config.SNOW_WINDOW_BEGIN + else: + bkgtime = self.task_config.current_cycle + template_bkg = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' + template_anl = f'{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' anllist = [] for itile in range(1, self.task_config.ntiles + 1): - filename = template.format(tilenum=itile) - src = os.path.join(self.task_config.DATA, 'anl', filename) - dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename) + filename_anl = template_anl.format(tilenum=itile) + filename_bkg = template_bkg.format(tilenum=itile) + src = os.path.join(self.task_config.DATA, 'anl', filename_anl) + dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename_bkg) anllist.append([src, dest]) FileHandler({'copy': anllist}).sync() @@ -542,6 +548,8 @@ def add_increments(config: Dict) -> None: APPLY_INCR_NML_TMPL APPLY_INCR_EXE APRUN_APPLY_INCR + DOIAU + SNOW_WINDOW_BEGIN Raises ------ @@ -553,12 +561,18 @@ def add_increments(config: Dict) -> None: # need backgrounds to create analysis from increments after LETKF logger.info("Copy backgrounds into anl/ directory for creating analysis from increments") - template = f'{to_fv3time(config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + if config.DOIAU: + bkgtime = config.SNOW_WINDOW_BEGIN + else: + bkgtime = config.current_cycle + template_bkg = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' + template_anl = f'{to_fv3time(config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' anllist = [] for itile in range(1, config.ntiles + 1): - filename = template.format(tilenum=itile) - src = os.path.join(config.COM_ATMOS_RESTART_PREV, filename) - dest = os.path.join(config.DATA, "anl", filename) + filename_bkg = template_bkg.format(tilenum=itile) + filename_anl = template_anl.format(tilenum=itile) + src = os.path.join(config.COM_ATMOS_RESTART_PREV, filename_bkg) + dest = os.path.join(config.DATA, "anl", filename_anl) anllist.append([src, dest]) FileHandler({'copy': anllist}).sync() diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 378e230b7d..a846981691 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -2,7 +2,7 @@ import os from logging import getLogger -from typing import Dict, List +from typing import Dict, List, Any from pprint import pformat import numpy as np from netCDF4 import Dataset @@ -32,7 +32,7 @@ def __init__(self, config): _res = int(self.config['CASE'][1:]) _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2) - _letkfoi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.RUN}.t{self.runtime_config['cyc']:02d}z.letkfoi.yaml") + _recenter_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.RUN}.t{self.runtime_config['cyc']:02d}z.land_recenter.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( @@ -43,9 +43,11 @@ def __init__(self, config): 'npz': self.config.LEVS - 1, 'SNOW_WINDOW_BEGIN': _window_begin, 'SNOW_WINDOW_LENGTH': 
f"PT{self.config['assim_freq']}H", + 'ATM_WINDOW_BEGIN': _window_begin, + 'ATM_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H", 'OPREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", 'APREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", - 'jedi_yaml': _letkfoi_yaml + 'jedi_yaml': _recenter_yaml, } ) @@ -66,6 +68,9 @@ def initialize(self) -> None: super().initialize() + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) + logger.info(f"Wrote recentering YAML to: {self.task_config.jedi_yaml}") + @logit(logger) def execute(self) -> None: """Run a series of tasks to create snow ensemble analysis @@ -206,3 +211,17 @@ def add_increments(config: Dict) -> None: raise OSError(f"Failed to execute {exe}") except Exception: raise WorkflowException(f"An error occured during execution of {exe}") + + def get_obs_dict(self) -> Dict[str, Any]: + obs_dict = { + 'mkdir': [], + 'copy': [], + } + return obs_dict + + def get_bias_dict(self) -> Dict[str, Any]: + bias_dict = { + 'mkdir': [], + 'copy': [], + } + return bias_dict diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 464137d6a6..58bcf07ba3 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -170,9 +170,9 @@ def get_task_names(self): else: hybrid_tasks += ['eobs', 'eupd', 'echgres'] hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] - hybrid_after_eupd_tasks += ['ecen', 'esfc', 'efcs', 'epos', 'earc', 'cleanup'] if self.do_jedisnowda: - hybrid_after_eupd_tasks += ['esnowanl'] + hybrid_tasks += ['esnowanl'] + hybrid_after_eupd_tasks += ['ecen', 'esfc', 'efcs', 'epos', 'earc', 'cleanup'] # Collect all "gdas" cycle tasks gdas_tasks = gdas_gfs_common_tasks_before_fcst.copy() From 29641b26b5584409faa2c9f1568887571fe8621e Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Mon, 20 May 2024 18:06:59 +0000 Subject: [PATCH 07/90] save --- scripts/exglobal_atmos_sfcanl.sh | 15 +++- ush/python/pygfs/task/snow_analysis.py | 120 +++++++++++++++---------- 2 files changed, 83 insertions(+), 52 deletions(-) diff --git a/scripts/exglobal_atmos_sfcanl.sh b/scripts/exglobal_atmos_sfcanl.sh index 8ac3f285e5..b9756c81bc 100755 --- a/scripts/exglobal_atmos_sfcanl.sh +++ b/scripts/exglobal_atmos_sfcanl.sh @@ -167,9 +167,15 @@ if [[ ${DOIAU} = "YES" ]]; then # update surface restarts at the beginning of the window, if IAU # For now assume/hold dtfanl.nc valid at beginning of window for n in $(seq 1 ${ntiles}); do - ${NCP} "${COM_ATMOS_RESTART_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ - "${COM_ATMOS_RESTART}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" - ${NLN} "${COM_ATMOS_RESTART_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}" + if [[ ${DO_JEDISNOWDA:-"NO"} = "YES" ]]; then + ${NCP} "${COM_SNOW_ANALYSIS}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ + "${COM_ATMOS_RESTART}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" + ${NLN} "${COM_SNOW_ANALYSIS}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}" + else + ${NCP} "${COM_ATMOS_RESTART_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ + "${COM_ATMOS_RESTART}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" + ${NLN} "${COM_ATMOS_RESTART_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}" + fi ${NLN} "${COM_ATMOS_RESTART}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" "${DATA}/fnbgso.00${n}" ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.00${n}" ${NLN} 
"${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.00${n}" @@ -188,11 +194,12 @@ for n in $(seq 1 ${ntiles}); do if [[ ${DO_JEDISNOWDA:-"NO"} = "YES" ]]; then ${NCP} "${COM_SNOW_ANALYSIS}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" + ${NLN} "${COM_SNOW_ANALYSIS}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}" else ${NCP} "${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" + ${NLN} "${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}" fi - ${NLN} "${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}" ${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" "${DATA}/fnbgso.00${n}" ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.00${n}" ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.00${n}" diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index a11fdcb936..dc5a215028 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -357,19 +357,20 @@ def finalize(self) -> None: FileHandler(yaml_copy).sync() logger.info("Copy analysis to COM") + bkgtimes = [] if self.task_config.DOIAU: - bkgtime = self.task_config.SNOW_WINDOW_BEGIN - else: - bkgtime = self.task_config.current_cycle - template_bkg = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' - template_anl = f'{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + # need both beginning and middle of window + bkgtimes.append(self.task_config.SNOW_WINDOW_BEGIN) + bkgtimes.append(self.task_config.current_cycle) anllist = [] - for itile in range(1, self.task_config.ntiles + 1): - filename_anl = template_anl.format(tilenum=itile) - filename_bkg = template_bkg.format(tilenum=itile) - src = os.path.join(self.task_config.DATA, 'anl', filename_anl) - dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename_bkg) - anllist.append([src, dest]) + for bkgtime in bkgtimes: + template = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' + for itile in range(1, self.task_config.ntiles + 1): + filename = template.format(tilenum=itile) + filename = template.format(tilenum=itile) + src = os.path.join(self.task_config.DATA, 'anl', filename) + dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename) + anllist.append([src, dest]) FileHandler({'copy': anllist}).sync() logger.info('Copy increments to COM') @@ -561,44 +562,67 @@ def add_increments(config: Dict) -> None: # need backgrounds to create analysis from increments after LETKF logger.info("Copy backgrounds into anl/ directory for creating analysis from increments") + bkgtimes = [] if config.DOIAU: - bkgtime = config.SNOW_WINDOW_BEGIN - else: - bkgtime = config.current_cycle - template_bkg = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' - template_anl = f'{to_fv3time(config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + # want analysis at beginning and middle of window + bkgtimes.append(config.SNOW_WINDOW_BEGIN) + bkgtimes.append(config.current_cycle) anllist = [] - for itile in range(1, config.ntiles + 1): - filename_bkg = template_bkg.format(tilenum=itile) - filename_anl = template_anl.format(tilenum=itile) - src = os.path.join(config.COM_ATMOS_RESTART_PREV, filename_bkg) - dest = os.path.join(config.DATA, "anl", filename_anl) - 
anllist.append([src, dest]) + for bkgtime in bkgtimes: + template = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' + for itile in range(1, config.ntiles + 1): + filename = template.format(tilenum=itile) + src = os.path.join(config.COM_ATMOS_RESTART_PREV, filename) + dest = os.path.join(config.DATA, "anl", filename) + anllist.append([src, dest]) FileHandler({'copy': anllist}).sync() - logger.info("Create namelist for APPLY_INCR_EXE") - nml_template = config.APPLY_INCR_NML_TMPL - nml_data = Jinja(nml_template, config).render - logger.debug(f"apply_incr_nml:\n{nml_data}") - - nml_file = os.path.join(config.DATA, "apply_incr_nml") - with open(nml_file, "w") as fho: - fho.write(nml_data) - - logger.info("Link APPLY_INCR_EXE into DATA/") - exe_src = config.APPLY_INCR_EXE - exe_dest = os.path.join(config.DATA, os.path.basename(exe_src)) - if os.path.exists(exe_dest): - rm_p(exe_dest) - os.symlink(exe_src, exe_dest) - - # execute APPLY_INCR_EXE to create analysis files - exe = Executable(config.APRUN_APPLY_INCR) - exe.add_default_arg(os.path.join(config.DATA, os.path.basename(exe_src))) - logger.info(f"Executing {exe}") - try: - exe() - except OSError: - raise OSError(f"Failed to execute {exe}") - except Exception: - raise WorkflowException(f"An error occured during execution of {exe}") + if config.DOIAU: + logger.info("Copying increments to beginning of window") + template_in = f'snowinc.{to_fv3time(config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + template_out = f'snowinc.{to_fv3time(config.SNOW_WINDOW_BEGIN)}.sfc_data.tile{{tilenum}}.nc' + inclist = [] + for itile in range(1, config.ntiles + 1): + filename_in = template_in.format(tilenum=itile) + filename_out = template_out.format(tilenum=itile) + src = os.path.join(config.DATA, 'anl', filename_in) + dest = os.path.join(config.DATA, 'anl', filename_out) + inclist.append([src, dest]) + FileHandler({'copy': inclist}).sync() + + # loop over times to apply increments + for bkgtime in bkgtimes: + logger.info("Processing analysis valid: {bkgtime}") + logger.info("Create namelist for APPLY_INCR_EXE") + nml_template = config.APPLY_INCR_NML_TMPL + nml_config = { + 'current_cycle': bkgtime, + 'CASE': config.CASE, + 'DATA': config.DATA, + 'HOMEgfs': config.HOMEgfs, + 'OCNRES': config.OCNRES, + } + nml_data = Jinja(nml_template, nml_config).render + logger.debug(f"apply_incr_nml:\n{nml_data}") + + nml_file = os.path.join(config.DATA, "apply_incr_nml") + with open(nml_file, "w") as fho: + fho.write(nml_data) + + logger.info("Link APPLY_INCR_EXE into DATA/") + exe_src = config.APPLY_INCR_EXE + exe_dest = os.path.join(config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + # execute APPLY_INCR_EXE to create analysis files + exe = Executable(config.APRUN_APPLY_INCR) + exe.add_default_arg(os.path.join(config.DATA, os.path.basename(exe_src))) + logger.info(f"Executing {exe}") + try: + exe() + except OSError: + raise OSError(f"Failed to execute {exe}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exe}") From d57e12f814b8a58fe1492f63c0a3c8139ef028ab Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 24 May 2024 17:07:54 +0000 Subject: [PATCH 08/90] snow changes --- parm/config/gfs/config.esnowanl | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/parm/config/gfs/config.esnowanl b/parm/config/gfs/config.esnowanl index cca9ac3d89..f5157a2346 100644 --- a/parm/config/gfs/config.esnowanl +++ b/parm/config/gfs/config.esnowanl 
@@ -8,12 +8,13 @@ echo "BEGIN: config.esnowanl" # Get task specific resources source "${EXPDIR}/config.resources" esnowanl -export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2" -export JCB_ALGO_YAML="${PARMgfs}/gdas/snow/fv3jedi_land_ensrecenter.yaml.j2" +export JCB_BASE_YAML="${PARMgfs}/gdas/snow/jcb-base.yaml.j2" +export JCB_ALGO_YAML="${PARMgfs}/gdas/snow/jcb-fv3jedi_land_ensrecenter.yaml.j2" export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ export JEDIEXE=${EXECgfs}/gdasapp_land_ensrecenter.x +export FREGRID=${EXECgfs}/fregrid.x echo "END: config.esnowanl" From 89fd461e7a1d67afb9dffc75ab57dda1f5072104 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 29 May 2024 20:43:07 +0000 Subject: [PATCH 09/90] end of day commit --- parm/gdas/snow_stage_ens_update.yaml.j2 | 17 +++++++++++++++++ sorc/link_workflow.sh | 3 ++- sorc/ufs_utils.fd | 2 +- ush/python/pygfs/task/snowens_analysis.py | 7 ++++--- 4 files changed, 24 insertions(+), 5 deletions(-) create mode 100644 parm/gdas/snow_stage_ens_update.yaml.j2 diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 new file mode 100644 index 0000000000..44ac86e5f0 --- /dev/null +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -0,0 +1,17 @@ +# create working directories +mkdir: +- "{{ DATA }}/bkg/det" +- "{{ DATA }}/bkg/det_ensres" +- "{{ DATA }}/inc/det" +- "{{ DATA }}/inc/det_ensres" +{% for mem in range(1, nmem_ens + 1) %} +- "{{ DATA }}/bkg/mem{{ '%03d' % mem }}" +{% endfor %} +copy: +# copy deterministic background files + +# copy deterministic increment files +{% for tile in range(1, 7) %} +- ["", "{{ DATA }}/inc/det/snowinc.{{ current_cycle }}.sfc_data.tile{{ tile }}.nc"] +{% endfor %} +# copy ensemble background files \ No newline at end of file diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index c5d7243e8f..9aecc30ed5 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -302,7 +302,7 @@ ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/ufs_model.x" . [[ -s "upp.x" ]] && rm -f upp.x ${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/exec/upp.x" . -for ufs_utilsexe in emcsfc_ice_blend emcsfc_snow2mdl global_cycle; do +for ufs_utilsexe in emcsfc_ice_blend emcsfc_snow2mdl global_cycle fregrid; do [[ -s "${ufs_utilsexe}" ]] && rm -f "${ufs_utilsexe}" ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_utils.fd/exec/${ufs_utilsexe}" . 
done @@ -348,6 +348,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then "gdas_incr_handler.x" \ "gdas_obsprovider2ioda.x" \ "gdas_socahybridweights.x" \ + "gdasapp_land_ensrecenter.x" \ "bufr2ioda.x" \ "calcfIMS.exe" \ "apply_incr.exe" ) diff --git a/sorc/ufs_utils.fd b/sorc/ufs_utils.fd index f42fae239d..c6e032b96b 160000 --- a/sorc/ufs_utils.fd +++ b/sorc/ufs_utils.fd @@ -1 +1 @@ -Subproject commit f42fae239d0824f7b9a83c9afdc3d980894c7df8 +Subproject commit c6e032b96bc51e901f3780a624c6798ede688e2c diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index a846981691..ad679298c2 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -30,15 +30,16 @@ class SnowEnsAnalysis(Analysis): def __init__(self, config): super().__init__(config) - _res = int(self.config['CASE'][1:]) + _res_det = int(self.config['CASE'][1:]) + _res_ens = int(self.config['CASE_ENS'][1:]) _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2) _recenter_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.RUN}.t{self.runtime_config['cyc']:02d}z.land_recenter.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( { - 'npx_ges': _res + 1, - 'npy_ges': _res + 1, + 'npx_ges': _res_ens + 1, + 'npy_ges': _res_ens + 1, 'npz_ges': self.config.LEVS - 1, 'npz': self.config.LEVS - 1, 'SNOW_WINDOW_BEGIN': _window_begin, From f26822e2723418cda5dd195dd77647b860f72009 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 30 May 2024 18:51:29 +0000 Subject: [PATCH 10/90] stage files is working --- jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS | 1 + parm/gdas/snow_stage_ens_update.yaml.j2 | 90 ++++++++++++++++++++++- ush/python/pygfs/task/snowens_analysis.py | 5 ++ 3 files changed, 93 insertions(+), 3 deletions(-) diff --git a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS index ab6232555f..98082ca94e 100755 --- a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS @@ -14,6 +14,7 @@ gPDY=${GDATE:0:8} gcyc=${GDATE:8:2} GDUMP="gdas" GDUMP_ENS="enkf${GDUMP}" +export GDUMP ############################################## # Begin JOB SPECIFIC work diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 index 44ac86e5f0..1d4fdcc15a 100644 --- a/parm/gdas/snow_stage_ens_update.yaml.j2 +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -1,17 +1,101 @@ +###################################### +# set some variables +###################################### +{% set gPDY = previous_cycle | to_YMD %} +{% set gcyc = previous_cycle | strftime("%H") %} +{% set PDY = current_cycle | to_YMD %} +{% set cyc = current_cycle | strftime("%H") %} +{% if DOIAU %} +{% set bkg_time = SNOW_WINDOW_BEGIN | to_fv3time %} +{% else %} +{% set bkg_time = current_cycle | to_fv3time %} +{% endif %} +###################################### # create working directories +###################################### mkdir: - "{{ DATA }}/bkg/det" - "{{ DATA }}/bkg/det_ensres" - "{{ DATA }}/inc/det" - "{{ DATA }}/inc/det_ensres" -{% for mem in range(1, nmem_ens + 1) %} +{% for mem in range(1, NMEM_ENS + 1) %} - "{{ DATA }}/bkg/mem{{ '%03d' % mem }}" {% endfor %} copy: +###################################### # copy deterministic background files +###################################### +# define variables +# Declare a dict of search and replace terms to run on each template +{% set 
tmpl_dict = {'ROTDIR':ROTDIR, + 'RUN':GDUMP, + 'YMD':gPDY, + 'HH':gcyc, + 'MEMDIR':""} %} +# Replace template variables with tmpl_dict, one key at a time +# This must be done in a namespace to overcome jinja scoping +# Variables set inside of a for loop are lost at the end of the loop +# unless they are part of a namespace +{% set com_prev_ns = namespace(COM_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_TMPL) %} + +{% for key in tmpl_dict.keys() %} +{% set search_term = '${' + key + '}' %} +{% set replace_term = tmpl_dict[key] %} +{% set com_prev_ns.COM_ATMOS_RESTART_MEM = com_prev_ns.COM_ATMOS_RESTART_MEM.replace(search_term, replace_term) %} +{% endfor %} +{% for tile in range(1, 7) %} +- ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/det/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +{% endfor %} +###################################### # copy deterministic increment files +###################################### +# define variables +# Declare a dict of search and replace terms to run on each template +{% set tmpl_dict = {'ROTDIR':ROTDIR, + 'RUN':GDUMP, + 'YMD':PDY, + 'HH':cyc, + 'MEMDIR':""} %} + +# Replace template variables with tmpl_dict, one key at a time +# This must be done in a namespace to overcome jinja scoping +# Variables set inside of a for loop are lost at the end of the loop +# unless they are part of a namespace +{% set com_ns = namespace(COM_SNOW_ANALYSIS_MEM = COM_SNOW_ANALYSIS_TMPL) %} + +{% for key in tmpl_dict.keys() %} +{% set search_term = '${' + key + '}' %} +{% set replace_term = tmpl_dict[key] %} +{% set com_ns.COM_SNOW_ANALYSIS_MEM = com_ns.COM_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} +{% endfor %} +{% for tile in range(1, 7) %} +- ["{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/inc/det/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +{% endfor %} +###################################### +# copy ensemble background files +###################################### +{% for mem in range(1, NMEM_ENS + 1) %} +# define variables +# Declare a dict of search and replace terms to run on each template +{% set tmpl_dict = {'ROTDIR':ROTDIR, + 'RUN':RUN, + 'YMD':gPDY, + 'HH':gcyc, + 'MEMDIR':"mem" + '%03d' % mem} %} + +# Replace template variables with tmpl_dict, one key at a time +# This must be done in a namespace to overcome jinja scoping +# Variables set inside of a for loop are lost at the end of the loop +# unless they are part of a namespace +{% set com_prev_ns = namespace(COM_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_TMPL) %} + +{% for key in tmpl_dict.keys() %} +{% set search_term = '${' + key + '}' %} +{% set replace_term = tmpl_dict[key] %} +{% set com_prev_ns.COM_ATMOS_RESTART_MEM = com_prev_ns.COM_ATMOS_RESTART_MEM.replace(search_term, replace_term) %} +{% endfor %} {% for tile in range(1, 7) %} -- ["", "{{ DATA }}/inc/det/snowinc.{{ current_cycle }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} -# copy ensemble background files \ No newline at end of file +{% endfor %} \ No newline at end of file diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index ad679298c2..3f2954fb9c 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -69,6 +69,11 @@ def 
initialize(self) -> None: super().initialize() + # stage files + logger.info(f"Staging files from {self.task_config.SNOW_ENS_STAGE_TMPL}") + snow_stage_list = parse_j2yaml(self.task_config.SNOW_ENS_STAGE_TMPL, self.task_config) + FileHandler(snow_stage_list).sync() + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) logger.info(f"Wrote recentering YAML to: {self.task_config.jedi_yaml}") From 2460af19175edd13385c98b2ed67e11c5309e471 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 30 May 2024 20:00:53 +0000 Subject: [PATCH 11/90] save updates --- parm/config/gfs/config.esnowanl | 3 +++ ush/python/pygfs/task/snowens_analysis.py | 15 ++++++++++++++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/parm/config/gfs/config.esnowanl b/parm/config/gfs/config.esnowanl index f5157a2346..28c8f6460b 100644 --- a/parm/config/gfs/config.esnowanl +++ b/parm/config/gfs/config.esnowanl @@ -11,6 +11,9 @@ source "${EXPDIR}/config.resources" esnowanl export JCB_BASE_YAML="${PARMgfs}/gdas/snow/jcb-base.yaml.j2" export JCB_ALGO_YAML="${PARMgfs}/gdas/snow/jcb-fv3jedi_land_ensrecenter.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" +export SNOW_ENS_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_ens_update.yaml.j2" + export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 3f2954fb9c..8ab23f3c27 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -69,14 +69,27 @@ def initialize(self) -> None: super().initialize() - # stage files + # stage background and increment files logger.info(f"Staging files from {self.task_config.SNOW_ENS_STAGE_TMPL}") snow_stage_list = parse_j2yaml(self.task_config.SNOW_ENS_STAGE_TMPL, self.task_config) FileHandler(snow_stage_list).sync() + # stage fix files for fv3-jedi + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) + FileHandler(jedi_fix_list).sync() + + # write land ensemble recentering YAML save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) logger.info(f"Wrote recentering YAML to: {self.task_config.jedi_yaml}") + # link recentering executable + # placeholder, currently already done by the analysis parent class + + # copy fregrid executable + fregrid_copy = {'copy': [os.path.join(self.task_config.EXECgfs, 'fregrid'), os.path.join(self.task_config.DATA, 'fregrid.x')]} + FileHandler(fregrid_copy).sync() + @logit(logger) def execute(self) -> None: """Run a series of tasks to create snow ensemble analysis From 057996daf0870ebb19ee83664ec49fc7472c00fc Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 30 May 2024 20:09:11 +0000 Subject: [PATCH 12/90] more updates --- ush/python/pygfs/task/snowens_analysis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 8ab23f3c27..d664550569 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -87,7 +87,7 @@ def initialize(self) -> None: # placeholder, currently already done by the analysis parent class # copy fregrid executable - fregrid_copy = {'copy': [os.path.join(self.task_config.EXECgfs, 'fregrid'), os.path.join(self.task_config.DATA, 'fregrid.x')]} + fregrid_copy = {'copy': [[os.path.join(self.task_config.EXECgfs, 'fregrid'), 
os.path.join(self.task_config.DATA, 'fregrid.x')]]} FileHandler(fregrid_copy).sync() @logit(logger) From 146dc60f7061b1a4016456bc5d33c9b13f5fdf4e Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 30 May 2024 20:12:52 +0000 Subject: [PATCH 13/90] make norm happy --- ush/python/pygfs/task/snow_analysis.py | 2 +- ush/python/pygfs/task/snowens_analysis.py | 7 +++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index dc5a215028..bb4cc6d49e 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -360,7 +360,7 @@ def finalize(self) -> None: bkgtimes = [] if self.task_config.DOIAU: # need both beginning and middle of window - bkgtimes.append(self.task_config.SNOW_WINDOW_BEGIN) + bkgtimes.append(self.task_config.SNOW_WINDOW_BEGIN) bkgtimes.append(self.task_config.current_cycle) anllist = [] for bkgtime in bkgtimes: diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index d664550569..2f1ec561bc 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -77,7 +77,7 @@ def initialize(self) -> None: # stage fix files for fv3-jedi logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) - FileHandler(jedi_fix_list).sync() + FileHandler(jedi_fix_list).sync() # write land ensemble recentering YAML save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) @@ -106,7 +106,7 @@ def execute(self) -> None: def finalize(self) -> None: """Performs closing actions of the snow ensemble analysis task This method: - - + - does nothing yet Parameters ---------- @@ -114,7 +114,6 @@ def finalize(self) -> None: Instance of the SnowEnsAnalysis object """ - @staticmethod @logit(logger) def get_bkg_dict(config: Dict) -> Dict[str, List[str]]: @@ -237,7 +236,7 @@ def get_obs_dict(self) -> Dict[str, Any]: 'copy': [], } return obs_dict - + def get_bias_dict(self) -> Dict[str, Any]: bias_dict = { 'mkdir': [], From d7afb7b0e8735c5c6ad3e2b3d22dffdecb8f0e71 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 30 May 2024 20:19:43 +0000 Subject: [PATCH 14/90] norm is an angry old man --- workflow/rocoto/gfs_tasks.py | 1 - 1 file changed, 1 deletion(-) diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 5a5ec14d04..38f6ada64f 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -627,7 +627,6 @@ def esnowanl(self): task = rocoto.create_task(task_dict) return task - def prepoceanobs(self): ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'}) From eb5716da0e853e9f269a80798e0c28051827a025 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 30 May 2024 20:27:48 +0000 Subject: [PATCH 15/90] remove esnowanl from enkfgfs --- workflow/applications/gfs_cycled.py | 1 + 1 file changed, 1 insertion(+) diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 575d5c4842..6e63e3d2d6 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -295,6 +295,7 @@ def get_task_names(self): if self.do_hybvar and 'gfs' in self.eupd_cdumps: enkfgfs_tasks = hybrid_tasks + hybrid_after_eupd_tasks enkfgfs_tasks.remove("echgres") + enkfgfs_tasks.remove("esnowanl") tasks['enkfgfs'] = enkfgfs_tasks return tasks From 
6a57a644ce992d4a33ab4c782c00df18e1cce30d Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 31 May 2024 21:20:07 +0000 Subject: [PATCH 16/90] end of day commit --- parm/config/gfs/config.esnowanl | 1 + parm/gdas/snow_stage_orog.yaml.j2 | 12 +++ scripts/exglobal_snow_ensemble_analysis.py | 4 +- ush/python/pygfs/task/snowens_analysis.py | 96 ++++++++++++++++------ 4 files changed, 86 insertions(+), 27 deletions(-) create mode 100644 parm/gdas/snow_stage_orog.yaml.j2 diff --git a/parm/config/gfs/config.esnowanl b/parm/config/gfs/config.esnowanl index 28c8f6460b..5e814decaa 100644 --- a/parm/config/gfs/config.esnowanl +++ b/parm/config/gfs/config.esnowanl @@ -13,6 +13,7 @@ export JCB_ALGO_YAML="${PARMgfs}/gdas/snow/jcb-fv3jedi_land_ensrecenter.yaml.j2" export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" export SNOW_ENS_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_ens_update.yaml.j2" +export SNOW_OROG_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_orog.yaml.j2" export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ diff --git a/parm/gdas/snow_stage_orog.yaml.j2 b/parm/gdas/snow_stage_orog.yaml.j2 new file mode 100644 index 0000000000..9d66fa6fe6 --- /dev/null +++ b/parm/gdas/snow_stage_orog.yaml.j2 @@ -0,0 +1,12 @@ +mkdir: +- "{{ DATA }}/orog/det" +- "{{ DATA }}/orog/ens" +copy: +- ["{{ FIXgfs }}/orog/{{ CASE }}/{{ CASE }}_mosaic.nc", "{{ DATA }}/orog/det/{{ CASE }}_mosaic.nc"] +- ["{{ FIXgfs }}/orog/{{ CASE_ENS }}/{{ CASE_ENS }}_mosaic.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_mosaic.nc"] +{% for tile in range(1, 7) %} +- ["{{ FIXgfs }}/orog/{{ CASE }}/{{ CASE }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}_grid.tile{{ tile }}.nc"] +- ["{{ FIXgfs }}/orog/{{ CASE_ENS }}/{{ CASE_ENS }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_grid.tile{{ tile }}.nc"] +- ["{{ FIXgfs }}/orog/{{ CASE }}/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] +- ["{{ FIXgfs }}/orog/{{ CASE_ENS }}/{{ CASE_ENS }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] +{% endfor %} \ No newline at end of file diff --git a/scripts/exglobal_snow_ensemble_analysis.py b/scripts/exglobal_snow_ensemble_analysis.py index 0fdd4de152..66c23d48ac 100755 --- a/scripts/exglobal_snow_ensemble_analysis.py +++ b/scripts/exglobal_snow_ensemble_analysis.py @@ -20,5 +20,7 @@ # Instantiate the snow ensemble analysis task anl = SnowEnsAnalysis(config) anl.initialize() - anl.execute() + anl.regridDetBkg() + anl.regridDetInc() + anl.recenterEns() anl.finalize() diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 2f1ec561bc..0dc953c099 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -10,7 +10,7 @@ from wxflow import (AttrDict, FileHandler, to_fv3time, to_YMD, to_YMDH, to_timedelta, add_to_datetime, - rm_p, + rm_p, chdir, parse_j2yaml, save_as_yaml, Jinja, logit, @@ -51,6 +51,8 @@ def __init__(self, config): 'jedi_yaml': _recenter_yaml, } ) + bkg_time = _window_begin if self.config.DOIAU else self.runtime_config.current_cycle + local_dict['bkg_time'] = bkg_time # task_config is everything that this task should need self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) @@ -74,6 +76,11 @@ def initialize(self) -> None: snow_stage_list = parse_j2yaml(self.task_config.SNOW_ENS_STAGE_TMPL, self.task_config) 
FileHandler(snow_stage_list).sync() + # stage orography files + logger.info(f"Staging orography files specified in {self.task_config.SNOW_OROG_STAGE_TMPL}") + snow_orog_stage_list = parse_j2yaml(self.task_config.SNOW_OROG_STAGE_TMPL, self.task_config) + FileHandler(snow_orog_stage_list).sync() + # stage fix files for fv3-jedi logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) @@ -91,10 +98,66 @@ def initialize(self) -> None: FileHandler(fregrid_copy).sync() @logit(logger) - def execute(self) -> None: - """Run a series of tasks to create snow ensemble analysis - This method: + def regridDetBkg(self) -> None: + """Run fregrid to regrid the deterministic snow background + to the ensemble resolution + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + # below is an example of running from jiarui + """ + fregrid --input_mosaic ${orogdir}/C96/C96_mosaic.nc --input_dir ${input_dir} --input_file ${input_file} --scalar_field snodl + --output_dir ${output_dir} --output_file ${output_file} --output_mosaic ${orogdir}/C48/C48_mosaic.nc --interp_method conserve_order1 + --weight_file ${orogdir}/C96/C96.mx500_oro_data --weight_field land_frac + """ + chdir(self.task_config.DATA) + + exec_name = os.path.join(self.task_config.DATA, 'fregrid.x') + exec_cmd = Executable(exec_name) + # why does below not work + # exec_cmd.add_default_arg(f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc") + # exec_cmd.add_default_arg(f"--input_dir ./bkg/det/") + # exec_cmd.add_default_arg(f"--input_file {to_fv3time(self.task_config.bkg_time)}.sfc_data") + # exec_cmd.add_default_arg(f"--scalar_field snodl") + # exec_cmd.add_default_arg(f"--output_dir ./bkg/det_ensres/") + # exec_cmd.add_default_arg(f"--output_file {to_fv3time(self.task_config.bkg_time)}.ensres.sfc_data") + # exec_cmd.add_default_arg(f"--output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc") + # exec_cmd.add_default_arg(f"--interp_method conserve_order1") + # exec_cmd.add_default_arg(f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data") + # exec_cmd.add_default_arg(f"--weight_field land_frac") + # below does not work either, does this stupid code need a shell script constructed that calls this??? 
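# (An aside on the question above: assuming wxflow's Executable follows the spack
# convention of exec'ing the program directly, without a shell, every string given
# to add_default_arg() becomes a single argv token in the child process. A flag and
# its value passed as one string therefore reach fregrid as one token with an
# embedded space, which its option parser rejects. A minimal sketch of the distinction:
#
#     exec_cmd.add_default_arg("--scalar_field snodl")   # child argv: ["--scalar_field snodl"] -> rejected
#     exec_cmd.add_default_arg("--scalar_field")         # child argv: ["--scalar_field", "snodl"] -> parsed
#     exec_cmd.add_default_arg("snodl")
# )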
+ # exec_cmd.add_default_arg(f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc --input_dir ./bkg/det/ --input_file {to_fv3time(self.task_config.bkg_time)}.sfc_data --scalar_field snodl --output_dir ./bkg/det_ensres/ --output_file {to_fv3time(self.task_config.bkg_time)}.ensres.sfc_data --output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc --interp_method conserve_order1 --weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data --weight_field land_frac") + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + pass + @logit(logger) + def regridDetInc(self) -> None: + """Run fregrid to regrid the deterministic snow increment + to the ensemble resolution + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + @logit(logger) + def recenterEns(self) -> None: + """Run recentering code to create an ensemble of snow increments + based on the deterministic increment, and the difference + between the determinstic and ensemble mean forecast Parameters ---------- @@ -137,29 +200,10 @@ def get_bkg_dict(config: Dict) -> Dict[str, List[str]]: bkg_dict: Dict a dictionary containing the list of model background files to copy for FileHandler """ - # NOTE for now this is FV3 RESTART files and just assumed to be fh006 - - # get FV3 sfc_data RESTART files, this will be a lot simpler when using history files - rst_dir = os.path.join(config.COM_ATMOS_RESTART_PREV) # for now, option later? - run_dir = os.path.join(config.DATA, 'bkg') - - # Start accumulating list of background files to copy - bkglist = [] - - # snow DA needs coupler - basename = f'{to_fv3time(config.current_cycle)}.coupler.res' - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - # snow DA only needs sfc_data - for ftype in ['sfc_data']: - template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' - for itile in range(1, config.ntiles + 1): - basename = template.format(tilenum=itile) - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - + bkg_dict = { - 'mkdir': [run_dir], - 'copy': bkglist + 'mkdir': [], + 'copy': [], } return bkg_dict From ebb3c319d74a55697017c7272ba2f5d102df3b6f Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Mon, 3 Jun 2024 19:52:28 +0000 Subject: [PATCH 17/90] fregrid working but seems a bit hacky --- ush/python/pygfs/task/snowens_analysis.py | 52 +++++++++++++++++++---- 1 file changed, 44 insertions(+), 8 deletions(-) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 0dc953c099..603d461f04 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -108,16 +108,24 @@ def regridDetBkg(self) -> None: Instance of the SnowEnsAnalysis object """ - # below is an example of running from jiarui - """ - fregrid --input_mosaic ${orogdir}/C96/C96_mosaic.nc --input_dir ${input_dir} --input_file ${input_file} --scalar_field snodl - --output_dir ${output_dir} --output_file ${output_file} --output_mosaic ${orogdir}/C48/C48_mosaic.nc --interp_method conserve_order1 - --weight_file ${orogdir}/C96/C96.mx500_oro_data --weight_field land_frac - """ chdir(self.task_config.DATA) - exec_name = os.path.join(self.task_config.DATA, 'fregrid.x') - exec_cmd = Executable(exec_name) + #exec_name = 
os.path.join(self.task_config.DATA, 'fregrid.x') + #exec_cmd = Executable(exec_name) + arg_list = [ + f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc", + f"--input_dir ./bkg/det/", + f"--input_file {to_fv3time(self.task_config.bkg_time)}.sfc_data", + f"--scalar_field snodl", + f"--output_dir ./bkg/det_ensres/", + f"--output_file {to_fv3time(self.task_config.bkg_time)}.ensres.sfc_data", + f"--output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", + f"--interp_method conserve_order1", + f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data", + f"--weight_field land_frac", + ] + fregrid = os.path.join(self.task_config.DATA, 'fregrid.x') + " " + " ".join(arg_list) + exec_cmd = Executable(fregrid) # why does below not work # exec_cmd.add_default_arg(f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc") # exec_cmd.add_default_arg(f"--input_dir ./bkg/det/") @@ -131,6 +139,7 @@ def regridDetBkg(self) -> None: # exec_cmd.add_default_arg(f"--weight_field land_frac") # below does not work either, does this stupid code need a shell script constructed that calls this??? # exec_cmd.add_default_arg(f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc --input_dir ./bkg/det/ --input_file {to_fv3time(self.task_config.bkg_time)}.sfc_data --scalar_field snodl --output_dir ./bkg/det_ensres/ --output_file {to_fv3time(self.task_config.bkg_time)}.ensres.sfc_data --output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc --interp_method conserve_order1 --weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data --weight_field land_frac") + # doing exec_cmd(arg_list) also does not work, but I have a hacky solution working... try: logger.debug(f"Executing {exec_cmd}") @@ -153,6 +162,33 @@ def regridDetInc(self) -> None: Instance of the SnowEnsAnalysis object """ + chdir(self.task_config.DATA) + + arg_list = [ + f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc", + f"--input_dir ./inc/det/", + f"--input_file snowinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data", + f"--scalar_field snodl", + f"--output_dir ./inc/det_ensres/", + f"--output_file snowinc.{to_fv3time(self.task_config.current_cycle)}.ensres.sfc_data", + f"--output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", + f"--interp_method conserve_order1", + f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data", + f"--weight_field land_frac", + ] + fregrid = os.path.join(self.task_config.DATA, 'fregrid.x') + " " + " ".join(arg_list) + exec_cmd = Executable(fregrid) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + pass + @logit(logger) def recenterEns(self) -> None: """Run recentering code to create an ensemble of snow increments From 2bdee22d0f2b178ed107a6ba41945e0ee743a665 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Mon, 3 Jun 2024 19:54:52 +0000 Subject: [PATCH 18/90] norms and remove commented out code --- ush/python/pygfs/task/snowens_analysis.py | 18 +----------------- 1 file changed, 1 insertion(+), 17 deletions(-) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 603d461f04..ad9fdde41f 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -110,8 +110,6 @@ def regridDetBkg(self) -> None: 
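The file names assembled just below turn on bkg_time, which __init__ (PATCH 16) sets to the beginning of the assimilation window when IAU is on and to the cycle time otherwise, so with IAU the deterministic background regridded here is the beginning-of-window restart. A minimal sketch of that timing logic, with the six-hour window half-width assumed for illustration:

    from datetime import datetime, timedelta

    current_cycle = datetime(2021, 12, 20, 12)               # example cycle, matching the CI case idate
    snow_window_begin = current_cycle - timedelta(hours=3)   # assumed 6 h window, IAU begins at -3 h
    doiau = True
    bkg_time = snow_window_begin if doiau else current_cycle
    # to_fv3time() renders this as the FV3 restart prefix, e.g. "20211220.090000"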
chdir(self.task_config.DATA) - #exec_name = os.path.join(self.task_config.DATA, 'fregrid.x') - #exec_cmd = Executable(exec_name) arg_list = [ f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc", f"--input_dir ./bkg/det/", @@ -126,20 +124,6 @@ def regridDetBkg(self) -> None: ] fregrid = os.path.join(self.task_config.DATA, 'fregrid.x') + " " + " ".join(arg_list) exec_cmd = Executable(fregrid) - # why does below not work - # exec_cmd.add_default_arg(f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc") - # exec_cmd.add_default_arg(f"--input_dir ./bkg/det/") - # exec_cmd.add_default_arg(f"--input_file {to_fv3time(self.task_config.bkg_time)}.sfc_data") - # exec_cmd.add_default_arg(f"--scalar_field snodl") - # exec_cmd.add_default_arg(f"--output_dir ./bkg/det_ensres/") - # exec_cmd.add_default_arg(f"--output_file {to_fv3time(self.task_config.bkg_time)}.ensres.sfc_data") - # exec_cmd.add_default_arg(f"--output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc") - # exec_cmd.add_default_arg(f"--interp_method conserve_order1") - # exec_cmd.add_default_arg(f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data") - # exec_cmd.add_default_arg(f"--weight_field land_frac") - # below does not work either, does this stupid code need a shell script constructed that calls this??? - # exec_cmd.add_default_arg(f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc --input_dir ./bkg/det/ --input_file {to_fv3time(self.task_config.bkg_time)}.sfc_data --scalar_field snodl --output_dir ./bkg/det_ensres/ --output_file {to_fv3time(self.task_config.bkg_time)}.ensres.sfc_data --output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc --interp_method conserve_order1 --weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data --weight_field land_frac") - # doing exec_cmd(arg_list) also does not work, but I have a hacky solution working... 
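The joined-string workaround kept above most likely succeeds because Executable, in the spack lineage that wxflow follows, runs shlex.split() on the string it is constructed from, so the full command line is tokenized into proper argv entries; add_default_arg() instead stores each string as one token, and exec_cmd(arg_list) passes the whole list as a single positional argument. If that reading is right, an equivalent sketch that keeps the executable path as the constructor argument would be the following (this assumes the surrounding method's arg_list and is an assumption about the API, not a tested fix):

    import shlex

    exec_cmd = Executable(os.path.join(self.task_config.DATA, 'fregrid.x'))
    for arg in arg_list:                   # e.g. "--scalar_field snodl"
        for token in shlex.split(arg):     # -> "--scalar_field", "snodl"
            exec_cmd.add_default_arg(token)
    exec_cmd()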
try: logger.debug(f"Executing {exec_cmd}") @@ -236,7 +220,7 @@ def get_bkg_dict(config: Dict) -> Dict[str, List[str]]: bkg_dict: Dict a dictionary containing the list of model background files to copy for FileHandler """ - + bkg_dict = { 'mkdir': [], 'copy': [], From 42b3fbe4028c7ee37953701c1e292eb3a91c13ec Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 6 Jun 2024 15:58:56 +0000 Subject: [PATCH 19/90] update snow ens --- scripts/exglobal_snow_ensemble_analysis.py | 1 + ush/python/pygfs/task/snowens_analysis.py | 13 +++++++++++++ 2 files changed, 14 insertions(+) diff --git a/scripts/exglobal_snow_ensemble_analysis.py b/scripts/exglobal_snow_ensemble_analysis.py index 66c23d48ac..2c40d31032 100755 --- a/scripts/exglobal_snow_ensemble_analysis.py +++ b/scripts/exglobal_snow_ensemble_analysis.py @@ -23,4 +23,5 @@ anl.regridDetBkg() anl.regridDetInc() anl.recenterEns() + anl.addIncrements() anl.finalize() diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index ad9fdde41f..81eef768cd 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -184,6 +184,19 @@ def recenterEns(self) -> None: self : Analysis Instance of the SnowEnsAnalysis object """ + logger.info("Running recentering code") + exec_cmd = Executable(localconf.APRUN_ESNOWANL) + exec_name = os.path.join(localconf.DATA, 'gdasapp_land_ensrecenter.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg(localconf.jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") @logit(logger) def finalize(self) -> None: From f3802239652615eb833d2251a5dd8348384a212c Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 6 Jun 2024 17:24:19 +0000 Subject: [PATCH 20/90] update some submodules --- sorc/gdas.cd | 2 +- sorc/ufs_utils.fd | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 3e50a8fdcd..e1ed25d5b4 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 3e50a8fdcd07305a3464a02e20eaf4b033179167 +Subproject commit e1ed25d5b4852482a2895db6348ea1eb1d9f96b1 diff --git a/sorc/ufs_utils.fd b/sorc/ufs_utils.fd index f42fae239d..2794d413d0 160000 --- a/sorc/ufs_utils.fd +++ b/sorc/ufs_utils.fd @@ -1 +1 @@ -Subproject commit f42fae239d0824f7b9a83c9afdc3d980894c7df8 +Subproject commit 2794d413d083b43d9ba37a15375d5c61b610d29e From 73894dc76c6c57a7ea2af541e2d226bddb9c7119 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 6 Jun 2024 20:13:54 +0000 Subject: [PATCH 21/90] update some things --- env/HERA.env | 10 ++++++++++ env/HERCULES.env | 10 ++++++++++ env/JET.env | 10 ++++++++++ env/ORION.env | 10 ++++++++++ env/S4.env | 10 ++++++++++ env/WCOSS2.env | 10 ++++++++++ ush/python/pygfs/task/snowens_analysis.py | 6 +++--- 7 files changed, 63 insertions(+), 3 deletions(-) diff --git a/env/HERA.env b/env/HERA.env index ccaaea32e7..cd6842ff4e 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -112,6 +112,16 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "esnowanl" ]]; then + + nth_max=$((npe_node_max / npe_node_esnowanl)) + + export NTHREADS_ESNOWANL=${nth_esnowanl:-${nth_max}} + [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} + export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} 
--cpus-per-task=${NTHREADS_ESNOWANL}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + elif [[ "${step}" = "ocnanalbmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd --multi-prog" diff --git a/env/HERCULES.env b/env/HERCULES.env index 0824ba913a..08ab227d51 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -109,6 +109,16 @@ case ${step} in export APRUN_APPLY_INCR="${launcher} -n 6" ;; + "esnowanl") + + nth_max=$((npe_node_max / npe_node_esnowanl)) + + export NTHREADS_ESNOWANL=${nth_esnowanl:-${nth_max}} + [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} + export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + ;; "ocnanalbmat") export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" diff --git a/env/JET.env b/env/JET.env index 5bd88dc93a..5c8f423818 100755 --- a/env/JET.env +++ b/env/JET.env @@ -92,6 +92,16 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "esnowanl" ]]; then + + nth_max=$((npe_node_max / npe_node_esnowanl)) + + export NTHREADS_ESNOWANL=${nth_esnowanl:-${nth_max}} + [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} + export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + elif [[ "${step}" = "atmanlfv3inc" ]]; then nth_max=$((npe_node_max / npe_node_atmanlfv3inc)) diff --git a/env/ORION.env b/env/ORION.env index f701e55aa2..2b9c57914b 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -100,6 +100,16 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "esnowanl" ]]; then + + nth_max=$((npe_node_max / npe_node_esnowanl)) + + export NTHREADS_ESNOWANL=${nth_esnowanl:-${nth_max}} + [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} + export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + elif [[ "${step}" = "atmanlfv3inc" ]]; then nth_max=$((npe_node_max / npe_node_atmanlfv3inc)) diff --git a/env/S4.env b/env/S4.env index 9ba3a61b01..9820c743bb 100755 --- a/env/S4.env +++ b/env/S4.env @@ -92,6 +92,16 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "esnowanl" ]]; then + + nth_max=$((npe_node_max / npe_node_esnowanl)) + + export NTHREADS_ESNOWANL=${nth_esnowanl:-${nth_max}} + [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} + export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + elif [[ "${step}" = "atmanlfv3inc" ]]; then nth_max=$((npe_node_max / npe_node_atmanlfv3inc)) diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 0876e4127d..866425feb8 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -86,6 +86,16 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "esnowanl" ]]; then + + nth_max=$((npe_node_max / npe_node_esnowanl)) + + export NTHREADS_ESNOWANL=${nth_esnowanl:-${nth_max}} + [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} + export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + elif [[ "${step}" = "atmanlfv3inc" ]]; then nth_max=$((npe_node_max / npe_node_atmanlfv3inc)) 
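Each platform file above gains the same pair of launchers: APRUN_ESNOWANL for the JEDI recentering executable, with NTHREADS_ESNOWANL capped at npe_node_max / npe_node_esnowanl, and APRUN_APPLY_INCR for the six-task increment application. The diff that follows then repairs recenterEns, which had referenced an undefined localconf; condensed, the corrected invocation is:

    exec_cmd = Executable(self.task_config.APRUN_ESNOWANL)   # multi-token launcher string, e.g. "srun -n 8"
    exec_cmd.add_default_arg(os.path.join(self.task_config.DATA, 'gdasapp_land_ensrecenter.x'))
    exec_cmd.add_default_arg(self.task_config.jedi_yaml)
    exec_cmd()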
diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 81eef768cd..c18bfe1473 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -185,10 +185,10 @@ def recenterEns(self) -> None: Instance of the SnowEnsAnalysis object """ logger.info("Running recentering code") - exec_cmd = Executable(localconf.APRUN_ESNOWANL) - exec_name = os.path.join(localconf.DATA, 'gdasapp_land_ensrecenter.x') + exec_cmd = Executable(self.task_config.APRUN_ESNOWANL) + exec_name = os.path.join(self.task_config.DATA, 'gdasapp_land_ensrecenter.x') exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg(localconf.jedi_yaml) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: logger.debug(f"Executing {exec_cmd}") From 39624f8d03c3e5f3a93b8e1a8d2f8367f2786347 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 7 Jun 2024 14:36:55 +0000 Subject: [PATCH 22/90] update snow stage yaml --- parm/gdas/snow_stage_ens_update.yaml.j2 | 1 + 1 file changed, 1 insertion(+) diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 index 1d4fdcc15a..9a2e095785 100644 --- a/parm/gdas/snow_stage_ens_update.yaml.j2 +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -18,6 +18,7 @@ mkdir: - "{{ DATA }}/bkg/det_ensres" - "{{ DATA }}/inc/det" - "{{ DATA }}/inc/det_ensres" +- "{{ DATA }}//inc/ensmean {% for mem in range(1, NMEM_ENS + 1) %} - "{{ DATA }}/bkg/mem{{ '%03d' % mem }}" {% endfor %} From 758287dc77dbaea0c8347ed670cc5f4bd18ced84 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 7 Jun 2024 14:39:28 +0000 Subject: [PATCH 23/90] whoops --- parm/gdas/snow_stage_ens_update.yaml.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 index 9a2e095785..43430ca682 100644 --- a/parm/gdas/snow_stage_ens_update.yaml.j2 +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -18,7 +18,7 @@ mkdir: - "{{ DATA }}/bkg/det_ensres" - "{{ DATA }}/inc/det" - "{{ DATA }}/inc/det_ensres" -- "{{ DATA }}//inc/ensmean +- "{{ DATA }}//inc/ensmean" {% for mem in range(1, NMEM_ENS + 1) %} - "{{ DATA }}/bkg/mem{{ '%03d' % mem }}" {% endfor %} From b65f381fb3e50b940cfe70a4a78882288f448d99 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 7 Jun 2024 15:39:18 +0000 Subject: [PATCH 24/90] hack increment times --- parm/gdas/snow_stage_ens_update.yaml.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 index 43430ca682..9d54fc03f7 100644 --- a/parm/gdas/snow_stage_ens_update.yaml.j2 +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -71,7 +71,7 @@ copy: {% set com_ns.COM_SNOW_ANALYSIS_MEM = com_ns.COM_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} {% endfor %} {% for tile in range(1, 7) %} -- ["{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/inc/det/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/inc/det/snowinc.{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} ###################################### # copy ensemble background files From fd15a24ff50c36334fe08d89db9269404d4f16b8 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 7 Jun 2024 15:43:46 +0000 Subject: [PATCH 25/90] dates for 
increment --- ush/python/pygfs/task/snowens_analysis.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index c18bfe1473..0f5fa2ed86 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -151,10 +151,10 @@ def regridDetInc(self) -> None: arg_list = [ f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc", f"--input_dir ./inc/det/", - f"--input_file snowinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data", + f"--input_file snowinc.{to_fv3time(self.task_config.bkg_time)}.sfc_data", f"--scalar_field snodl", f"--output_dir ./inc/det_ensres/", - f"--output_file snowinc.{to_fv3time(self.task_config.current_cycle)}.ensres.sfc_data", + f"--output_file snowinc.{to_fv3time(self.task_config.bkg_time)}.ensres.sfc_data", f"--output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", f"--interp_method conserve_order1", f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data", From 4cfbe312ab1b6825b461b398c6dc9ea94846c947 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 7 Jun 2024 15:48:41 +0000 Subject: [PATCH 26/90] fregrid names --- ush/python/pygfs/task/snowens_analysis.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 0f5fa2ed86..18a0765e1c 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -116,7 +116,7 @@ def regridDetBkg(self) -> None: f"--input_file {to_fv3time(self.task_config.bkg_time)}.sfc_data", f"--scalar_field snodl", f"--output_dir ./bkg/det_ensres/", - f"--output_file {to_fv3time(self.task_config.bkg_time)}.ensres.sfc_data", + f"--output_file {to_fv3time(self.task_config.bkg_time)}.sfc_data", f"--output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", f"--interp_method conserve_order1", f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data", @@ -154,7 +154,7 @@ def regridDetInc(self) -> None: f"--input_file snowinc.{to_fv3time(self.task_config.bkg_time)}.sfc_data", f"--scalar_field snodl", f"--output_dir ./inc/det_ensres/", - f"--output_file snowinc.{to_fv3time(self.task_config.bkg_time)}.ensres.sfc_data", + f"--output_file snowinc.{to_fv3time(self.task_config.bkg_time)}.sfc_data", f"--output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", f"--interp_method conserve_order1", f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data", From 9145004952cfbe3ea09ae4aa0006c5c907025e5d Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 7 Jun 2024 18:34:05 +0000 Subject: [PATCH 27/90] updates to test --- parm/config/gfs/config.esnowanl | 4 ++ parm/gdas/snow_stage_ens_update.yaml.j2 | 3 ++ scripts/exglobal_snow_ensemble_analysis.py | 2 +- ush/python/pygfs/task/snowens_analysis.py | 45 +++++++++++++++------- 4 files changed, 39 insertions(+), 15 deletions(-) diff --git a/parm/config/gfs/config.esnowanl b/parm/config/gfs/config.esnowanl index 5e814decaa..7d67ddac16 100644 --- a/parm/config/gfs/config.esnowanl +++ b/parm/config/gfs/config.esnowanl @@ -15,6 +15,10 @@ export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" export SNOW_ENS_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_ens_update.yaml.j2" export SNOW_OROG_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_orog.yaml.j2" +# Name of the executable that applies increment to bkg and 
its namelist template +export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe" +export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/ens_apply_incr_nml.j2" + export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 index 9d54fc03f7..d4b03102d2 100644 --- a/parm/gdas/snow_stage_ens_update.yaml.j2 +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -21,6 +21,7 @@ mkdir: - "{{ DATA }}//inc/ensmean" {% for mem in range(1, NMEM_ENS + 1) %} - "{{ DATA }}/bkg/mem{{ '%03d' % mem }}" +- "{{ DATA }}/anl/mem{{ '%03d' % mem }}" {% endfor %} copy: ###################################### @@ -96,7 +97,9 @@ copy: {% set replace_term = tmpl_dict[key] %} {% set com_prev_ns.COM_ATMOS_RESTART_MEM = com_prev_ns.COM_ATMOS_RESTART_MEM.replace(search_term, replace_term) %} {% endfor %} +# we need to copy them to two places, one serves as the basis for the analysis {% for tile in range(1, 7) %} - ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} {% endfor %} \ No newline at end of file diff --git a/scripts/exglobal_snow_ensemble_analysis.py b/scripts/exglobal_snow_ensemble_analysis.py index 2c40d31032..f4a8ffbb7d 100755 --- a/scripts/exglobal_snow_ensemble_analysis.py +++ b/scripts/exglobal_snow_ensemble_analysis.py @@ -23,5 +23,5 @@ anl.regridDetBkg() anl.regridDetInc() anl.recenterEns() - anl.addIncrements() + anl.addEnsIncrements() anl.finalize() diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 18a0765e1c..b984a2fe81 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -114,7 +114,7 @@ def regridDetBkg(self) -> None: f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc", f"--input_dir ./bkg/det/", f"--input_file {to_fv3time(self.task_config.bkg_time)}.sfc_data", - f"--scalar_field snodl", + f"--scalar_field snodl,slmsk", f"--output_dir ./bkg/det_ensres/", f"--output_file {to_fv3time(self.task_config.bkg_time)}.sfc_data", f"--output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", @@ -210,6 +210,35 @@ def finalize(self) -> None: Instance of the SnowEnsAnalysis object """ + @staticmethod + @logit(logger) + def addEnsIncrements(self) -> None: + """Loop through all ensemble members and apply increment to create + a surface analysis for snow + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + for mem in range(1, self.task_config.NMEM_ENS + 1): + # for now, just looping serially, should parallelize this eventually + logger.info(f"Now applying increment to member mem{mem:03}") + chdir(os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}")) + memdict = { + 'HOMEgfs': self.task_config.HOMEgfs, + 'DATA': os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}"), + 'current_cycle': self.task_config.bkg_time, + 'CASE_ENS': self.task_config.CASE_ENS, + 'OCNRES': self.task_config.OCNRES, + 'ntiles': 6, + 'ENS_APPLY_INCR_NML_TMPL': self.task_config.ENS_APPLY_INCR_NML_TMPL, + 'APPLY_INCR_EXE': self.task_config.APPLY_INCR_EXE, + 'APRUN_APPLY_INCR': self.task_config.APRUN_APPLY_INCR, + } + self.add_increments(memdict) + + @staticmethod @logit(logger) def 
get_bkg_dict(config: Dict) -> Dict[str, List[str]]: @@ -251,7 +280,6 @@ def add_increments(config: Dict) -> None: Dictionary of key-value pairs needed in this method Should contain the following keys: HOMEgfs - COM_ATMOS_RESTART_PREV DATA current_cycle CASE @@ -269,19 +297,8 @@ def add_increments(config: Dict) -> None: All other exceptions """ - # need backgrounds to create analysis from increments after LETKF - logger.info("Copy backgrounds into anl/ directory for creating analysis from increments") - template = f'{to_fv3time(config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' - anllist = [] - for itile in range(1, config.ntiles + 1): - filename = template.format(tilenum=itile) - src = os.path.join(config.COM_ATMOS_RESTART_PREV, filename) - dest = os.path.join(config.DATA, "anl", filename) - anllist.append([src, dest]) - FileHandler({'copy': anllist}).sync() - logger.info("Create namelist for APPLY_INCR_EXE") - nml_template = config.APPLY_INCR_NML_TMPL + nml_template = config.ENS_APPLY_INCR_NML_TMPL nml_data = Jinja(nml_template, config).render logger.debug(f"apply_incr_nml:\n{nml_data}") From aeb3f792244b3990f7081d6ab10a11ae487611fc Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 7 Jun 2024 18:53:07 +0000 Subject: [PATCH 28/90] fix method call --- scripts/exglobal_snow_ensemble_analysis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/exglobal_snow_ensemble_analysis.py b/scripts/exglobal_snow_ensemble_analysis.py index 2c40d31032..f4a8ffbb7d 100755 --- a/scripts/exglobal_snow_ensemble_analysis.py +++ b/scripts/exglobal_snow_ensemble_analysis.py @@ -23,5 +23,5 @@ anl.regridDetBkg() anl.regridDetInc() anl.recenterEns() - anl.addIncrements() + anl.addEnsIncrements() anl.finalize() From f23aa20ee9d4b75a629ee151f0dea1b1e59aac65 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 7 Jun 2024 18:55:41 +0000 Subject: [PATCH 29/90] coding norms --- ush/python/pygfs/task/snowens_analysis.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index b984a2fe81..06fa6be764 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -238,7 +238,6 @@ def addEnsIncrements(self) -> None: } self.add_increments(memdict) - @staticmethod @logit(logger) def get_bkg_dict(config: Dict) -> Dict[str, List[str]]: From 4ba1cdc165786a1d3f86c189223239465671444e Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 7 Jun 2024 20:32:13 +0000 Subject: [PATCH 30/90] end of week commit --- parm/config/gfs/config.esnowanl | 2 +- sorc/gdas.cd | 2 +- ush/python/pygfs/task/snowens_analysis.py | 32 ++++++++++++++--------- 3 files changed, 21 insertions(+), 15 deletions(-) diff --git a/parm/config/gfs/config.esnowanl b/parm/config/gfs/config.esnowanl index 7d67ddac16..a1fea1b4cf 100644 --- a/parm/config/gfs/config.esnowanl +++ b/parm/config/gfs/config.esnowanl @@ -17,7 +17,7 @@ export SNOW_OROG_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_orog.yaml.j2" # Name of the executable that applies increment to bkg and its namelist template export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe" -export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/ens_apply_incr_nml.j2" +export ENS_APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/ens_apply_incr_nml.j2" export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ diff --git a/sorc/gdas.cd b/sorc/gdas.cd index e1ed25d5b4..ebb9c7d4f4 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 
e1ed25d5b4852482a2895db6348ea1eb1d9f96b1 +Subproject commit ebb9c7d4f4896cf7927e1f4a180990b6428b02ac diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 06fa6be764..c33ecd68ee 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -210,7 +210,6 @@ def finalize(self) -> None: Instance of the SnowEnsAnalysis object """ - @staticmethod @logit(logger) def addEnsIncrements(self) -> None: """Loop through all ensemble members and apply increment to create @@ -224,18 +223,22 @@ def addEnsIncrements(self) -> None: for mem in range(1, self.task_config.NMEM_ENS + 1): # for now, just looping serially, should parallelize this eventually logger.info(f"Now applying increment to member mem{mem:03}") - chdir(os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}")) - memdict = { - 'HOMEgfs': self.task_config.HOMEgfs, - 'DATA': os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}"), - 'current_cycle': self.task_config.bkg_time, - 'CASE_ENS': self.task_config.CASE_ENS, - 'OCNRES': self.task_config.OCNRES, - 'ntiles': 6, - 'ENS_APPLY_INCR_NML_TMPL': self.task_config.ENS_APPLY_INCR_NML_TMPL, - 'APPLY_INCR_EXE': self.task_config.APPLY_INCR_EXE, - 'APRUN_APPLY_INCR': self.task_config.APRUN_APPLY_INCR, - } + logger.info(f'{os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}")}') + memdict = AttrDict( + { + 'HOMEgfs': self.task_config.HOMEgfs, + 'DATA': os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}"), + 'DATAROOT': self.task_config.DATA, + 'current_cycle': self.task_config.bkg_time, + 'CASE_ENS': self.task_config.CASE_ENS, + 'OCNRES': self.task_config.OCNRES, + 'ntiles': 6, + 'ENS_APPLY_INCR_NML_TMPL': self.task_config.ENS_APPLY_INCR_NML_TMPL, + 'APPLY_INCR_EXE': self.task_config.APPLY_INCR_EXE, + 'APRUN_APPLY_INCR': self.task_config.APRUN_APPLY_INCR, + 'MYMEM': f"{mem:03}", + } + ) self.add_increments(memdict) @staticmethod @@ -280,6 +283,7 @@ def add_increments(config: Dict) -> None: Should contain the following keys: HOMEgfs DATA + DATAROOT current_cycle CASE OCNRES @@ -295,6 +299,7 @@ def add_increments(config: Dict) -> None: WorkflowException All other exceptions """ + os.chdir(config.DATA) logger.info("Create namelist for APPLY_INCR_EXE") nml_template = config.ENS_APPLY_INCR_NML_TMPL @@ -313,6 +318,7 @@ def add_increments(config: Dict) -> None: os.symlink(exe_src, exe_dest) # execute APPLY_INCR_EXE to create analysis files + print(os.getcwd()) exe = Executable(config.APRUN_APPLY_INCR) exe.add_default_arg(os.path.join(config.DATA, os.path.basename(exe_src))) logger.info(f"Executing {exe}") From d0d711a1655e62bcec6640e0fe4ffe3b658f2621 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 7 Jun 2024 20:37:29 +0000 Subject: [PATCH 31/90] one more end of week commit --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index ebb9c7d4f4..e93e79dd26 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit ebb9c7d4f4896cf7927e1f4a180990b6428b02ac +Subproject commit e93e79dd26bb20092341741de968f53456662632 From 18e7bd45c90ba87c44e78b904864d6c51d5efa3f Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Mon, 10 Jun 2024 19:18:36 +0000 Subject: [PATCH 32/90] smore changes --- jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS | 2 +- parm/config/gfs/config.esnowanl | 1 + parm/gdas/snow_finalize_ens_update.yaml.j2 | 63 ++++++++++++++++++++++ ush/python/pygfs/task/snowens_analysis.py | 7 ++- 4 files changed, 71 insertions(+), 2 
deletions(-) create mode 100644 parm/gdas/snow_finalize_ens_update.yaml.j2 diff --git a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS index 98082ca94e..ebc815e94e 100755 --- a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS @@ -21,7 +21,7 @@ export GDUMP ############################################## # Generate COM variables from templates YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_ATMOS_ANALYSIS COM_CONF -YMD=${PDY} HH=${cyc} declare_from_tmpl COM_SNOW_ANALYSIS +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl COM_SNOW_ANALYSIS mkdir -m 775 -p "${COM_SNOW_ANALYSIS}" "${COM_CONF}" diff --git a/parm/config/gfs/config.esnowanl b/parm/config/gfs/config.esnowanl index 7d67ddac16..2fd1e8fb3e 100644 --- a/parm/config/gfs/config.esnowanl +++ b/parm/config/gfs/config.esnowanl @@ -14,6 +14,7 @@ export JCB_ALGO_YAML="${PARMgfs}/gdas/snow/jcb-fv3jedi_land_ensrecenter.yaml.j2" export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" export SNOW_ENS_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_ens_update.yaml.j2" export SNOW_OROG_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_orog.yaml.j2" +export SNOW_ENS_FINALIZE_TMPL="${PARMgfs}/gdas/snow_finalize_ens_update.yaml.j2" # Name of the executable that applies increment to bkg and its namelist template export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe" diff --git a/parm/gdas/snow_finalize_ens_update.yaml.j2 b/parm/gdas/snow_finalize_ens_update.yaml.j2 new file mode 100644 index 0000000000..ad34809805 --- /dev/null +++ b/parm/gdas/snow_finalize_ens_update.yaml.j2 @@ -0,0 +1,63 @@ +###################################### +# set some variables +###################################### +{% set PDY = current_cycle | to_YMD %} +{% set cyc = current_cycle | strftime("%H") %} +{% if DOIAU %} +{% set bkg_time = SNOW_WINDOW_BEGIN | to_fv3time %} +{% else %} +{% set bkg_time = current_cycle | to_fv3time %} +{% endif %} +copy: +###################################### +# copy analyses to directories +###################################### +{% for mem in range(1, NMEM_ENS + 1) %} +# define variables +# Declare a dict of search and replace terms to run on each template +{% set tmpl_dict = {'ROTDIR':ROTDIR, + 'RUN':RUN, + 'YMD':PDY, + 'HH':cyc, + 'MEMDIR':"mem" + '%03d' % mem} %} + +# Replace template variables with tmpl_dict, one key at a time +# This must be done in a namespace to overcome jinja scoping +# Variables set inside of a for loop are lost at the end of the loop +# unless they are part of a namespace +{% set com_ns = namespace(COM_SNOW_ANALYSIS_MEM = COM_SNOW_ANALYSIS_TMPL) %} + +{% for key in tmpl_dict.keys() %} +{% set search_term = '${' + key + '}' %} +{% set replace_term = tmpl_dict[key] %} +{% set com_ns.COM_SNOW_ANALYSIS_MEM = com_ns.COM_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} +{% endfor %} +{% for tile in range(1, 7) %} +- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +{% endfor %} +{% endfor %} +###################################### +# copy ensemble mean increment to COM +###################################### +# define variables +# Declare a dict of search and replace terms to run on each template +{% set tmpl_dict = {'ROTDIR':ROTDIR, + 'RUN':RUN, + 'YMD':PDY, + 'HH':cyc, + 'MEMDIR':"ensmean"} %} + +# Replace template variables with tmpl_dict, one key at a time +# This must be done in a namespace to overcome jinja scoping +# Variables set inside of a for loop 
are lost at the end of the loop +# unless they are part of a namespace +{% set com_ns = namespace(COM_SNOW_ANALYSIS_MEM = COM_SNOW_ANALYSIS_TMPL) %} + +{% for key in tmpl_dict.keys() %} +{% set search_term = '${' + key + '}' %} +{% set replace_term = tmpl_dict[key] %} +{% set com_ns.COM_SNOW_ANALYSIS_MEM = com_ns.COM_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} +{% endfor %} +{% for tile in range(1, 7) %} +- ["{{ DATA }}/inc/ensmean/snowinc.{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/snowinc.{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +{% endfor %} diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 06fa6be764..fa1bccc88e 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -202,13 +202,18 @@ def recenterEns(self) -> None: def finalize(self) -> None: """Performs closing actions of the snow ensemble analysis task This method: - - does nothing yet + - copies the ensemble snow analyses to the proper locations + - copies the ensemble mean increment to COM Parameters ---------- self : Analysis Instance of the SnowEnsAnalysis object """ + # save files to COM + logger.info(f"Copying files described in {self.task_config.SNOW_ENS_FINALIZE_TMPL}") + snow_final_list = parse_j2yaml(self.task_config.SNOW_ENS_FINALIZE_TMPL, self.task_config) + FileHandler(snow_final_list).sync() @staticmethod @logit(logger) From b029f90d2ce82af48c92e64d438b3ab2f31b5e33 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Mon, 10 Jun 2024 19:32:19 +0000 Subject: [PATCH 33/90] ensmean to ensstat --- parm/gdas/snow_finalize_ens_update.yaml.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parm/gdas/snow_finalize_ens_update.yaml.j2 b/parm/gdas/snow_finalize_ens_update.yaml.j2 index ad34809805..f92278398c 100644 --- a/parm/gdas/snow_finalize_ens_update.yaml.j2 +++ b/parm/gdas/snow_finalize_ens_update.yaml.j2 @@ -45,7 +45,7 @@ copy: 'RUN':RUN, 'YMD':PDY, 'HH':cyc, - 'MEMDIR':"ensmean"} %} + 'MEMDIR':"ensstat"} %} # Replace template variables with tmpl_dict, one key at a time # This must be done in a namespace to overcome jinja scoping From 18aaafa4dcfc687697367b7392f80b2392c415a8 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Mon, 10 Jun 2024 20:20:31 +0000 Subject: [PATCH 34/90] IAU changes --- jobs/JGLOBAL_PREP_SNOW_OBS | 6 +++ jobs/JGLOBAL_SNOW_ANALYSIS | 6 +++ jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS | 6 +++ parm/gdas/snow_stage_ens_update.yaml.j2 | 12 ++++- ush/python/pygfs/task/snowens_analysis.py | 62 +++++++++++++++-------- 5 files changed, 70 insertions(+), 22 deletions(-) diff --git a/jobs/JGLOBAL_PREP_SNOW_OBS b/jobs/JGLOBAL_PREP_SNOW_OBS index f5ea3fc122..f3d310eec8 100755 --- a/jobs/JGLOBAL_PREP_SNOW_OBS +++ b/jobs/JGLOBAL_PREP_SNOW_OBS @@ -41,4 +41,10 @@ if [[ -e "${pgmout}" ]] ; then cat "${pgmout}" fi +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + exit 0 diff --git a/jobs/JGLOBAL_SNOW_ANALYSIS b/jobs/JGLOBAL_SNOW_ANALYSIS index b7d8c37060..462108f6af 100755 --- a/jobs/JGLOBAL_SNOW_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ANALYSIS @@ -44,4 +44,10 @@ if [[ -e "${pgmout}" ]] ; then cat "${pgmout}" fi +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + exit 0 diff --git 
a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS index ebc815e94e..c858eb54a7 100755 --- a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS @@ -50,4 +50,10 @@ if [[ -e "${pgmout}" ]] ; then cat "${pgmout}" fi +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + exit 0 diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 index d4b03102d2..22fc1bf44d 100644 --- a/parm/gdas/snow_stage_ens_update.yaml.j2 +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -99,7 +99,15 @@ copy: {% endfor %} # we need to copy them to two places, one serves as the basis for the analysis {% for tile in range(1, 7) %} -- ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] -- ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} +{% if DOIAU %} +# if using IAU, also need backgrounds copied at the beginning of the window +# we need to copy them to two places, one serves as the basis for the analysis +{% for tile in range(1, 7) %} +- ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +{% endfor %} +{% endif %} {% endfor %} \ No newline at end of file diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 5779d4a405..a874b46f82 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -225,26 +225,48 @@ def addEnsIncrements(self) -> None: self : Analysis Instance of the SnowEnsAnalysis object """ - for mem in range(1, self.task_config.NMEM_ENS + 1): - # for now, just looping serially, should parallelize this eventually - logger.info(f"Now applying increment to member mem{mem:03}") - logger.info(f'{os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}")}') - memdict = AttrDict( - { - 'HOMEgfs': self.task_config.HOMEgfs, - 'DATA': os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}"), - 'DATAROOT': self.task_config.DATA, - 'current_cycle': self.task_config.bkg_time, - 'CASE_ENS': self.task_config.CASE_ENS, - 'OCNRES': self.task_config.OCNRES, - 'ntiles': 6, - 'ENS_APPLY_INCR_NML_TMPL': self.task_config.ENS_APPLY_INCR_NML_TMPL, - 'APPLY_INCR_EXE': self.task_config.APPLY_INCR_EXE, - 'APRUN_APPLY_INCR': self.task_config.APRUN_APPLY_INCR, - 'MYMEM': f"{mem:03}", - } - ) - self.add_increments(memdict) + + bkg_times = [] + # no matter what, we want to process the center of the 
window + bkg_times.append(self.task_config.current_cycle) + # if DOIAU, we need to copy the increment to be valid at the center of the window + # and compute the analysis there to restart the model + if self.task_config.DOIAU: + logger.info("Copying increments to beginning of window") + template_in = f'snowinc.{to_fv3time(self.task_config.SNOW_WINDOW_BEGIN)}.sfc_data.tile{{tilenum}}.nc' + template_out = f'snowinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + inclist = [] + for itile in range(1, 7): + filename_in = template_in.format(tilenum=itile) + filename_out = template_out.format(tilenum=itile) + src = os.path.join(self.task_config.DATA, 'inc', 'ensmean', filename_in) + dest = os.path.join(self.task_config.DATA, 'inc', 'ensmean', filename_out) + inclist.append([src, dest]) + FileHandler({'copy': inclist}).sync() + # if running with IAU, we also need an analysis at the beginning of the window + bkg_times.append(self.task_config.SNOW_WINDOW_BEGIN) + + for bkg_time in bkg_times: + for mem in range(1, self.task_config.NMEM_ENS + 1): + # for now, just looping serially, should parallelize this eventually + logger.info(f"Now applying increment to member mem{mem:03}") + logger.info(f'{os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}")}') + memdict = AttrDict( + { + 'HOMEgfs': self.task_config.HOMEgfs, + 'DATA': os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}"), + 'DATAROOT': self.task_config.DATA, + 'current_cycle': bkg_time, + 'CASE_ENS': self.task_config.CASE_ENS, + 'OCNRES': self.task_config.OCNRES, + 'ntiles': 6, + 'ENS_APPLY_INCR_NML_TMPL': self.task_config.ENS_APPLY_INCR_NML_TMPL, + 'APPLY_INCR_EXE': self.task_config.APPLY_INCR_EXE, + 'APRUN_APPLY_INCR': self.task_config.APRUN_APPLY_INCR, + 'MYMEM': f"{mem:03}", + } + ) + self.add_increments(memdict) @staticmethod @logit(logger) From 135c57f3f61f24a02dcbe4c3fec5208fce916037 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Mon, 10 Jun 2024 20:43:23 +0000 Subject: [PATCH 35/90] coding norms + more IAU fixes --- parm/gdas/snow_finalize_ens_update.yaml.j2 | 21 ++++++++++++++------- ush/python/pygfs/task/snowens_analysis.py | 4 ++-- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/parm/gdas/snow_finalize_ens_update.yaml.j2 b/parm/gdas/snow_finalize_ens_update.yaml.j2 index f92278398c..3508a75289 100644 --- a/parm/gdas/snow_finalize_ens_update.yaml.j2 +++ b/parm/gdas/snow_finalize_ens_update.yaml.j2 @@ -3,11 +3,6 @@ ###################################### {% set PDY = current_cycle | to_YMD %} {% set cyc = current_cycle | strftime("%H") %} -{% if DOIAU %} -{% set bkg_time = SNOW_WINDOW_BEGIN | to_fv3time %} -{% else %} -{% set bkg_time = current_cycle | to_fv3time %} -{% endif %} copy: ###################################### # copy analyses to directories @@ -33,8 +28,14 @@ copy: {% set com_ns.COM_SNOW_ANALYSIS_MEM = com_ns.COM_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} {% endfor %} {% for tile in range(1, 7) %} -- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} +{% if DOIAU %} +# if using IAU, also need analyses copied at the beginning of the window +{% for tile in range(1, 7) %} +- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | 
to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +{% endfor %} +{% endif %} {% endfor %} ###################################### # copy ensemble mean increment to COM @@ -59,5 +60,11 @@ copy: {% set com_ns.COM_SNOW_ANALYSIS_MEM = com_ns.COM_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} {% endfor %} {% for tile in range(1, 7) %} -- ["{{ DATA }}/inc/ensmean/snowinc.{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/snowinc.{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ DATA }}/inc/ensmean/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +{% endfor %} +{% if DOIAU %} +# if using IAU, also need increment copied at the beginning of the window +{% for tile in range(1, 7) %} +- ["{{ DATA }}/inc/ensmean/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} +{% endif %} \ No newline at end of file diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index a874b46f82..378fa44d10 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -239,12 +239,12 @@ def addEnsIncrements(self) -> None: for itile in range(1, 7): filename_in = template_in.format(tilenum=itile) filename_out = template_out.format(tilenum=itile) - src = os.path.join(self.task_config.DATA, 'inc', 'ensmean', filename_in) + src = os.path.join(self.task_config.DATA, 'inc', 'ensmean', filename_in) dest = os.path.join(self.task_config.DATA, 'inc', 'ensmean', filename_out) inclist.append([src, dest]) FileHandler({'copy': inclist}).sync() # if running with IAU, we also need an analysis at the beginning of the window - bkg_times.append(self.task_config.SNOW_WINDOW_BEGIN) + bkg_times.append(self.task_config.SNOW_WINDOW_BEGIN) for bkg_time in bkg_times: for mem in range(1, self.task_config.NMEM_ENS + 1): From 9c47927c907ea8c5f2da34ed4f91869060be6432 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 13 Jun 2024 17:18:26 +0000 Subject: [PATCH 36/90] Update gdas --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index ebe1d35420..d31189a6a3 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit ebe1d35420175d1e5dfcdad4fddb80fabf377bde +Subproject commit d31189a6a3d8aa6858aef1c6e99b3db16f7ab313 From 33f60802ff52886fa4ecd814674c77623abdb895 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 13 Jun 2024 18:10:10 +0000 Subject: [PATCH 37/90] update enkf sfc update --- scripts/exgdas_enkf_sfc.sh | 95 ++++++++++++++++++++++++++++++-------- 1 file changed, 76 insertions(+), 19 deletions(-) diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 2720dd5d5f..256671f7c1 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -137,6 +137,7 @@ if [ $DOIAU = "YES" ]; then export TILE_NUM=$n + # Copy inputs from COMIN to DATA for imem in $(seq 1 $NMEM_ENS); do smem=$((imem + mem_offset)) if (( smem > NMEM_ENS_MAX )); then @@ -150,20 +151,26 @@ if [ $DOIAU = "YES" ]; then COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL MEMDIR=${gmemchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl \ - 
COM_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL + COMIN_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMIN_SNOW_ANALYSIS_MEM:COM_SNOW_ANALYSIS_TMPL + + # determine where the input snow restart files come from + if [[ "${DO_JEDISNOWDA:-}" == "YES" ]]; then + sfcdata_dir="${COMIN_SNOW_ANALYSIS_MEM}" + else + sfcdata_dir="${COMIN_ATMOS_RESTART_MEM_PREV}" + fi [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" - ${NCP} "${COM_ATMOS_RESTART_MEM_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ - "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" - ${NLN} "${COM_ATMOS_RESTART_MEM_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ + ${NCP} "${sfcdata_dir}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" - ${NLN} "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" \ - "${DATA}/fnbgso.${cmem}" - ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" - ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" if [[ ${GSI_SOILANAL} = "YES" ]]; then FHR=6 @@ -175,6 +182,33 @@ if [ $DOIAU = "YES" ]; then CDATE="${PDY}${cyc}" ${CYCLESH} export err=$?; err_chk + # Copy outputs from DATA to COMOUT + for imem in $(seq 1 $NMEM_ENS); do + smem=$((imem + mem_offset)) + if (( smem > NMEM_ENS_MAX )); then + smem=$((smem - NMEM_ENS_MAX)) + fi + gmemchar="mem"$(printf %03i "$smem") + cmem=$(printf %03i $imem) + memchar="mem$cmem" + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL + + [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" + ${NCP} "${DATA}/fnbgso.${cmem}" "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" + + + if [[ ${GSI_SOILANAL} = "YES" ]]; then + FHR=6 + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc" \ + "${DATA}/lnd_incr.${cmem}" + fi + done # ensembles + done fi @@ -184,6 +218,7 @@ if [ $DOSFCANL_ENKF = "YES" ]; then export TILE_NUM=$n + # Copy inputs from COMIN to DATA for imem in $(seq 1 $NMEM_ENS); do smem=$((imem + mem_offset)) if (( smem > NMEM_ENS_MAX )); then @@ -193,28 +228,50 @@ if [ $DOSFCANL_ENKF = "YES" ]; then cmem=$(printf %03i $imem) memchar="mem$cmem" - MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ - COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL + COM_SNOW_ANALYSIS_TMPL + + RUN="${GDUMP_ENS}" MEMDIR=${gmemchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMIN_SNOW_ANALYSIS_MEM:COM_SNOW_ANALYSIS_TMPL RUN="${GDUMP_ENS}" MEMDIR=${gmemchar} YMD=${gPDY} HH=${gcyc} declare_from_tmpl \ - COM_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL + COMIN_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL - [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" + # determine where the input snow restart files come from + if [[ "${DO_JEDISNOWDA:-}" == "YES" ]]; then + sfcdata_dir="${COMIN_SNOW_ANALYSIS_MEM}" + else + sfcdata_dir="${COMIN_ATMOS_RESTART_MEM_PREV}" + fi - ${NCP} "${COM_ATMOS_RESTART_MEM_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ - 
"${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" - ${NLN} "${COM_ATMOS_RESTART_MEM_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ + ${NCP} "${sfcdata_dir}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" - ${NLN} "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" \ - "${DATA}/fnbgso.${cmem}" - ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" - ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" done CDATE="${PDY}${cyc}" ${CYCLESH} export err=$?; err_chk + # Copy outputs from DATA to COMOUT + for imem in $(seq 1 $NMEM_ENS); do + smem=$((imem + mem_offset)) + if (( smem > NMEM_ENS_MAX )); then + smem=$((smem - NMEM_ENS_MAX)) + fi + gmemchar="mem"$(printf %03i "$smem") + cmem=$(printf %03i $imem) + memchar="mem$cmem" + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL + + [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" + + ${NCP} "${DATA}/fnbgso.${cmem}" "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" + + done + done fi From 0643c9589344e0a6be396c4eccb0d55ba565be1f Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 13 Jun 2024 18:57:37 +0000 Subject: [PATCH 38/90] more changes --- ci/cases/pr/C96_atmaerosnowDA.yaml | 2 +- parm/gdas/snow_finalize_ens_update.yaml.j2 | 2 +- parm/gdas/snow_stage_ens_update.yaml.j2 | 2 +- parm/gdas/snow_stage_orog.yaml.j2 | 2 +- scripts/exgdas_enkf_sfc.sh | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/ci/cases/pr/C96_atmaerosnowDA.yaml b/ci/cases/pr/C96_atmaerosnowDA.yaml index edde37cbf7..ade1b253c9 100644 --- a/ci/cases/pr/C96_atmaerosnowDA.yaml +++ b/ci/cases/pr/C96_atmaerosnowDA.yaml @@ -11,7 +11,7 @@ arguments: icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48 idate: 2021122012 edate: 2021122100 - nens: 0 + nens: 80 gfs_cyc: 1 start: cold yaml: {{ HOMEgfs }}/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml diff --git a/parm/gdas/snow_finalize_ens_update.yaml.j2 b/parm/gdas/snow_finalize_ens_update.yaml.j2 index 3508a75289..dbd6eea2f0 100644 --- a/parm/gdas/snow_finalize_ens_update.yaml.j2 +++ b/parm/gdas/snow_finalize_ens_update.yaml.j2 @@ -67,4 +67,4 @@ copy: {% for tile in range(1, 7) %} - ["{{ DATA }}/inc/ensmean/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} -{% endif %} \ No newline at end of file +{% endif %} diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 index 22fc1bf44d..02e1846fed 100644 --- a/parm/gdas/snow_stage_ens_update.yaml.j2 +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -110,4 +110,4 @@ copy: - ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} {% endif %} -{% endfor %} \ No newline at end of file +{% endfor %} diff --git a/parm/gdas/snow_stage_orog.yaml.j2 b/parm/gdas/snow_stage_orog.yaml.j2 index 9d66fa6fe6..cc0d1bc01a 100644 --- a/parm/gdas/snow_stage_orog.yaml.j2 +++ b/parm/gdas/snow_stage_orog.yaml.j2 @@ -9,4 +9,4 @@ copy: - ["{{ FIXgfs }}/orog/{{ 
CASE_ENS }}/{{ CASE_ENS }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_grid.tile{{ tile }}.nc"] - ["{{ FIXgfs }}/orog/{{ CASE }}/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] - ["{{ FIXgfs }}/orog/{{ CASE_ENS }}/{{ CASE_ENS }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] -{% endfor %} \ No newline at end of file +{% endfor %} diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 256671f7c1..b6778badbb 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -159,7 +159,7 @@ if [ $DOIAU = "YES" ]; then MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ COMIN_SNOW_ANALYSIS_MEM:COM_SNOW_ANALYSIS_TMPL - # determine where the input snow restart files come from + # determine where the input snow restart files come from if [[ "${DO_JEDISNOWDA:-}" == "YES" ]]; then sfcdata_dir="${COMIN_SNOW_ANALYSIS_MEM}" else From 54539cd60075432fb3dce0117b81eb2f5abe498a Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 13 Jun 2024 20:11:30 +0000 Subject: [PATCH 39/90] update ci case --- ci/cases/pr/C96_atmaerosnowDA.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ci/cases/pr/C96_atmaerosnowDA.yaml b/ci/cases/pr/C96_atmaerosnowDA.yaml index ade1b253c9..923480de9c 100644 --- a/ci/cases/pr/C96_atmaerosnowDA.yaml +++ b/ci/cases/pr/C96_atmaerosnowDA.yaml @@ -6,12 +6,13 @@ arguments: pslot: {{ 'pslot' | getenv }} app: ATMA resdetatmos: 96 + resensatmos: 48 comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48 idate: 2021122012 edate: 2021122100 - nens: 80 + nens: 2 gfs_cyc: 1 start: cold yaml: {{ HOMEgfs }}/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml From 554b5456f6bb0fbf51441902c1c7d9ab3333ee79 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 14 Jun 2024 12:21:11 +0000 Subject: [PATCH 40/90] fix dependencies --- sorc/gdas.cd | 2 +- workflow/rocoto/gfs_tasks.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index d31189a6a3..36a3f98b61 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit d31189a6a3d8aa6858aef1c6e99b3db16f7ab313 +Subproject commit 36a3f98b613ea4324a34ee3bd098547c0d770593 diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index bfc05c2d52..ec7035c9ff 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -605,9 +605,9 @@ def snowanl(self): def esnowanl(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}prepsnowobs'} + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}prepsnowobs'} deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{self.cdump}snowanl'} + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}snowanl'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) From aa4311874745e862c32052130c1543f8e7988592 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 14 Jun 2024 12:44:40 +0000 Subject: [PATCH 41/90] ignore these bufr2ioda links --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 8314f5bae9..64a30900fc 100644 --- a/.gitignore +++ b/.gitignore @@ -190,6 +190,7 @@ ush/month_name.sh ush/imsfv3_scf2ioda.py ush/atparse.bash 
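The dependency fix above works because prepsnowobs and snowanl run in the deterministic gdas suite while esnowanl runs under enkfgdas; stripping the "enkf" prefix yields the right upstream task names. A short sketch:

cdump = "enkfgdas"
deps = []
for upstream in ("prepsnowobs", "snowanl"):
    dep_dict = {'type': 'task', 'name': f'{cdump.replace("enkf", "")}{upstream}'}
    deps.append(dep_dict)  # the real code wraps each dict in rocoto.add_dependency
print([d['name'] for d in deps])  # ['gdasprepsnowobs', 'gdassnowanl']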
ush/run_bufr2ioda.py +ush/bufr2ioda_insitu* # version files versions/build.ver From fa8dd8a1a5cccd1a4dc79007ce23937c5f145e39 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 14 Jun 2024 14:14:56 +0000 Subject: [PATCH 42/90] fixing some bugs --- scripts/exgdas_enkf_sfc.sh | 2 ++ workflow/rocoto/gfs_tasks.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index b6778badbb..d2940fbab9 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -169,6 +169,7 @@ if [ $DOIAU = "YES" ]; then [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" ${NCP} "${sfcdata_dir}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" + ${NCP} "${DATA}/fnbgsi.${cmem}" "${DATA}/fnbgso.${cmem}" ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" @@ -245,6 +246,7 @@ if [ $DOSFCANL_ENKF = "YES" ]; then ${NCP} "${sfcdata_dir}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" + ${NCP} "${DATA}/fnbgsi.${cmem}" "${DATA}/fnbgso.${cmem}" ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index ec7035c9ff..dc6eb06dfe 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -609,6 +609,8 @@ def esnowanl(self): deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}snowanl'} deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}epmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('esnowanl') From ea0bbf9b3433d9c26aae526c8c1010a888e9dcb3 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 14 Jun 2024 16:38:44 +0000 Subject: [PATCH 43/90] shellcheck --- jobs/JGLOBAL_PREP_SNOW_OBS | 4 ++-- jobs/JGLOBAL_SNOW_ANALYSIS | 4 ++-- jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS | 9 +++------ scripts/exgdas_enkf_sfc.sh | 8 ++++---- 4 files changed, 11 insertions(+), 14 deletions(-) diff --git a/jobs/JGLOBAL_PREP_SNOW_OBS b/jobs/JGLOBAL_PREP_SNOW_OBS index f3d310eec8..0e3557697d 100755 --- a/jobs/JGLOBAL_PREP_SNOW_OBS +++ b/jobs/JGLOBAL_PREP_SNOW_OBS @@ -44,7 +44,7 @@ fi ########################################## # Remove the Temporary working directory ########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} +cd "${DATAROOT}" || exit 1 +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" exit 0 diff --git a/jobs/JGLOBAL_SNOW_ANALYSIS b/jobs/JGLOBAL_SNOW_ANALYSIS index 462108f6af..e0f24fa624 100755 --- a/jobs/JGLOBAL_SNOW_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ANALYSIS @@ -47,7 +47,7 @@ fi ########################################## # Remove the Temporary working directory ########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} +cd "${DATAROOT}" || exit 1 +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" exit 0 diff --git a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS index c858eb54a7..42230136e6 100755 --- a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS @@ -9,9 +9,6 @@ 
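The epmn dependency added above points at the previous cycle's metatask through a negative -HH:MM:SS offset built from the cycle interval. A sketch with a simplified stand-in for wxflow's timedelta_to_HMS helper and an assumed 6-hour interval:

from datetime import timedelta

def timedelta_to_HMS(td):  # simplified stand-in for the wxflow helper
    hours, rem = divmod(int(td.total_seconds()), 3600)
    minutes, seconds = divmod(rem, 60)
    return f"{hours:02d}:{minutes:02d}:{seconds:02d}"

cycle_interval = timedelta(hours=6)  # assumed
dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn',
            'offset': f"-{timedelta_to_HMS(cycle_interval)}"}  # -> -06:00:00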
source "${HOMEgfs}/ush/jjob_header.sh" -e "esnowanl" -c "base esnowanl" ############################################## # Ignore possible spelling error (nothing is misspelled) # shellcheck disable=SC2153 -GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -gcyc=${GDATE:8:2} GDUMP="gdas" GDUMP_ENS="enkf${GDUMP}" export GDUMP @@ -25,7 +22,7 @@ MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl COM_SNOW_ANALYSIS mkdir -m 775 -p "${COM_SNOW_ANALYSIS}" "${COM_CONF}" -for imem in $(seq 1 ${NMEM_ENS}); do +for imem in $(seq 1 "${NMEM_ENS}"); do memchar="mem$(printf %03i "${imem}")" MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${PDY} HH=${cyc} declare_from_tmpl COM_SNOW_ANALYSIS mkdir -m 775 -p "${COM_SNOW_ANALYSIS}" @@ -53,7 +50,7 @@ fi ########################################## # Remove the Temporary working directory ########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} +cd "${DATAROOT}" || exit 1 +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" exit 0 diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index d2940fbab9..4c15cc048e 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -256,14 +256,14 @@ if [ $DOSFCANL_ENKF = "YES" ]; then export err=$?; err_chk # Copy outputs from DATA to COMOUT - for imem in $(seq 1 $NMEM_ENS); do + for imem in $(seq 1 "${NMEM_ENS}"); do smem=$((imem + mem_offset)) if (( smem > NMEM_ENS_MAX )); then smem=$((smem - NMEM_ENS_MAX)) fi - gmemchar="mem"$(printf %03i "$smem") - cmem=$(printf %03i $imem) - memchar="mem$cmem" + gmemchar="mem"$(printf %03i "${smem}") + cmem=$(printf %03i "${imem}") + memchar="mem${cmem}" MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL From eb4cb43ceb13f6a9b12133161c3bfda2c74e0b24 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 14 Jun 2024 16:46:17 +0000 Subject: [PATCH 44/90] more shellcheck --- scripts/exgdas_enkf_sfc.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 4c15cc048e..4bb7c6f48a 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -281,8 +281,8 @@ fi ################################################################################ # Postprocessing -cd $pwd -[[ $mkdata = "YES" ]] && rm -rf $DATA +cd "${pwd}" || exit 1 +[[ ${mkdata} = "YES" ]] && rm -rf "${DATA}" -exit $err +exit ${err} From a5a54eca55fd8d33d6bf11b916fdd3583198afeb Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 27 Jun 2024 13:41:47 +0000 Subject: [PATCH 45/90] update gdas hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index acbc389320..2daa9fd051 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit acbc38932050921cd7a3b021ea6aa1a5b1279216 +Subproject commit 2daa9fd0514bd164433dcd1b7ef9796329163bc8 From f31a90e979c6b0d9890b97dc8638ae9e6b3f1bda Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 28 Jun 2024 14:36:31 +0000 Subject: [PATCH 46/90] address reviewer comments --- env/HERA.env | 4 +- env/HERCULES.env | 4 +- env/JET.env | 4 +- env/ORION.env | 4 +- env/S4.env | 4 +- env/WCOSS2.env | 4 +- jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS | 10 +-- parm/config/gfs/config.resources | 2 + parm/gdas/snow_finalize_ens_update.yaml.j2 | 72 +++++++++--------- parm/gdas/snow_stage_ens_update.yaml.j2 | 86 +++++++++++----------- ush/python/pygfs/task/snowens_analysis.py | 2 +- 11 
files changed, 99 insertions(+), 97 deletions(-) diff --git a/env/HERA.env b/env/HERA.env index 7cccfb3b4f..4ea463d6c3 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -112,7 +112,7 @@ elif [[ "${step}" = "snowanl" ]]; then [[ ${NTHREADS_SNOWANL} -gt ${nth_max} ]] && export NTHREADS_SNOWANL=${nth_max} export APRUN_SNOWANL="${launcher} -n ${npe_snowanl} --cpus-per-task=${NTHREADS_SNOWANL}" - export APRUN_APPLY_INCR="${launcher} -n 6" + export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" elif [[ "${step}" = "esnowanl" ]]; then @@ -122,7 +122,7 @@ elif [[ "${step}" = "esnowanl" ]]; then [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" - export APRUN_APPLY_INCR="${launcher} -n 6" + export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" elif [[ "${step}" = "ocnanalbmat" ]]; then diff --git a/env/HERCULES.env b/env/HERCULES.env index 8c8e3119c5..6903be977e 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -110,7 +110,7 @@ case ${step} in [[ ${NTHREADS_SNOWANL} -gt ${nth_max} ]] && export NTHREADS_SNOWANL=${nth_max} export APRUN_SNOWANL="${launcher} -n ${npe_snowanl} --cpus-per-task=${NTHREADS_SNOWANL}" - export APRUN_APPLY_INCR="${launcher} -n 6" + export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" ;; "esnowanl") @@ -120,7 +120,7 @@ case ${step} in [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" - export APRUN_APPLY_INCR="${launcher} -n 6" + export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" ;; "ocnanalbmat") diff --git a/env/JET.env b/env/JET.env index 4d4ed2f9fd..4455993845 100755 --- a/env/JET.env +++ b/env/JET.env @@ -92,7 +92,7 @@ elif [[ "${step}" = "snowanl" ]]; then [[ ${NTHREADS_SNOWANL} -gt ${nth_max} ]] && export NTHREADS_SNOWANL=${nth_max} export APRUN_SNOWANL="${launcher} -n ${npe_snowanl}" - export APRUN_APPLY_INCR="${launcher} -n 6" + export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" elif [[ "${step}" = "esnowanl" ]]; then @@ -102,7 +102,7 @@ elif [[ "${step}" = "esnowanl" ]]; then [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" - export APRUN_APPLY_INCR="${launcher} -n 6" + export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" elif [[ "${step}" = "atmanlfv3inc" ]]; then diff --git a/env/ORION.env b/env/ORION.env index a9f9384b65..2f1bbe18a6 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -100,7 +100,7 @@ elif [[ "${step}" = "snowanl" ]]; then [[ ${NTHREADS_SNOWANL} -gt ${nth_max} ]] && export NTHREADS_SNOWANL=${nth_max} export APRUN_SNOWANL="${launcher} -n ${npe_snowanl} --cpus-per-task=${NTHREADS_SNOWANL}" - export APRUN_APPLY_INCR="${launcher} -n 6" + export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" elif [[ "${step}" = "esnowanl" ]]; then @@ -110,7 +110,7 @@ elif [[ "${step}" = "esnowanl" ]]; then [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" - export APRUN_APPLY_INCR="${launcher} -n 6" + export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" elif [[ "${step}" = "atmanlfv3inc" ]]; then diff --git a/env/S4.env b/env/S4.env index 3da5e3b042..9777f4e755 100755 --- a/env/S4.env +++ b/env/S4.env @@ -92,7 +92,7 @@ elif [[ "${step}" 
= "snowanl" ]]; then [[ ${NTHREADS_SNOWANL} -gt ${nth_max} ]] && export NTHREADS_SNOWANL=${nth_max} export APRUN_SNOWANL="${launcher} -n ${npe_snowanl}" - export APRUN_APPLY_INCR="${launcher} -n 6" + export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" elif [[ "${step}" = "esnowanl" ]]; then @@ -102,7 +102,7 @@ elif [[ "${step}" = "esnowanl" ]]; then [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" - export APRUN_APPLY_INCR="${launcher} -n 6" + export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" elif [[ "${step}" = "atmanlfv3inc" ]]; then diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 143e5901fc..b22241bcd5 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -85,7 +85,7 @@ elif [[ "${step}" = "snowanl" ]]; then [[ ${NTHREADS_SNOWANL} -gt ${nth_max} ]] && export NTHREADS_SNOWANL=${nth_max} export APRUN_SNOWANL="${launcher} -n ${npe_snowanl}" - export APRUN_APPLY_INCR="${launcher} -n 6" + export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}6" elif [[ "${step}" = "esnowanl" ]]; then @@ -95,7 +95,7 @@ elif [[ "${step}" = "esnowanl" ]]; then [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" - export APRUN_APPLY_INCR="${launcher} -n 6" + export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" elif [[ "${step}" = "atmanlfv3inc" ]]; then diff --git a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS index 42230136e6..2a3593c8b8 100755 --- a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS @@ -17,15 +17,15 @@ export GDUMP # Begin JOB SPECIFIC work ############################################## # Generate COM variables from templates -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_ATMOS_ANALYSIS COM_CONF -MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl COM_SNOW_ANALYSIS +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMIN_OBS COMOUT_ATMOS_ANALYSIS COMOUT_CONF +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl COMOUT_SNOW_ANALYSIS -mkdir -m 775 -p "${COM_SNOW_ANALYSIS}" "${COM_CONF}" +mkdir -m 775 -p "${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" for imem in $(seq 1 "${NMEM_ENS}"); do memchar="mem$(printf %03i "${imem}")" - MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${PDY} HH=${cyc} declare_from_tmpl COM_SNOW_ANALYSIS - mkdir -m 775 -p "${COM_SNOW_ANALYSIS}" + MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${PDY} HH=${cyc} declare_from_tmpl COMOUT_SNOW_ANALYSIS + mkdir -m 775 -p "${COMOUT_SNOW_ANALYSIS}" done ############################################################### diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 5be59cc561..b80a67018e 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -369,6 +369,7 @@ case ${step} in export npe_snowanl=$(( layout_x * layout_y * 6 )) export nth_snowanl=1 export npe_node_snowanl=$(( npe_node_max / nth_snowanl )) + export npe_apply_incr=6 ;; "esnowanl") @@ -398,6 +399,7 @@ case ${step} in export npe_esnowanl=$(( layout_x * layout_y * 6 )) export nth_esnowanl=1 export npe_node_esnowanl=$(( npe_node_max / nth_esnowanl )) + export npe_apply_incr=6 ;; "prepobsaero") diff --git a/parm/gdas/snow_finalize_ens_update.yaml.j2 b/parm/gdas/snow_finalize_ens_update.yaml.j2 index dbd6eea2f0..698223268f 100644 --- a/parm/gdas/snow_finalize_ens_update.yaml.j2 +++ 
b/parm/gdas/snow_finalize_ens_update.yaml.j2 @@ -8,34 +8,34 @@ copy: # copy analyses to directories ###################################### {% for mem in range(1, NMEM_ENS + 1) %} -# define variables -# Declare a dict of search and replace terms to run on each template -{% set tmpl_dict = {'ROTDIR':ROTDIR, - 'RUN':RUN, - 'YMD':PDY, - 'HH':cyc, - 'MEMDIR':"mem" + '%03d' % mem} %} + # define variables + # Declare a dict of search and replace terms to run on each template + {% set tmpl_dict = {'ROTDIR':ROTDIR, + 'RUN':RUN, + 'YMD':PDY, + 'HH':cyc, + 'MEMDIR':"mem" + '%03d' % mem} %} -# Replace template variables with tmpl_dict, one key at a time -# This must be done in a namespace to overcome jinja scoping -# Variables set inside of a for loop are lost at the end of the loop -# unless they are part of a namespace -{% set com_ns = namespace(COM_SNOW_ANALYSIS_MEM = COM_SNOW_ANALYSIS_TMPL) %} + # Replace template variables with tmpl_dict, one key at a time + # This must be done in a namespace to overcome jinja scoping + # Variables set inside of a for loop are lost at the end of the loop + # unless they are part of a namespace + {% set com_ns = namespace(COMOUT_SNOW_ANALYSIS_MEM = COM_SNOW_ANALYSIS_TMPL) %} -{% for key in tmpl_dict.keys() %} -{% set search_term = '${' + key + '}' %} -{% set replace_term = tmpl_dict[key] %} -{% set com_ns.COM_SNOW_ANALYSIS_MEM = com_ns.COM_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} -{% endfor %} -{% for tile in range(1, 7) %} -- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] -{% endfor %} -{% if DOIAU %} -# if using IAU, also need analyses copied at the beginning of the window -{% for tile in range(1, 7) %} -- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] -{% endfor %} -{% endif %} + {% for key in tmpl_dict.keys() %} + {% set search_term = '${' + key + '}' %} + {% set replace_term = tmpl_dict[key] %} + {% set com_ns.COMOUT_SNOW_ANALYSIS_MEM = com_ns.COMOUT_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} + {% endfor %} + {% for tile in range(1, 7) %} +- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COMOUT_SNOW_ANALYSIS_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} + {% if DOIAU == True %} + # if using IAU, also need analyses copied at the beginning of the window + {% for tile in range(1, 7) %} +- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COMOUT_SNOW_ANALYSIS_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} + {% endif %} {% endfor %} ###################################### # copy ensemble mean increment to COM @@ -52,19 +52,19 @@ copy: # This must be done in a namespace to overcome jinja scoping # Variables set inside of a for loop are lost at the end of the loop # unless they are part of a namespace -{% set com_ns = namespace(COM_SNOW_ANALYSIS_MEM = COM_SNOW_ANALYSIS_TMPL) %} +{% set com_ns = namespace(COMOUT_SNOW_ANALYSIS_MEM = COM_SNOW_ANALYSIS_TMPL) %} {% for key in tmpl_dict.keys() %} -{% set search_term = '${' + key + '}' %} -{% set replace_term = tmpl_dict[key] %} -{% set com_ns.COM_SNOW_ANALYSIS_MEM = 
com_ns.COM_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} -{% endfor %} -{% for tile in range(1, 7) %} -- ["{{ DATA }}/inc/ensmean/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% set search_term = '${' + key + '}' %} + {% set replace_term = tmpl_dict[key] %} + {% set com_ns.COMOUT_SNOW_ANALYSIS_MEM = com_ns.COMOUT_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} {% endfor %} -{% if DOIAU %} -# if using IAU, also need increment copied at the beginning of the window {% for tile in range(1, 7) %} -- ["{{ DATA }}/inc/ensmean/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ DATA }}/inc/ensmean/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COMOUT_SNOW_ANALYSIS_MEM }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} +{% if DOIAU == True %} + # if using IAU, also need increment copied at the beginning of the window + {% for tile in range(1, 7) %} +- ["{{ DATA }}/inc/ensmean/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COMOUT_SNOW_ANALYSIS_MEM }}/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} {% endif %} diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 index 02e1846fed..79518b2484 100644 --- a/parm/gdas/snow_stage_ens_update.yaml.j2 +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -5,10 +5,10 @@ {% set gcyc = previous_cycle | strftime("%H") %} {% set PDY = current_cycle | to_YMD %} {% set cyc = current_cycle | strftime("%H") %} -{% if DOIAU %} -{% set bkg_time = SNOW_WINDOW_BEGIN | to_fv3time %} +{% if DOIAU == True %} + {% set bkg_time = SNOW_WINDOW_BEGIN | to_fv3time %} {% else %} -{% set bkg_time = current_cycle | to_fv3time %} + {% set bkg_time = current_cycle | to_fv3time %} {% endif %} ###################################### # create working directories @@ -39,15 +39,15 @@ copy: # This must be done in a namespace to overcome jinja scoping # Variables set inside of a for loop are lost at the end of the loop # unless they are part of a namespace -{% set com_prev_ns = namespace(COM_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_TMPL) %} +{% set com_prev_ns = namespace(COMIN_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_TMPL) %} {% for key in tmpl_dict.keys() %} -{% set search_term = '${' + key + '}' %} -{% set replace_term = tmpl_dict[key] %} -{% set com_prev_ns.COM_ATMOS_RESTART_MEM = com_prev_ns.COM_ATMOS_RESTART_MEM.replace(search_term, replace_term) %} + {% set search_term = '${' + key + '}' %} + {% set replace_term = tmpl_dict[key] %} + {% set com_prev_ns.COMIN_ATMOS_RESTART_MEM = com_prev_ns.COMIN_ATMOS_RESTART_MEM.replace(search_term, replace_term) %} {% endfor %} {% for tile in range(1, 7) %} -- ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/det/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/det/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} ###################################### # copy deterministic increment files @@ -64,50 +64,50 @@ copy: # This must be done in a namespace to overcome jinja scoping # Variables set inside of a for loop are lost at the end of 
the loop # unless they are part of a namespace -{% set com_ns = namespace(COM_SNOW_ANALYSIS_MEM = COM_SNOW_ANALYSIS_TMPL) %} +{% set com_ns = namespace(COMIN_SNOW_ANALYSIS_MEM = COM_SNOW_ANALYSIS_TMPL) %} {% for key in tmpl_dict.keys() %} -{% set search_term = '${' + key + '}' %} -{% set replace_term = tmpl_dict[key] %} -{% set com_ns.COM_SNOW_ANALYSIS_MEM = com_ns.COM_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} + {% set search_term = '${' + key + '}' %} + {% set replace_term = tmpl_dict[key] %} + {% set com_ns.COMIN_SNOW_ANALYSIS_MEM = com_ns.COMIN_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} {% endfor %} {% for tile in range(1, 7) %} -- ["{{ com_ns.COM_SNOW_ANALYSIS_MEM }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/inc/det/snowinc.{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ com_ns.COMIN_SNOW_ANALYSIS_MEM }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/inc/det/snowinc.{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} ###################################### # copy ensemble background files ###################################### {% for mem in range(1, NMEM_ENS + 1) %} -# define variables -# Declare a dict of search and replace terms to run on each template -{% set tmpl_dict = {'ROTDIR':ROTDIR, - 'RUN':RUN, - 'YMD':gPDY, - 'HH':gcyc, - 'MEMDIR':"mem" + '%03d' % mem} %} + # define variables + # Declare a dict of search and replace terms to run on each template + {% set tmpl_dict = {'ROTDIR':ROTDIR, + 'RUN':RUN, + 'YMD':gPDY, + 'HH':gcyc, + 'MEMDIR':"mem" + '%03d' % mem} %} -# Replace template variables with tmpl_dict, one key at a time -# This must be done in a namespace to overcome jinja scoping -# Variables set inside of a for loop are lost at the end of the loop -# unless they are part of a namespace -{% set com_prev_ns = namespace(COM_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_TMPL) %} + # Replace template variables with tmpl_dict, one key at a time + # This must be done in a namespace to overcome jinja scoping + # Variables set inside of a for loop are lost at the end of the loop + # unless they are part of a namespace + {% set com_prev_ns = namespace(COMIN_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_TMPL) %} -{% for key in tmpl_dict.keys() %} -{% set search_term = '${' + key + '}' %} -{% set replace_term = tmpl_dict[key] %} -{% set com_prev_ns.COM_ATMOS_RESTART_MEM = com_prev_ns.COM_ATMOS_RESTART_MEM.replace(search_term, replace_term) %} -{% endfor %} -# we need to copy them to two places, one serves as the basis for the analysis -{% for tile in range(1, 7) %} -- ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] -- ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] -{% endfor %} -{% if DOIAU %} -# if using IAU, also need backgrounds copied at the beginning of the window -# we need to copy them to two places, one serves as the basis for the analysis -{% for tile in range(1, 7) %} -- ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] -- ["{{ com_prev_ns.COM_ATMOS_RESTART_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ 
tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] -{% endfor %} -{% endif %} + {% for key in tmpl_dict.keys() %} + {% set search_term = '${' + key + '}' %} + {% set replace_term = tmpl_dict[key] %} + {% set com_prev_ns.COMIN_ATMOS_RESTART_MEM = com_prev_ns.COMIN_ATMOS_RESTART_MEM.replace(search_term, replace_term) %} + {% endfor %} + # we need to copy them to two places, one serves as the basis for the analysis + {% for tile in range(1, 7) %} +- ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} + {% if DOIAU == True %} + # if using IAU, also need backgrounds copied at the beginning of the window + # we need to copy them to two places, one serves as the basis for the analysis + {% for tile in range(1, 7) %} +- ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} + {% endif %} {% endfor %} diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 378fa44d10..e6764d25ea 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -281,7 +281,7 @@ def get_bkg_dict(config: Dict) -> Dict[str, List[str]]: config: Dict Dictionary of key-value pairs needed in this method Should contain the following keys: - COM_ATMOS_RESTART_PREV + COMIN_ATMOS_RESTART_PREV DATA current_cycle ntiles From 4e64b86520423ddee51f573d942f5743ee7dc477 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 28 Jun 2024 14:54:24 +0000 Subject: [PATCH 47/90] update name of test --- ci/Jenkinsfile | 6 +++--- .../{C96_atmaerosnowDA.yaml => C96_hybatmaerosnowDA.yaml} | 0 2 files changed, 3 insertions(+), 3 deletions(-) rename ci/cases/pr/{C96_atmaerosnowDA.yaml => C96_hybatmaerosnowDA.yaml} (100%) diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile index 956bd692dd..1dda6b2476 100644 --- a/ci/Jenkinsfile +++ b/ci/Jenkinsfile @@ -181,14 +181,14 @@ pipeline { } stage('4. 
Run Tests') { - failFast false + failFast false matrix { agent { label NodeName[machine].toLowerCase() } axes { axis { name 'Case' // TODO add dynamic list of cases from env vars (needs addtional plugins) - values 'C48C48_ufs_hybatmDA', 'C48_ATM', 'C48_S2SW', 'C48_S2SWA_gefs', 'C48mx500_3DVarAOWCDA', 'C96C48_hybatmDA', 'C96_atm3DVar', 'C96_atmaerosnowDA' + values 'C48C48_ufs_hybatmDA', 'C48_ATM', 'C48_S2SW', 'C48_S2SWA_gefs', 'C48mx500_3DVarAOWCDA', 'C96C48_hybatmDA', 'C96_atm3DVar', 'C96_hybatmaerosnowDA' } } stages { @@ -296,6 +296,6 @@ pipeline { } } } - } + } } } diff --git a/ci/cases/pr/C96_atmaerosnowDA.yaml b/ci/cases/pr/C96_hybatmaerosnowDA.yaml similarity index 100% rename from ci/cases/pr/C96_atmaerosnowDA.yaml rename to ci/cases/pr/C96_hybatmaerosnowDA.yaml From 1b9b0855719740ee1b9e6466e8ae1aa5a311cfb9 Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Fri, 28 Jun 2024 10:59:28 -0400 Subject: [PATCH 48/90] Update env/WCOSS2.env Co-authored-by: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> --- env/WCOSS2.env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/env/WCOSS2.env b/env/WCOSS2.env index b22241bcd5..3f951ff46a 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -85,7 +85,7 @@ elif [[ "${step}" = "snowanl" ]]; then [[ ${NTHREADS_SNOWANL} -gt ${nth_max} ]] && export NTHREADS_SNOWANL=${nth_max} export APRUN_SNOWANL="${launcher} -n ${npe_snowanl}" - export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}6" + export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" elif [[ "${step}" = "esnowanl" ]]; then From 80d46305d9d2e58699ed30351a9e66248717bd2a Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 28 Jun 2024 17:22:48 +0000 Subject: [PATCH 49/90] no need for a non-standard DATA directory --- jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS | 1 - 1 file changed, 1 deletion(-) diff --git a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS index 2a3593c8b8..feb9b0441e 100755 --- a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS @@ -1,7 +1,6 @@ #! 
/usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" -export DATA=${DATA:-${DATAROOT}/${RUN}esnowanl_${cyc}} source "${HOMEgfs}/ush/jjob_header.sh" -e "esnowanl" -c "base esnowanl" ############################################## From 9897e710bd0c7f101dbfa5f529264ca2c198bb04 Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Fri, 28 Jun 2024 13:39:46 -0400 Subject: [PATCH 50/90] Update scripts/exgdas_enkf_sfc.sh Co-authored-by: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> --- scripts/exgdas_enkf_sfc.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 4bb7c6f48a..6d32c422a4 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -229,8 +229,6 @@ if [ $DOSFCANL_ENKF = "YES" ]; then cmem=$(printf %03i $imem) memchar="mem$cmem" - COM_SNOW_ANALYSIS_TMPL - RUN="${GDUMP_ENS}" MEMDIR=${gmemchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ COMIN_SNOW_ANALYSIS_MEM:COM_SNOW_ANALYSIS_TMPL From f85c88095809040c6e5d7eb26e1416e7d75d8d0f Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 28 Jun 2024 18:07:00 +0000 Subject: [PATCH 51/90] address reviewer comments --- ci/Jenkinsfile | 2 +- ...owDA.yaml => C96C48_hybatmaerosnowDA.yaml} | 0 jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS | 18 ++++++++------ parm/gdas/snow_finalize_ens_update.yaml.j2 | 21 +++++++--------- parm/gdas/snow_stage_ens_update.yaml.j2 | 24 ++++++++----------- parm/gdas/snow_stage_orog.yaml.j2 | 12 +++++----- scripts/exgdas_enkf_sfc.sh | 6 ++--- 7 files changed, 39 insertions(+), 44 deletions(-) rename ci/cases/pr/{C96_hybatmaerosnowDA.yaml => C96C48_hybatmaerosnowDA.yaml} (100%) diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile index 1dda6b2476..d643a5cb3c 100644 --- a/ci/Jenkinsfile +++ b/ci/Jenkinsfile @@ -188,7 +188,7 @@ pipeline { axis { name 'Case' // TODO add dynamic list of cases from env vars (needs addtional plugins) - values 'C48C48_ufs_hybatmDA', 'C48_ATM', 'C48_S2SW', 'C48_S2SWA_gefs', 'C48mx500_3DVarAOWCDA', 'C96C48_hybatmDA', 'C96_atm3DVar', 'C96_hybatmaerosnowDA' + values 'C48C48_ufs_hybatmDA', 'C48_ATM', 'C48_S2SW', 'C48_S2SWA_gefs', 'C48mx500_3DVarAOWCDA', 'C96C48_hybatmDA', 'C96_atm3DVar', 'C96C48_hybatmaerosnowDA' } } stages { diff --git a/ci/cases/pr/C96_hybatmaerosnowDA.yaml b/ci/cases/pr/C96C48_hybatmaerosnowDA.yaml similarity index 100% rename from ci/cases/pr/C96_hybatmaerosnowDA.yaml rename to ci/cases/pr/C96C48_hybatmaerosnowDA.yaml diff --git a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS index feb9b0441e..9ae9b66eeb 100755 --- a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS @@ -9,22 +9,26 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "esnowanl" -c "base esnowanl" # Ignore possible spelling error (nothing is misspelled) # shellcheck disable=SC2153 GDUMP="gdas" -GDUMP_ENS="enkf${GDUMP}" +GDUMP_ENS="enkfgdas" export GDUMP ############################################## # Begin JOB SPECIFIC work ############################################## # Generate COM variables from templates -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMIN_OBS COMOUT_ATMOS_ANALYSIS COMOUT_CONF -MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl COMOUT_SNOW_ANALYSIS +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMIN_OBS:COM_OBS_TMPL \ + COMOUT_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \ + COMOUT_CONF:COM_CONF_TMPL +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL -mkdir -m 775 -p "${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" +mkdir -p 
"${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" for imem in $(seq 1 "${NMEM_ENS}"); do memchar="mem$(printf %03i "${imem}")" MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${PDY} HH=${cyc} declare_from_tmpl COMOUT_SNOW_ANALYSIS - mkdir -m 775 -p "${COMOUT_SNOW_ANALYSIS}" + mkdir -p "${COMOUT_SNOW_ANALYSIS}" done ############################################################### @@ -33,7 +37,7 @@ done EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exglobal_snow_ensemble_analysis.py} ${EXSCRIPT} status=$? -[[ ${status} -ne 0 ]] && exit "${status}" +(( status != 0 )) && exit "${status}" ############################################## # End JOB SPECIFIC work @@ -50,6 +54,6 @@ fi # Remove the Temporary working directory ########################################## cd "${DATAROOT}" || exit 1 -[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" exit 0 diff --git a/parm/gdas/snow_finalize_ens_update.yaml.j2 b/parm/gdas/snow_finalize_ens_update.yaml.j2 index 698223268f..aad6b14fc0 100644 --- a/parm/gdas/snow_finalize_ens_update.yaml.j2 +++ b/parm/gdas/snow_finalize_ens_update.yaml.j2 @@ -1,8 +1,3 @@ -###################################### -# set some variables -###################################### -{% set PDY = current_cycle | to_YMD %} -{% set cyc = current_cycle | strftime("%H") %} copy: ###################################### # copy analyses to directories @@ -12,8 +7,8 @@ copy: # Declare a dict of search and replace terms to run on each template {% set tmpl_dict = {'ROTDIR':ROTDIR, 'RUN':RUN, - 'YMD':PDY, - 'HH':cyc, + 'YMD':current_cycle | to_YMD , + 'HH':current_cycle | strftime("%H"), 'MEMDIR':"mem" + '%03d' % mem} %} # Replace template variables with tmpl_dict, one key at a time @@ -27,12 +22,12 @@ copy: {% set replace_term = tmpl_dict[key] %} {% set com_ns.COMOUT_SNOW_ANALYSIS_MEM = com_ns.COMOUT_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} {% endfor %} - {% for tile in range(1, 7) %} + {% for tile in range(1, ntiles+1) %} - ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COMOUT_SNOW_ANALYSIS_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} {% if DOIAU == True %} # if using IAU, also need analyses copied at the beginning of the window - {% for tile in range(1, 7) %} + {% for tile in range(1, ntiles+1) %} - ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COMOUT_SNOW_ANALYSIS_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} {% endif %} @@ -44,8 +39,8 @@ copy: # Declare a dict of search and replace terms to run on each template {% set tmpl_dict = {'ROTDIR':ROTDIR, 'RUN':RUN, - 'YMD':PDY, - 'HH':cyc, + 'YMD':current_cycle | to_YMD , + 'HH':current_cycle | strftime("%H"), 'MEMDIR':"ensstat"} %} # Replace template variables with tmpl_dict, one key at a time @@ -59,12 +54,12 @@ copy: {% set replace_term = tmpl_dict[key] %} {% set com_ns.COMOUT_SNOW_ANALYSIS_MEM = com_ns.COMOUT_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} {% endfor %} -{% for tile in range(1, 7) %} +{% for tile in range(1, ntiles+1) %} - ["{{ DATA }}/inc/ensmean/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COMOUT_SNOW_ANALYSIS_MEM }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} {% if DOIAU == True %} # if using IAU, also need increment copied at the beginning of the window - {% for tile in range(1, 7) %} + {% for tile in range(1, 
ntiles+1) %} - ["{{ DATA }}/inc/ensmean/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COMOUT_SNOW_ANALYSIS_MEM }}/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} {% endif %} diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 index 79518b2484..4f1c66f23e 100644 --- a/parm/gdas/snow_stage_ens_update.yaml.j2 +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -1,10 +1,6 @@ ###################################### # set some variables ###################################### -{% set gPDY = previous_cycle | to_YMD %} -{% set gcyc = previous_cycle | strftime("%H") %} -{% set PDY = current_cycle | to_YMD %} -{% set cyc = current_cycle | strftime("%H") %} {% if DOIAU == True %} {% set bkg_time = SNOW_WINDOW_BEGIN | to_fv3time %} {% else %} @@ -31,8 +27,8 @@ copy: # Declare a dict of search and replace terms to run on each template {% set tmpl_dict = {'ROTDIR':ROTDIR, 'RUN':GDUMP, - 'YMD':gPDY, - 'HH':gcyc, + 'YMD':previous_cycle | to_YMD, + 'HH':previous_cycle | strftime("%H"), 'MEMDIR':""} %} # Replace template variables with tmpl_dict, one key at a time @@ -46,7 +42,7 @@ copy: {% set replace_term = tmpl_dict[key] %} {% set com_prev_ns.COMIN_ATMOS_RESTART_MEM = com_prev_ns.COMIN_ATMOS_RESTART_MEM.replace(search_term, replace_term) %} {% endfor %} -{% for tile in range(1, 7) %} +{% for tile in range(1, ntiles+1) %} - ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/det/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} ###################################### @@ -56,8 +52,8 @@ copy: # Declare a dict of search and replace terms to run on each template {% set tmpl_dict = {'ROTDIR':ROTDIR, 'RUN':GDUMP, - 'YMD':PDY, - 'HH':cyc, + 'YMD':current_cycle | to_YMD, + 'HH':current_cycle | strftime("%H"), 'MEMDIR':""} %} # Replace template variables with tmpl_dict, one key at a time @@ -71,7 +67,7 @@ copy: {% set replace_term = tmpl_dict[key] %} {% set com_ns.COMIN_SNOW_ANALYSIS_MEM = com_ns.COMIN_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} {% endfor %} -{% for tile in range(1, 7) %} +{% for tile in range(1, ntiles+1) %} - ["{{ com_ns.COMIN_SNOW_ANALYSIS_MEM }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/inc/det/snowinc.{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} ###################################### @@ -82,8 +78,8 @@ copy: # Declare a dict of search and replace terms to run on each template {% set tmpl_dict = {'ROTDIR':ROTDIR, 'RUN':RUN, - 'YMD':gPDY, - 'HH':gcyc, + 'YMD':previous_cycle | to_YMD, + 'HH':previous_cycle | strftime("%H"), 'MEMDIR':"mem" + '%03d' % mem} %} # Replace template variables with tmpl_dict, one key at a time @@ -98,14 +94,14 @@ copy: {% set com_prev_ns.COMIN_ATMOS_RESTART_MEM = com_prev_ns.COMIN_ATMOS_RESTART_MEM.replace(search_term, replace_term) %} {% endfor %} # we need to copy them to two places, one serves as the basis for the analysis - {% for tile in range(1, 7) %} + {% for tile in range(1, ntiles+1) %} - ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] - ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} {% if DOIAU == True 
%} # if using IAU, also need backgrounds copied at the beginning of the window # we need to copy them to two places, one serves as the basis for the analysis - {% for tile in range(1, 7) %} + {% for tile in range(1, ntiles+1) %} - ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] - ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} diff --git a/parm/gdas/snow_stage_orog.yaml.j2 b/parm/gdas/snow_stage_orog.yaml.j2 index cc0d1bc01a..024b2e03f0 100644 --- a/parm/gdas/snow_stage_orog.yaml.j2 +++ b/parm/gdas/snow_stage_orog.yaml.j2 @@ -2,11 +2,11 @@ mkdir: - "{{ DATA }}/orog/det" - "{{ DATA }}/orog/ens" copy: -- ["{{ FIXgfs }}/orog/{{ CASE }}/{{ CASE }}_mosaic.nc", "{{ DATA }}/orog/det/{{ CASE }}_mosaic.nc"] -- ["{{ FIXgfs }}/orog/{{ CASE_ENS }}/{{ CASE_ENS }}_mosaic.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_mosaic.nc"] +- ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}_mosaic.nc", "{{ DATA }}/orog/det/{{ CASE }}_mosaic.nc"] +- ["{{ FIXorog }}/{{ CASE_ENS }}/{{ CASE_ENS }}_mosaic.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_mosaic.nc"] {% for tile in range(1, 7) %} -- ["{{ FIXgfs }}/orog/{{ CASE }}/{{ CASE }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}_grid.tile{{ tile }}.nc"] -- ["{{ FIXgfs }}/orog/{{ CASE_ENS }}/{{ CASE_ENS }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_grid.tile{{ tile }}.nc"] -- ["{{ FIXgfs }}/orog/{{ CASE }}/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] -- ["{{ FIXgfs }}/orog/{{ CASE_ENS }}/{{ CASE_ENS }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] +- ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}_grid.tile{{ tile }}.nc"] +- ["{{ FIXorog }}/{{ CASE_ENS }}/{{ CASE_ENS }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_grid.tile{{ tile }}.nc"] +- ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] +- ["{{ FIXorog }}/{{ CASE_ENS }}/{{ CASE_ENS }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] {% endfor %} diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 6d32c422a4..376b73666f 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -170,12 +170,12 @@ if [ $DOIAU = "YES" ]; then ${NCP} "${sfcdata_dir}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" ${NCP} "${DATA}/fnbgsi.${cmem}" "${DATA}/fnbgso.${cmem}" - ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" - ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" if [[ ${GSI_SOILANAL} = "YES" ]]; then FHR=6 - ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc" \ + ${NCP} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc" \ 
"${DATA}/lnd_incr.${cmem}" fi done # ensembles From a207fe5086ea22fc35ad1a26f0bc6d7c642ec6fc Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Mon, 1 Jul 2024 15:37:48 +0000 Subject: [PATCH 52/90] fix template issue + updates for g-w develop merge --- jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS | 6 +++--- ush/python/pygfs/task/snowens_analysis.py | 24 +++++++++++------------ 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS index 9ae9b66eeb..19c9b1bd95 100755 --- a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS @@ -19,15 +19,15 @@ export GDUMP YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ COMIN_OBS:COM_OBS_TMPL \ COMOUT_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \ - COMOUT_CONF:COM_CONF_TMPL -MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMOUT_CONF:COM_CONF_TMPL +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl \ COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL mkdir -p "${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" for imem in $(seq 1 "${NMEM_ENS}"); do memchar="mem$(printf %03i "${imem}")" - MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${PDY} HH=${cyc} declare_from_tmpl COMOUT_SNOW_ANALYSIS + MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${PDY} HH=${cyc} declare_from_tmpl COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL mkdir -p "${COMOUT_SNOW_ANALYSIS}" done diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index e6764d25ea..717c0a2f29 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -30,32 +30,32 @@ class SnowEnsAnalysis(Analysis): def __init__(self, config): super().__init__(config) - _res_det = int(self.config['CASE'][1:]) - _res_ens = int(self.config['CASE_ENS'][1:]) - _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2) - _recenter_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.RUN}.t{self.runtime_config['cyc']:02d}z.land_recenter.yaml") + _res_det = int(self.task_config['CASE'][1:]) + _res_ens = int(self.task_config['CASE_ENS'][1:]) + _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2) + _recenter_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.land_recenter.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( { 'npx_ges': _res_ens + 1, 'npy_ges': _res_ens + 1, - 'npz_ges': self.config.LEVS - 1, - 'npz': self.config.LEVS - 1, + 'npz_ges': self.task_config.LEVS - 1, + 'npz': self.task_config.LEVS - 1, 'SNOW_WINDOW_BEGIN': _window_begin, - 'SNOW_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H", + 'SNOW_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", 'ATM_WINDOW_BEGIN': _window_begin, - 'ATM_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H", - 'OPREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", - 'APREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", + 'ATM_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", + 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'jedi_yaml': _recenter_yaml, } ) - bkg_time = _window_begin if self.config.DOIAU else self.runtime_config.current_cycle + bkg_time = _window_begin if self.task_config.DOIAU else self.task_config.current_cycle local_dict['bkg_time'] 
= bkg_time # task_config is everything that this task should need - self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + self.task_config = AttrDict(**self.task_config, **local_dict) @logit(logger) def initialize(self) -> None: From 4f81dedef206dc91eeaf41cba12c4cb6f31474c2 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Mon, 1 Jul 2024 17:12:45 +0000 Subject: [PATCH 53/90] Address Jiarui's comments --- jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS | 3 ++- parm/gdas/snow_stage_orog.yaml.j2 | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS index 19c9b1bd95..afddcacb28 100755 --- a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS @@ -27,7 +27,8 @@ mkdir -p "${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" for imem in $(seq 1 "${NMEM_ENS}"); do memchar="mem$(printf %03i "${imem}")" - MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${PDY} HH=${cyc} declare_from_tmpl COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL + MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL mkdir -p "${COMOUT_SNOW_ANALYSIS}" done diff --git a/parm/gdas/snow_stage_orog.yaml.j2 b/parm/gdas/snow_stage_orog.yaml.j2 index 024b2e03f0..3cd7d5c327 100644 --- a/parm/gdas/snow_stage_orog.yaml.j2 +++ b/parm/gdas/snow_stage_orog.yaml.j2 @@ -4,7 +4,7 @@ mkdir: copy: - ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}_mosaic.nc", "{{ DATA }}/orog/det/{{ CASE }}_mosaic.nc"] - ["{{ FIXorog }}/{{ CASE_ENS }}/{{ CASE_ENS }}_mosaic.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_mosaic.nc"] -{% for tile in range(1, 7) %} +{% for tile in range(1, ntiles+1) %} - ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}_grid.tile{{ tile }}.nc"] - ["{{ FIXorog }}/{{ CASE_ENS }}/{{ CASE_ENS }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_grid.tile{{ tile }}.nc"] - ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] From e2979b17efe79eaf3b212e2fd7ca4b2330a594af Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 3 Jul 2024 16:41:33 +0000 Subject: [PATCH 54/90] No NLN but NCP --- scripts/exgdas_enkf_sfc.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 376b73666f..e60f22bfb5 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -155,10 +155,10 @@ if [ $DOIAU = "YES" ]; then MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL - + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ COMIN_SNOW_ANALYSIS_MEM:COM_SNOW_ANALYSIS_TMPL - + # determine where the input snow restart files come from if [[ "${DO_JEDISNOWDA:-}" == "YES" ]]; then sfcdata_dir="${COMIN_SNOW_ANALYSIS_MEM}" @@ -205,7 +205,7 @@ if [ $DOIAU = "YES" ]; then if [[ ${GSI_SOILANAL} = "YES" ]]; then FHR=6 - ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc" \ + ${NCP} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc" \ "${DATA}/lnd_incr.${cmem}" fi done # ensembles @@ -245,8 +245,8 @@ if [ $DOSFCANL_ENKF = "YES" ]; then ${NCP} "${sfcdata_dir}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" ${NCP} "${DATA}/fnbgsi.${cmem}" "${DATA}/fnbgso.${cmem}" - ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" - ${NLN} 
"${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" done From 795e1d0e0c7b8c0d2d01ff1e357fcf6681286634 Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Wed, 3 Jul 2024 14:32:52 -0400 Subject: [PATCH 55/90] Update jobs/rocoto/esnowanl.sh Co-authored-by: Rahul Mahajan --- jobs/rocoto/esnowanl.sh | 5 ----- 1 file changed, 5 deletions(-) diff --git a/jobs/rocoto/esnowanl.sh b/jobs/rocoto/esnowanl.sh index 6aa640ee5e..bfeee9070f 100755 --- a/jobs/rocoto/esnowanl.sh +++ b/jobs/rocoto/esnowanl.sh @@ -11,11 +11,6 @@ status=$? export job="esnowanl" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH ############################################################### # Execute the JJOB From 1415c3d16e9c9e99957a0d0da45edd21072100d2 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 5 Jul 2024 14:14:14 +0000 Subject: [PATCH 56/90] address comments --- env/HERA.env | 10 +++++----- env/HERCULES.env | 10 +++++----- env/JET.env | 10 +++++----- env/ORION.env | 10 +++++----- env/S4.env | 10 +++++----- env/WCOSS2.env | 10 +++++----- ...OW_ENSEMBLE_ANALYSIS => JGDAS_ENKF_SNOW_RECENTER} | 4 ++-- jobs/rocoto/{esnowanl.sh => esnowrecen.sh} | 10 ++-------- .../gfs/{config.esnowanl => config.esnowrecen} | 8 ++++---- parm/config/gfs/config.resources | 12 ++++++------ ...mble_analysis.py => exgdas_enkf_snow_recenter.py} | 9 +++++---- ush/python/pygfs/task/snowens_analysis.py | 11 ++++------- workflow/applications/gfs_cycled.py | 6 +++--- workflow/rocoto/gfs_tasks.py | 10 +++++----- workflow/rocoto/tasks.py | 2 +- 15 files changed, 62 insertions(+), 70 deletions(-) rename jobs/{JGLOBAL_SNOW_ENSEMBLE_ANALYSIS => JGDAS_ENKF_SNOW_RECENTER} (92%) rename jobs/rocoto/{esnowanl.sh => esnowrecen.sh} (53%) rename parm/config/gfs/{config.esnowanl => config.esnowrecen} (85%) rename scripts/{exglobal_snow_ensemble_analysis.py => exgdas_enkf_snow_recenter.py} (74%) diff --git a/env/HERA.env b/env/HERA.env index 4ea463d6c3..6c0b98da79 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -114,13 +114,13 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" -elif [[ "${step}" = "esnowanl" ]]; then +elif [[ "${step}" = "esnowrecen" ]]; then - nth_max=$((npe_node_max / npe_node_esnowanl)) + nth_max=$((npe_node_max / npe_node_esnowrecen)) - export NTHREADS_ESNOWANL=${nth_esnowanl:-${nth_max}} - [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} - export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" + export NTHREADS_ESNOWRECEN=${nth_esnowrecen:-${nth_max}} + [[ ${NTHREADS_ESNOWRECEN} -gt ${nth_max} ]] && export NTHREADS_ESNOWRECEN=${nth_max} + export APRUN_ESNOWRECEN="${launcher} -n ${npe_esnowrecen} --cpus-per-task=${NTHREADS_ESNOWRECEN}" export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" diff --git a/env/HERCULES.env b/env/HERCULES.env index 392904be9f..7008df5819 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -112,13 +112,13 @@ case ${step} in export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" ;; - "esnowanl") + "esnowrecen") - 
nth_max=$((npe_node_max / npe_node_esnowanl)) + nth_max=$((npe_node_max / npe_node_esnowrecen)) - export NTHREADS_ESNOWANL=${nth_esnowanl:-${nth_max}} - [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} - export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" + export NTHREADS_ESNOWRECEN=${nth_esnowrecen:-${nth_max}} + [[ ${NTHREADS_ESNOWRECEN} -gt ${nth_max} ]] && export NTHREADS_ESNOWRECEN=${nth_max} + export APRUN_ESNOWRECEN="${launcher} -n ${npe_esnowrecen} --cpus-per-task=${NTHREADS_ESNOWRECEN}" export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" ;; diff --git a/env/JET.env b/env/JET.env index 4455993845..bba593b6e3 100755 --- a/env/JET.env +++ b/env/JET.env @@ -94,13 +94,13 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" -elif [[ "${step}" = "esnowanl" ]]; then +elif [[ "${step}" = "esnowrecen" ]]; then - nth_max=$((npe_node_max / npe_node_esnowanl)) + nth_max=$((npe_node_max / npe_node_esnowrecen)) - export NTHREADS_ESNOWANL=${nth_esnowanl:-${nth_max}} - [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} - export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" + export NTHREADS_ESNOWRECEN=${nth_esnowrecen:-${nth_max}} + [[ ${NTHREADS_ESNOWRECEN} -gt ${nth_max} ]] && export NTHREADS_ESNOWRECEN=${nth_max} + export APRUN_ESNOWRECEN="${launcher} -n ${npe_esnowrecen} --cpus-per-task=${NTHREADS_ESNOWRECEN}" export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" diff --git a/env/ORION.env b/env/ORION.env index 2f1bbe18a6..6919abfe74 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -102,13 +102,13 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" -elif [[ "${step}" = "esnowanl" ]]; then +elif [[ "${step}" = "esnowrecen" ]]; then - nth_max=$((npe_node_max / npe_node_esnowanl)) + nth_max=$((npe_node_max / npe_node_esnowrecen)) - export NTHREADS_ESNOWANL=${nth_esnowanl:-${nth_max}} - [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} - export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" + export NTHREADS_ESNOWRECEN=${nth_esnowrecen:-${nth_max}} + [[ ${NTHREADS_ESNOWRECEN} -gt ${nth_max} ]] && export NTHREADS_ESNOWRECEN=${nth_max} + export APRUN_ESNOWRECEN="${launcher} -n ${npe_esnowrecen} --cpus-per-task=${NTHREADS_ESNOWRECEN}" export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" diff --git a/env/S4.env b/env/S4.env index 9777f4e755..f36a3cc63a 100755 --- a/env/S4.env +++ b/env/S4.env @@ -94,13 +94,13 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" -elif [[ "${step}" = "esnowanl" ]]; then +elif [[ "${step}" = "esnowrecen" ]]; then - nth_max=$((npe_node_max / npe_node_esnowanl)) + nth_max=$((npe_node_max / npe_node_esnowrecen)) - export NTHREADS_ESNOWANL=${nth_esnowanl:-${nth_max}} - [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} - export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" + export NTHREADS_ESNOWRECEN=${nth_esnowrecen:-${nth_max}} + [[ ${NTHREADS_ESNOWRECEN} -gt ${nth_max} ]] && export NTHREADS_ESNOWRECEN=${nth_max} + export APRUN_ESNOWRECEN="${launcher} -n ${npe_esnowrecen} --cpus-per-task=${NTHREADS_ESNOWRECEN}" export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 
3f951ff46a..c08f5adbce 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -87,13 +87,13 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" -elif [[ "${step}" = "esnowanl" ]]; then +elif [[ "${step}" = "esnowrecen" ]]; then - nth_max=$((npe_node_max / npe_node_esnowanl)) + nth_max=$((npe_node_max / npe_node_esnowrecen)) - export NTHREADS_ESNOWANL=${nth_esnowanl:-${nth_max}} - [[ ${NTHREADS_ESNOWANL} -gt ${nth_max} ]] && export NTHREADS_ESNOWANL=${nth_max} - export APRUN_ESNOWANL="${launcher} -n ${npe_esnowanl} --cpus-per-task=${NTHREADS_ESNOWANL}" + export NTHREADS_ESNOWRECEN=${nth_esnowrecen:-${nth_max}} + [[ ${NTHREADS_ESNOWRECEN} -gt ${nth_max} ]] && export NTHREADS_ESNOWRECEN=${nth_max} + export APRUN_ESNOWRECEN="${launcher} -n ${npe_esnowrecen} --cpus-per-task=${NTHREADS_ESNOWRECEN}" export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" diff --git a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS b/jobs/JGDAS_ENKF_SNOW_RECENTER similarity index 92% rename from jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS rename to jobs/JGDAS_ENKF_SNOW_RECENTER index 19c9b1bd95..b794774a92 100755 --- a/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS +++ b/jobs/JGDAS_ENKF_SNOW_RECENTER @@ -1,7 +1,7 @@ #! /usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "esnowanl" -c "base esnowanl" +source "${HOMEgfs}/ush/jjob_header.sh" -e "esnowrecen" -c "base esnowrecen" ############################################## # Set variables used in the script @@ -34,7 +34,7 @@ done ############################################################### # Run relevant script -EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exglobal_snow_ensemble_analysis.py} +EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exgdas_enkf_snow_recenter.py} ${EXSCRIPT} status=$? (( status != 0 )) && exit "${status}" diff --git a/jobs/rocoto/esnowanl.sh b/jobs/rocoto/esnowrecen.sh similarity index 53% rename from jobs/rocoto/esnowanl.sh rename to jobs/rocoto/esnowrecen.sh index 6aa640ee5e..f8c4f8f7fc 100755 --- a/jobs/rocoto/esnowanl.sh +++ b/jobs/rocoto/esnowrecen.sh @@ -8,17 +8,11 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? [[ ${status} -ne 0 ]] && exit "${status}" -export job="esnowanl" +export job="esnowrecen" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH - ############################################################### # Execute the JJOB -"${HOMEgfs}/jobs/JGLOBAL_SNOW_ENSEMBLE_ANALYSIS" +"${HOMEgfs}/jobs/JGDAS_ENKF_SNOW_RECENTER" status=$? exit "${status}" diff --git a/parm/config/gfs/config.esnowanl b/parm/config/gfs/config.esnowrecen similarity index 85% rename from parm/config/gfs/config.esnowanl rename to parm/config/gfs/config.esnowrecen index 5d57c120a0..adb039559a 100644 --- a/parm/config/gfs/config.esnowanl +++ b/parm/config/gfs/config.esnowrecen @@ -1,12 +1,12 @@ #! 
/usr/bin/env bash -########## config.esnowanl ########## +########## config.esnowrecen ########## # configuration common to snow ensemble analysis tasks -echo "BEGIN: config.esnowanl" +echo "BEGIN: config.esnowrecen" # Get task specific resources -source "${EXPDIR}/config.resources" esnowanl +source "${EXPDIR}/config.resources" esnowrecen export JCB_BASE_YAML="${PARMgfs}/gdas/snow/jcb-base.yaml.j2" export JCB_ALGO_YAML="${PARMgfs}/gdas/snow/jcb-fv3jedi_land_ensrecenter.yaml.j2" @@ -26,4 +26,4 @@ export io_layout_y=@IO_LAYOUT_Y@ export JEDIEXE=${EXECgfs}/gdasapp_land_ensrecenter.x export FREGRID=${EXECgfs}/fregrid.x -echo "END: config.esnowanl" +echo "END: config.esnowrecen" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index b80a67018e..f26aec0a5f 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -14,7 +14,7 @@ if (( $# != 1 )); then echo "prep prepsnowobs prepatmiodaobs" echo "atmanlinit atmanlvar atmanlfv3inc atmanlfinal" echo "atmensanlinit atmensanlletkf atmensanlfv3inc atmensanlfinal" - echo "snowanl esnowanl" + echo "snowanl esnowrecen" echo "prepobsaero aeroanlinit aeroanlrun aeroanlfinal" echo "anal sfcanl analcalc analdiag fcst echgres" echo "upp atmos_products" @@ -372,7 +372,7 @@ case ${step} in export npe_apply_incr=6 ;; - "esnowanl") + "esnowrecen") # below lines are for creating JEDI YAML case ${CASE} in "C768") @@ -395,10 +395,10 @@ case ${step} in export layout_x export layout_y - export wtime_esnowanl="00:15:00" - export npe_esnowanl=$(( layout_x * layout_y * 6 )) - export nth_esnowanl=1 - export npe_node_esnowanl=$(( npe_node_max / nth_esnowanl )) + export wtime_esnowrecen="00:15:00" + export npe_esnowrecen=$(( layout_x * layout_y * 6 )) + export nth_esnowrecen=1 + export npe_node_esnowrecen=$(( npe_node_max / nth_esnowrecen )) export npe_apply_incr=6 ;; diff --git a/scripts/exglobal_snow_ensemble_analysis.py b/scripts/exgdas_enkf_snow_recenter.py similarity index 74% rename from scripts/exglobal_snow_ensemble_analysis.py rename to scripts/exgdas_enkf_snow_recenter.py index f4a8ffbb7d..2b23f9dca3 100755 --- a/scripts/exglobal_snow_ensemble_analysis.py +++ b/scripts/exgdas_enkf_snow_recenter.py @@ -1,12 +1,13 @@ #!/usr/bin/env python3 -# exglobal_snow_ensemble_analysis.py +# exgdas_enkf_snow_recenter.py # This script creates an SnowEnsAnalysis class -# and runs the initialize, execute and finalize methods -# for a global Snow Depth ensemble analysis +# and will recenter the ensemble mean to the +# deterministic analysis and provide increments +# to create an ensemble of snow analyses import os from wxflow import Logger, cast_strdict_as_dtypedict -from pygfs.task.snowens_analysis import SnowEnsAnalysis +from pygfs.snowens_analysis import SnowEnsAnalysis # Initialize root logger logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 717c0a2f29..88afbf60c9 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -3,20 +3,18 @@ import os from logging import getLogger from typing import Dict, List, Any -from pprint import pformat import numpy as np -from netCDF4 import Dataset from wxflow import (AttrDict, FileHandler, - to_fv3time, to_YMD, to_YMDH, to_timedelta, add_to_datetime, + to_fv3time, to_YMD, to_timedelta, add_to_datetime, rm_p, chdir, parse_j2yaml, save_as_yaml, Jinja, logit, Executable, WorkflowException) -from 
pygfs.task.analysis import Analysis +from pygfs.analysis import Analysis logger = getLogger(__name__.split('.')[-1]) @@ -185,7 +183,7 @@ def recenterEns(self) -> None: Instance of the SnowEnsAnalysis object """ logger.info("Running recentering code") - exec_cmd = Executable(self.task_config.APRUN_ESNOWANL) + exec_cmd = Executable(self.task_config.APRUN_ESNOWRECEN) exec_name = os.path.join(self.task_config.DATA, 'gdasapp_land_ensrecenter.x') exec_cmd.add_default_arg(exec_name) exec_cmd.add_default_arg(self.task_config.jedi_yaml) @@ -259,7 +257,7 @@ def addEnsIncrements(self) -> None: 'current_cycle': bkg_time, 'CASE_ENS': self.task_config.CASE_ENS, 'OCNRES': self.task_config.OCNRES, - 'ntiles': 6, + 'ntiles': self.task_config.ntiles, 'ENS_APPLY_INCR_NML_TMPL': self.task_config.ENS_APPLY_INCR_NML_TMPL, 'APPLY_INCR_EXE': self.task_config.APPLY_INCR_EXE, 'APRUN_APPLY_INCR': self.task_config.APRUN_APPLY_INCR, @@ -345,7 +343,6 @@ def add_increments(config: Dict) -> None: os.symlink(exe_src, exe_dest) # execute APPLY_INCR_EXE to create analysis files - print(os.getcwd()) exe = Executable(config.APRUN_APPLY_INCR) exe.add_default_arg(os.path.join(config.DATA, os.path.basename(exe_src))) logger.info(f"Executing {exe}") diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 37de1d9a8c..d8ae260359 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -114,7 +114,7 @@ def _get_app_configs(self): if self.do_jedisnowda: configs += ['prepsnowobs', 'snowanl'] if self.do_hybvar: - configs += ['esnowanl'] + configs += ['esnowrecen'] if self.do_mos: configs += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', @@ -170,7 +170,7 @@ def get_task_names(self): hybrid_tasks += ['eobs', 'eupd', 'echgres'] hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] if self.do_jedisnowda: - hybrid_tasks += ['esnowanl'] + hybrid_tasks += ['esnowrecen'] hybrid_after_eupd_tasks += ['ecen', 'esfc', 'efcs', 'epos', 'earc', 'cleanup'] # Collect all "gdas" cycle tasks @@ -301,7 +301,7 @@ def get_task_names(self): if self.do_hybvar and 'gfs' in self.eupd_cdumps: enkfgfs_tasks = hybrid_tasks + hybrid_after_eupd_tasks enkfgfs_tasks.remove("echgres") - enkfgfs_tasks.remove("esnowanl") + enkfgfs_tasks.remove("esnowrecen") tasks['enkfgfs'] = enkfgfs_tasks return tasks diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 0c2ef65ba7..e4c8c8105e 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -627,7 +627,7 @@ def snowanl(self): task = rocoto.create_task(task_dict) return task - def esnowanl(self): + def esnowrecen(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}prepsnowobs'} @@ -638,14 +638,14 @@ def esnowanl(self): deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - resources = self.get_resource('esnowanl') - task_name = f'{self.cdump}esnowanl' + resources = self.get_resource('esnowrecen') + task_name = f'{self.cdump}esnowrecen' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.cdump.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/jobs/rocoto/esnowanl.sh', + 'command': f'{self.HOMEgfs}/jobs/rocoto/esnowrecen.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2580,7 +2580,7 @@ def esfc(self): dep_dict = 
{'type': 'task', 'name': f'{self.cdump}eupd'} deps.append(rocoto.add_dependency(dep_dict)) if self.app_config.do_jedisnowda: - dep_dict = {'type': 'task', 'name': f'{self.cdump}esnowanl'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}esnowrecen'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index fb1d1d22a2..8072ad01e2 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -20,7 +20,7 @@ class Tasks: 'eobs', 'eomg', 'epos', 'esfc', 'eupd', 'atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', - 'prepsnowobs', 'snowanl', 'esnowanl', + 'prepsnowobs', 'snowanl', 'esnowrecen', 'fcst', 'atmanlupp', 'atmanlprod', 'atmupp', 'goesupp', 'atmosprod', 'oceanprod', 'iceprod', From e98a65fe79d440ce61ea89eaac30e1358c618410 Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Fri, 5 Jul 2024 12:05:18 -0400 Subject: [PATCH 57/90] Update WCOSS2.env --- env/WCOSS2.env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/env/WCOSS2.env b/env/WCOSS2.env index c08f5adbce..d8e917b152 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -93,7 +93,7 @@ elif [[ "${step}" = "esnowrecen" ]]; then export NTHREADS_ESNOWRECEN=${nth_esnowrecen:-${nth_max}} [[ ${NTHREADS_ESNOWRECEN} -gt ${nth_max} ]] && export NTHREADS_ESNOWRECEN=${nth_max} - export APRUN_ESNOWRECEN="${launcher} -n ${npe_esnowrecen} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + export APRUN_ESNOWRECEN="${launcher} -n ${npe_esnowrecen}" export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" From 57145e5eb4d4cc85bb3a0dabd1fcd5d76b014bab Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 5 Jul 2024 16:31:59 +0000 Subject: [PATCH 58/90] use new wxflow technique for templates --- parm/gdas/snow_finalize_ens_update.yaml.j2 | 30 ++------------- parm/gdas/snow_stage_ens_update.yaml.j2 | 45 +++------------------- 2 files changed, 10 insertions(+), 65 deletions(-) diff --git a/parm/gdas/snow_finalize_ens_update.yaml.j2 b/parm/gdas/snow_finalize_ens_update.yaml.j2 index aad6b14fc0..cdfd163c57 100644 --- a/parm/gdas/snow_finalize_ens_update.yaml.j2 +++ b/parm/gdas/snow_finalize_ens_update.yaml.j2 @@ -11,24 +11,13 @@ copy: 'HH':current_cycle | strftime("%H"), 'MEMDIR':"mem" + '%03d' % mem} %} - # Replace template variables with tmpl_dict, one key at a time - # This must be done in a namespace to overcome jinja scoping - # Variables set inside of a for loop are lost at the end of the loop - # unless they are part of a namespace - {% set com_ns = namespace(COMOUT_SNOW_ANALYSIS_MEM = COM_SNOW_ANALYSIS_TMPL) %} - - {% for key in tmpl_dict.keys() %} - {% set search_term = '${' + key + '}' %} - {% set replace_term = tmpl_dict[key] %} - {% set com_ns.COMOUT_SNOW_ANALYSIS_MEM = com_ns.COMOUT_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} - {% endfor %} {% for tile in range(1, ntiles+1) %} -- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COMOUT_SNOW_ANALYSIS_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} {% if DOIAU == True %} # if using IAU, also need analyses copied at the beginning of the window {% for tile in 
range(1, ntiles+1) %} -- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COMOUT_SNOW_ANALYSIS_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} {% endif %} {% endfor %} @@ -43,23 +32,12 @@ copy: 'HH':current_cycle | strftime("%H"), 'MEMDIR':"ensstat"} %} -# Replace template variables with tmpl_dict, one key at a time -# This must be done in a namespace to overcome jinja scoping -# Variables set inside of a for loop are lost at the end of the loop -# unless they are part of a namespace -{% set com_ns = namespace(COMOUT_SNOW_ANALYSIS_MEM = COM_SNOW_ANALYSIS_TMPL) %} - -{% for key in tmpl_dict.keys() %} - {% set search_term = '${' + key + '}' %} - {% set replace_term = tmpl_dict[key] %} - {% set com_ns.COMOUT_SNOW_ANALYSIS_MEM = com_ns.COMOUT_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} -{% endfor %} {% for tile in range(1, ntiles+1) %} -- ["{{ DATA }}/inc/ensmean/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COMOUT_SNOW_ANALYSIS_MEM }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ DATA }}/inc/ensmean/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} {% if DOIAU == True %} # if using IAU, also need increment copied at the beginning of the window {% for tile in range(1, ntiles+1) %} -- ["{{ DATA }}/inc/ensmean/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ com_ns.COMOUT_SNOW_ANALYSIS_MEM }}/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ DATA }}/inc/ensmean/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} {% endif %} diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 index 4f1c66f23e..40105fcc99 100644 --- a/parm/gdas/snow_stage_ens_update.yaml.j2 +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -31,19 +31,8 @@ copy: 'HH':previous_cycle | strftime("%H"), 'MEMDIR':""} %} -# Replace template variables with tmpl_dict, one key at a time -# This must be done in a namespace to overcome jinja scoping -# Variables set inside of a for loop are lost at the end of the loop -# unless they are part of a namespace -{% set com_prev_ns = namespace(COMIN_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_TMPL) %} - -{% for key in tmpl_dict.keys() %} - {% set search_term = '${' + key + '}' %} - {% set replace_term = tmpl_dict[key] %} - {% set com_prev_ns.COMIN_ATMOS_RESTART_MEM = com_prev_ns.COMIN_ATMOS_RESTART_MEM.replace(search_term, replace_term) %} -{% endfor %} {% for tile in range(1, ntiles+1) %} -- ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/det/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/det/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} ###################################### # copy deterministic 
increment files @@ -56,19 +45,8 @@ copy: 'HH':current_cycle | strftime("%H"), 'MEMDIR':""} %} -# Replace template variables with tmpl_dict, one key at a time -# This must be done in a namespace to overcome jinja scoping -# Variables set inside of a for loop are lost at the end of the loop -# unless they are part of a namespace -{% set com_ns = namespace(COMIN_SNOW_ANALYSIS_MEM = COM_SNOW_ANALYSIS_TMPL) %} - -{% for key in tmpl_dict.keys() %} - {% set search_term = '${' + key + '}' %} - {% set replace_term = tmpl_dict[key] %} - {% set com_ns.COMIN_SNOW_ANALYSIS_MEM = com_ns.COMIN_SNOW_ANALYSIS_MEM.replace(search_term, replace_term) %} -{% endfor %} {% for tile in range(1, ntiles+1) %} -- ["{{ com_ns.COMIN_SNOW_ANALYSIS_MEM }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/inc/det/snowinc.{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/inc/det/snowinc.{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} ###################################### # copy ensemble background files @@ -82,28 +60,17 @@ copy: 'HH':previous_cycle | strftime("%H"), 'MEMDIR':"mem" + '%03d' % mem} %} - # Replace template variables with tmpl_dict, one key at a time - # This must be done in a namespace to overcome jinja scoping - # Variables set inside of a for loop are lost at the end of the loop - # unless they are part of a namespace - {% set com_prev_ns = namespace(COMIN_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_TMPL) %} - - {% for key in tmpl_dict.keys() %} - {% set search_term = '${' + key + '}' %} - {% set replace_term = tmpl_dict[key] %} - {% set com_prev_ns.COMIN_ATMOS_RESTART_MEM = com_prev_ns.COMIN_ATMOS_RESTART_MEM.replace(search_term, replace_term) %} - {% endfor %} # we need to copy them to two places, one serves as the basis for the analysis {% for tile in range(1, ntiles+1) %} -- ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] -- ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} {% if DOIAU == True %} # if using IAU, also need backgrounds copied at the beginning of the window # we need to copy them to two places, one serves as the basis for the analysis {% for tile in range(1, ntiles+1) %} -- ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] -- ["{{ com_prev_ns.COMIN_ATMOS_RESTART_MEM }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ 
SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] {% endfor %} {% endif %} {% endfor %} From bbbb231f59638b60b304c90fc1da78cd9c447a19 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 5 Jul 2024 17:36:58 +0000 Subject: [PATCH 59/90] some python issues --- ush/python/pygfs/__init__.py | 1 + ush/python/pygfs/task/snowens_analysis.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/ush/python/pygfs/__init__.py b/ush/python/pygfs/__init__.py index fa6b0b373e..3f2528bf3c 100644 --- a/ush/python/pygfs/__init__.py +++ b/ush/python/pygfs/__init__.py @@ -7,6 +7,7 @@ from .task.atm_analysis import AtmAnalysis from .task.atmens_analysis import AtmEnsAnalysis from .task.snow_analysis import SnowAnalysis +from .task.snowens_analysis import SnowEnsAnalysis from .task.upp import UPP from .task.oceanice_products import OceanIceProducts from .task.gfs_forecast import GFSForecast diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 88afbf60c9..a996f436c7 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -14,7 +14,7 @@ logit, Executable, WorkflowException) -from pygfs.analysis import Analysis +from pygfs.task.analysis import Analysis logger = getLogger(__name__.split('.')[-1]) From 947a727a1a2eb154298f8ffcdf832eeb40a03655 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 5 Jul 2024 17:39:39 +0000 Subject: [PATCH 60/90] more issues --- scripts/exgdas_enkf_snow_recenter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/exgdas_enkf_snow_recenter.py b/scripts/exgdas_enkf_snow_recenter.py index 2b23f9dca3..63bf7306fa 100755 --- a/scripts/exgdas_enkf_snow_recenter.py +++ b/scripts/exgdas_enkf_snow_recenter.py @@ -7,7 +7,7 @@ import os from wxflow import Logger, cast_strdict_as_dtypedict -from pygfs.snowens_analysis import SnowEnsAnalysis +from pygfs.task.snowens_analysis import SnowEnsAnalysis # Initialize root logger logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) From 7c6b91f327242473667361a76ed33a042d105312 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 5 Jul 2024 18:19:41 +0000 Subject: [PATCH 61/90] fix templating --- parm/gdas/snow_finalize_ens_update.yaml.j2 | 20 +++++++-------- parm/gdas/snow_stage_ens_update.yaml.j2 | 30 +++++++++++----------- 2 files changed, 25 insertions(+), 25 deletions(-) diff --git a/parm/gdas/snow_finalize_ens_update.yaml.j2 b/parm/gdas/snow_finalize_ens_update.yaml.j2 index cdfd163c57..a2a5763ab8 100644 --- a/parm/gdas/snow_finalize_ens_update.yaml.j2 +++ b/parm/gdas/snow_finalize_ens_update.yaml.j2 @@ -5,11 +5,11 @@ copy: {% for mem in range(1, NMEM_ENS + 1) %} # define variables # Declare a dict of search and replace terms to run on each template - {% set tmpl_dict = {'ROTDIR':ROTDIR, - 'RUN':RUN, - 'YMD':current_cycle | to_YMD , - 'HH':current_cycle | strftime("%H"), - 'MEMDIR':"mem" + '%03d' % mem} %} + {% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':current_cycle | to_YMD , + '${HH}':current_cycle | strftime("%H"), + '${MEMDIR}':"mem" + '%03d' % mem} %} {% for tile in range(1, ntiles+1) %} - ["{{ DATA 
}}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] @@ -26,11 +26,11 @@ copy: ###################################### # define variables # Declare a dict of search and replace terms to run on each template -{% set tmpl_dict = {'ROTDIR':ROTDIR, - 'RUN':RUN, - 'YMD':current_cycle | to_YMD , - 'HH':current_cycle | strftime("%H"), - 'MEMDIR':"ensstat"} %} +{% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':current_cycle | to_YMD , + '${HH}':current_cycle | strftime("%H"), + '${MEMDIR}':"ensstat"} %} {% for tile in range(1, ntiles+1) %} - ["{{ DATA }}/inc/ensmean/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 index 40105fcc99..4ad5499751 100644 --- a/parm/gdas/snow_stage_ens_update.yaml.j2 +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -25,11 +25,11 @@ copy: ###################################### # define variables # Declare a dict of search and replace terms to run on each template -{% set tmpl_dict = {'ROTDIR':ROTDIR, - 'RUN':GDUMP, - 'YMD':previous_cycle | to_YMD, - 'HH':previous_cycle | strftime("%H"), - 'MEMDIR':""} %} +{% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':GDUMP, + '${YMD}':previous_cycle | to_YMD, + '${HH}':previous_cycle | strftime("%H"), + '${MEMDIR}':""} %} {% for tile in range(1, ntiles+1) %} - ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/det/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] @@ -39,11 +39,11 @@ copy: ###################################### # define variables # Declare a dict of search and replace terms to run on each template -{% set tmpl_dict = {'ROTDIR':ROTDIR, - 'RUN':GDUMP, - 'YMD':current_cycle | to_YMD, - 'HH':current_cycle | strftime("%H"), - 'MEMDIR':""} %} +{% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':GDUMP, + '${YMD}':current_cycle | to_YMD, + '${HH}':current_cycle | strftime("%H"), + '${MEMDIR}':""} %} {% for tile in range(1, ntiles+1) %} - ["{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/inc/det/snowinc.{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] @@ -54,11 +54,11 @@ copy: {% for mem in range(1, NMEM_ENS + 1) %} # define variables # Declare a dict of search and replace terms to run on each template - {% set tmpl_dict = {'ROTDIR':ROTDIR, - 'RUN':RUN, - 'YMD':previous_cycle | to_YMD, - 'HH':previous_cycle | strftime("%H"), - 'MEMDIR':"mem" + '%03d' % mem} %} + {% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':previous_cycle | to_YMD, + '${HH}':previous_cycle | strftime("%H"), + '${MEMDIR}':"mem" + '%03d' % mem} %} # we need to copy them to two places, one serves as the basis for the analysis {% for tile in range(1, ntiles+1) %} From 4a2af934aa54408b083aa58760f4ec09fceabc2a Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 17 Jul 2024 16:36:03 +0000 Subject: [PATCH 62/90] address some comments --- jobs/JGDAS_ENKF_SNOW_RECENTER | 3 +-- scripts/exgdas_enkf_sfc.sh | 5 ++--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/jobs/JGDAS_ENKF_SNOW_RECENTER b/jobs/JGDAS_ENKF_SNOW_RECENTER index 3f9f180dcb..05d46cffc2 100755 --- 
a/jobs/JGDAS_ENKF_SNOW_RECENTER +++ b/jobs/JGDAS_ENKF_SNOW_RECENTER @@ -9,7 +9,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "esnowrecen" -c "base esnowrecen" # Ignore possible spelling error (nothing is misspelled) # shellcheck disable=SC2153 GDUMP="gdas" -GDUMP_ENS="enkfgdas" export GDUMP ############################################## @@ -27,7 +26,7 @@ mkdir -p "${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" for imem in $(seq 1 "${NMEM_ENS}"); do memchar="mem$(printf %03i "${imem}")" - MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL mkdir -p "${COMOUT_SNOW_ANALYSIS}" done diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index e60f22bfb5..166734e408 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -200,7 +200,7 @@ if [ $DOIAU = "YES" ]; then COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" - ${NCP} "${DATA}/fnbgso.${cmem}" "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" + cpfs "${DATA}/fnbgso.${cmem}" "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" if [[ ${GSI_SOILANAL} = "YES" ]]; then @@ -266,7 +266,7 @@ if [ $DOSFCANL_ENKF = "YES" ]; then MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL - [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" + [[ ! -d "${COM_ATMOS_RESTART_MEM}" ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" ${NCP} "${DATA}/fnbgso.${cmem}" "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" @@ -280,7 +280,6 @@ fi ################################################################################ # Postprocessing cd "${pwd}" || exit 1 -[[ ${mkdata} = "YES" ]] && rm -rf "${DATA}" exit ${err} From 1717a163eb3f1f0d1c13dc7c6b8ca404f2ec331a Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 17 Jul 2024 16:38:24 +0000 Subject: [PATCH 63/90] update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 5ae609e439..ca18845721 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 5ae609e4393dd2a4e43fba7e92caf42b193a4617 +Subproject commit ca18845721b860a74ac0ff9ae27a87fb86a0c78b From e72868a47a6a12850d350b4fff35df2d9638e754 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 17 Jul 2024 20:50:32 +0000 Subject: [PATCH 64/90] you shall not pass --- scripts/exgdas_enkf_sfc.sh | 2 +- ush/python/pygfs/task/snowens_analysis.py | 4 ---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 166734e408..547ee64992 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -268,7 +268,7 @@ if [ $DOSFCANL_ENKF = "YES" ]; then [[ ! 
-d "${COM_ATMOS_RESTART_MEM}" ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" - ${NCP} "${DATA}/fnbgso.${cmem}" "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" + cpfs "${DATA}/fnbgso.${cmem}" "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" done diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index a996f436c7..df433b78d4 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -131,8 +131,6 @@ def regridDetBkg(self) -> None: except Exception: raise WorkflowException(f"An error occured during execution of {exec_cmd}") - pass - @logit(logger) def regridDetInc(self) -> None: """Run fregrid to regrid the deterministic snow increment @@ -169,8 +167,6 @@ def regridDetInc(self) -> None: except Exception: raise WorkflowException(f"An error occured during execution of {exec_cmd}") - pass - @logit(logger) def recenterEns(self) -> None: """Run recentering code to create an ensemble of snow increments From f860cc4e77a92e4c66fd8800ae56124d7f14498b Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 17 Jul 2024 20:54:56 +0000 Subject: [PATCH 65/90] just remove mkdata stuff --- scripts/exgdas_enkf_sfc.sh | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 547ee64992..1944325317 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -68,16 +68,6 @@ export DELTSFC=${DELTSFC:-6} APRUN_ESFC=${APRUN_ESFC:-${APRUN:-""}} NTHREADS_ESFC=${NTHREADS_ESFC:-${NTHREADS:-1}} -################################################################################ -# Preprocessing -mkdata=NO -if [ ! -d $DATA ]; then - mkdata=YES - mkdir -p $DATA -fi -cd $DATA || exit 99 - - ################################################################################ # Update surface fields in the FV3 restart's using global_cycle. 
From 6d00e5cc10e02f453c8e962601520fb2c61c2a1c Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 19 Jul 2024 20:04:22 +0000 Subject: [PATCH 66/90] some updates --- parm/config/gfs/config.resources | 2 +- scripts/exgdas_enkf_snow_recenter.py | 1 + ush/python/pygfs/task/analysis.py | 2 + ush/python/pygfs/task/snowens_analysis.py | 45 +++++++++++++++++++++-- 4 files changed, 46 insertions(+), 4 deletions(-) diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 7c15c22b9a..9983b6b97f 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -395,7 +395,7 @@ case ${step} in export layout_x export layout_y - export wtime_esnowrecen="00:15:00" + export wtime_esnowrecen="00:25:00" export npe_esnowrecen=$(( layout_x * layout_y * 6 )) export nth_esnowrecen=1 export npe_node_esnowrecen=$(( npe_node_max / nth_esnowrecen )) diff --git a/scripts/exgdas_enkf_snow_recenter.py b/scripts/exgdas_enkf_snow_recenter.py index 63bf7306fa..5e831efedf 100755 --- a/scripts/exgdas_enkf_snow_recenter.py +++ b/scripts/exgdas_enkf_snow_recenter.py @@ -21,6 +21,7 @@ # Instantiate the snow ensemble analysis task anl = SnowEnsAnalysis(config) anl.initialize() + anl.genWeights() anl.regridDetBkg() anl.regridDetInc() anl.recenterEns() diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index e407cf1765..bf47b9a950 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -28,6 +28,8 @@ def __init__(self, config: Dict[str, Any]) -> None: super().__init__(config) # Store location of GDASApp jinja2 templates self.gdasapp_j2tmpl_dir = os.path.join(self.task_config.PARMgfs, 'gdas') + # fix ocnres + self.task_config.OCNRES = f"{self.task_config.OCNRES :03d}" def initialize(self) -> None: super().initialize() diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index df433b78d4..e97036ec9d 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -3,6 +3,7 @@ import os from logging import getLogger from typing import Dict, List, Any +import netCDF4 as nc import numpy as np from wxflow import (AttrDict, @@ -95,6 +96,42 @@ def initialize(self) -> None: fregrid_copy = {'copy': [[os.path.join(self.task_config.EXECgfs, 'fregrid'), os.path.join(self.task_config.DATA, 'fregrid.x')]]} FileHandler(fregrid_copy).sync() + @logit(logger) + def genWeights(self) -> None: + """Create a modified land_frac file for use by fregrid + to interpolate the snow background from det to ensres + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + chdir(self.task_config.DATA) + + # loop through tiles + for tile in range(1, self.task_config.ntiles + 1): + # open the sfc restart and get the soil moisture + rst = nc.Dataset(f"./bkg/det/{to_fv3time(self.task_config.bkg_time)}.sfc_data.tile{tile}.nc") + smc = rst.variables['smc'][:] + rst.close() + # open the oro data and get the land fraction + oro = nc.Dataset(f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data.tile{tile}.nc") + land_frac = oro.variables['land_frac'][:] + oro.close() + # create an output file + ncfile = nc.Dataset(f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight.tile{tile}.nc", mode='w', format='NETCDF4') + case_int = int(self.task_config.CASE[1:]) + lon = ncfile.createDimension('lon', case_int) + lat = ncfile.createDimension('lat', case_int) + land_frac_out = 
ncfile.createVariable('land_frac', np.float32, ('lon', 'lat')) + # mask the land fraction where soil moisture is less than 1 + land_frac[np.where(smc[0,0,...] == 1)] = 0 + land_frac_out[:] = land_frac + # write out and close the file + ncfile.close() + + @logit(logger) def regridDetBkg(self) -> None: """Run fregrid to regrid the deterministic snow background @@ -112,13 +149,14 @@ def regridDetBkg(self) -> None: f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc", f"--input_dir ./bkg/det/", f"--input_file {to_fv3time(self.task_config.bkg_time)}.sfc_data", - f"--scalar_field snodl,slmsk", + f"--scalar_field snodl,slmsk,vtype", f"--output_dir ./bkg/det_ensres/", f"--output_file {to_fv3time(self.task_config.bkg_time)}.sfc_data", f"--output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", f"--interp_method conserve_order1", - f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data", + f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight", f"--weight_field land_frac", + f"--remap_file ./remap", ] fregrid = os.path.join(self.task_config.DATA, 'fregrid.x') + " " + " ".join(arg_list) exec_cmd = Executable(fregrid) @@ -153,8 +191,9 @@ def regridDetInc(self) -> None: f"--output_file snowinc.{to_fv3time(self.task_config.bkg_time)}.sfc_data", f"--output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", f"--interp_method conserve_order1", - f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data", + f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight", f"--weight_field land_frac", + f"--remap_file ./remap", ] fregrid = os.path.join(self.task_config.DATA, 'fregrid.x') + " " + " ".join(arg_list) exec_cmd = Executable(fregrid) From dd493ad4cc14aea0dbfcff58a983feff1208a178 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Mon, 29 Jul 2024 19:19:48 +0000 Subject: [PATCH 67/90] address pynorms --- ush/python/pygfs/task/snowens_analysis.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index e97036ec9d..775557913b 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -126,12 +126,11 @@ def genWeights(self) -> None: lat = ncfile.createDimension('lat', case_int) land_frac_out = ncfile.createVariable('land_frac', np.float32, ('lon', 'lat')) # mask the land fraction where soil moisture is less than 1 - land_frac[np.where(smc[0,0,...] == 1)] = 0 + land_frac[np.where(smc[0, 0, ...] 
== 1)] = 0 land_frac_out[:] = land_frac # write out and close the file ncfile.close() - @logit(logger) def regridDetBkg(self) -> None: """Run fregrid to regrid the deterministic snow background From 2972225721e90bf958e6c6fdceef7ef8f231cb7d Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 1 Aug 2024 13:26:36 +0000 Subject: [PATCH 68/90] Address clara's comments + fregrid_exe --- ush/python/pygfs/task/snowens_analysis.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 775557913b..1e37f2de26 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -124,10 +124,10 @@ def genWeights(self) -> None: case_int = int(self.task_config.CASE[1:]) lon = ncfile.createDimension('lon', case_int) lat = ncfile.createDimension('lat', case_int) - land_frac_out = ncfile.createVariable('land_frac', np.float32, ('lon', 'lat')) + lsm_frac_out = ncfile.createVariable('lsm_frac', np.float32, ('lon', 'lat')) # mask the land fraction where soil moisture is less than 1 land_frac[np.where(smc[0, 0, ...] == 1)] = 0 - land_frac_out[:] = land_frac + lsm_frac_out[:] = land_frac # write out and close the file ncfile.close() @@ -148,17 +148,17 @@ def regridDetBkg(self) -> None: f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc", f"--input_dir ./bkg/det/", f"--input_file {to_fv3time(self.task_config.bkg_time)}.sfc_data", - f"--scalar_field snodl,slmsk,vtype", + f"--scalar_field snodl", f"--output_dir ./bkg/det_ensres/", f"--output_file {to_fv3time(self.task_config.bkg_time)}.sfc_data", f"--output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", f"--interp_method conserve_order1", f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight", - f"--weight_field land_frac", + f"--weight_field lsm_frac", f"--remap_file ./remap", ] - fregrid = os.path.join(self.task_config.DATA, 'fregrid.x') + " " + " ".join(arg_list) - exec_cmd = Executable(fregrid) + fregrid_exe = os.path.join(self.task_config.DATA, 'fregrid.x') + " " + " ".join(arg_list) + exec_cmd = Executable(fregrid_exe) try: logger.debug(f"Executing {exec_cmd}") @@ -191,11 +191,11 @@ def regridDetInc(self) -> None: f"--output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", f"--interp_method conserve_order1", f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight", - f"--weight_field land_frac", + f"--weight_field lsm_frac", f"--remap_file ./remap", ] - fregrid = os.path.join(self.task_config.DATA, 'fregrid.x') + " " + " ".join(arg_list) - exec_cmd = Executable(fregrid) + fregrid_exe = os.path.join(self.task_config.DATA, 'fregrid.x') + " " + " ".join(arg_list) + exec_cmd = Executable(fregrid_exe) try: logger.debug(f"Executing {exec_cmd}") From 1c555c6b917fdac0ba066d7b1c57f8e66a9dc0fc Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 1 Aug 2024 15:18:36 +0000 Subject: [PATCH 69/90] shellcheck --- env/HERA.env | 2 -- env/JET.env | 2 -- env/ORION.env | 2 -- env/S4.env | 2 -- env/WCOSS2.env | 2 -- 5 files changed, 10 deletions(-) diff --git a/env/HERA.env b/env/HERA.env index 1df95a8dcb..18c8d584dc 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -108,8 +108,6 @@ elif [[ "${step}" = "snowanl" ]]; then elif [[ "${step}" = "esnowrecen" ]]; then - nth_max=$((npe_node_max / npe_node_esnowrecen)) - export NTHREADS_ESNOWRECEN=${NTHREADSmax} export APRUN_ESNOWRECEN="${APRUN} 
--cpus-per-task=${NTHREADS_ESNOWRECEN}" diff --git a/env/JET.env b/env/JET.env index a65b941527..43b519ee8f 100755 --- a/env/JET.env +++ b/env/JET.env @@ -91,8 +91,6 @@ elif [[ "${step}" = "snowanl" ]]; then elif [[ "${step}" = "esnowrecen" ]]; then - nth_max=$((npe_node_max / npe_node_esnowrecen)) - export NTHREADS_ESNOWRECEN=${NTHREADSmax} export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" diff --git a/env/ORION.env b/env/ORION.env index 987c82e633..bc4eeec039 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -99,8 +99,6 @@ elif [[ "${step}" = "snowanl" ]]; then elif [[ "${step}" = "esnowrecen" ]]; then - nth_max=$((npe_node_max / npe_node_esnowrecen)) - export NTHREADS_ESNOWRECEN=${NTHREADSmax} export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" diff --git a/env/S4.env b/env/S4.env index 6e7f19495c..7e0911012c 100755 --- a/env/S4.env +++ b/env/S4.env @@ -91,8 +91,6 @@ elif [[ "${step}" = "snowanl" ]]; then elif [[ "${step}" = "esnowrecen" ]]; then - nth_max=$((npe_node_max / npe_node_esnowrecen)) - export NTHREADS_ESNOWRECEN=${NTHREADSmax} export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 5019799778..39d787cc2b 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -84,8 +84,6 @@ elif [[ "${step}" = "snowanl" ]]; then elif [[ "${step}" = "esnowrecen" ]]; then - nth_max=$((npe_node_max / npe_node_esnowrecen)) - export NTHREADS_ESNOWRECEN=${NTHREADSmax} export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" From 5d7c9b7f0057aeeab89057c0b82e8db4fd35f7c1 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 1 Aug 2024 15:42:28 +0000 Subject: [PATCH 70/90] update from cdump to run --- workflow/rocoto/gfs_tasks.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 0fefa348d7..dd651a9b54 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -630,21 +630,21 @@ def snowanl(self): def esnowrecen(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}prepsnowobs'} + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}prepsnowobs'} deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}snowanl'} + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}snowanl'} deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'metatask', 'name': f'{self.cdump}epmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + dep_dict = {'type': 'metatask', 'name': f'{self.run}epmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('esnowrecen') - task_name = f'{self.cdump}esnowrecen' + task_name = f'{self.run}esnowrecen' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, - 'cycledef': self.cdump.replace('enkf', ''), + 'cycledef': self.run.replace('enkf', ''), 'command': f'{self.HOMEgfs}/jobs/rocoto/esnowrecen.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', @@ -2628,7 +2628,7 @@ def esfc(self): dep_dict = {'type': 'task', 'name': f'{self.run}eupd'} deps.append(rocoto.add_dependency(dep_dict)) if self.app_config.do_jedisnowda: - dep_dict = 
{'type': 'task', 'name': f'{self.cdump}esnowrecen'} + dep_dict = {'type': 'task', 'name': f'{self.run}esnowrecen'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) From dd2439e40c96484333813f637c3c86e670af22eb Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 1 Aug 2024 16:47:59 +0000 Subject: [PATCH 71/90] update fregrid arg lists --- ush/python/pygfs/task/snowens_analysis.py | 50 ++++++++++++----------- 1 file changed, 26 insertions(+), 24 deletions(-) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 1e37f2de26..5eaf0b1d75 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -145,20 +145,21 @@ def regridDetBkg(self) -> None: chdir(self.task_config.DATA) arg_list = [ - f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc", - f"--input_dir ./bkg/det/", - f"--input_file {to_fv3time(self.task_config.bkg_time)}.sfc_data", - f"--scalar_field snodl", - f"--output_dir ./bkg/det_ensres/", - f"--output_file {to_fv3time(self.task_config.bkg_time)}.sfc_data", - f"--output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", - f"--interp_method conserve_order1", - f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight", - f"--weight_field lsm_frac", - f"--remap_file ./remap", + "--input_mosaic", f"./orog/det/{self.task_config.CASE}_mosaic.nc", + "--input_dir", f"./bkg/det/", + "--input_file", f"{to_fv3time(self.task_config.bkg_time)}.sfc_data", + "--scalar_field", f"snodl", + "--output_dir", f"./bkg/det_ensres/", + "--output_file", f"{to_fv3time(self.task_config.bkg_time)}.sfc_data", + "--output_mosaic", f"./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", + "--interp_method", f"conserve_order1", + "--weight_file", f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight", + "--weight_field", f"lsm_frac", + "--remap_file", f"./remap", ] - fregrid_exe = os.path.join(self.task_config.DATA, 'fregrid.x') + " " + " ".join(arg_list) + fregrid_exe = os.path.join(self.task_config.DATA, 'fregrid.x') exec_cmd = Executable(fregrid_exe) + exec_cmd(*arg_list) try: logger.debug(f"Executing {exec_cmd}") @@ -182,20 +183,21 @@ def regridDetInc(self) -> None: chdir(self.task_config.DATA) arg_list = [ - f"--input_mosaic ./orog/det/{self.task_config.CASE}_mosaic.nc", - f"--input_dir ./inc/det/", - f"--input_file snowinc.{to_fv3time(self.task_config.bkg_time)}.sfc_data", - f"--scalar_field snodl", - f"--output_dir ./inc/det_ensres/", - f"--output_file snowinc.{to_fv3time(self.task_config.bkg_time)}.sfc_data", - f"--output_mosaic ./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", - f"--interp_method conserve_order1", - f"--weight_file ./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight", - f"--weight_field lsm_frac", - f"--remap_file ./remap", + "--input_mosaic", f"./orog/det/{self.task_config.CASE}_mosaic.nc", + "--input_dir", f"./inc/det/", + "--input_file", f"snowinc.{to_fv3time(self.task_config.bkg_time)}.sfc_data", + "--scalar_field", f"snodl", + "--output_dir", f"./inc/det_ensres/", + "--output_file", f"snowinc.{to_fv3time(self.task_config.bkg_time)}.sfc_data", + "--output_mosaic", f"./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", + "--interp_method", f"conserve_order1", + "--weight_file", f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight", + "--weight_field", f"lsm_frac", + "--remap_file", f"./remap", ] - 
fregrid_exe = os.path.join(self.task_config.DATA, 'fregrid.x') + " " + " ".join(arg_list) + fregrid_exe = os.path.join(self.task_config.DATA, 'fregrid.x') exec_cmd = Executable(fregrid_exe) + exec_cmd(*arg_list) try: logger.debug(f"Executing {exec_cmd}") From 0422c9fc8bffa75fba869693cb29fd81b43726f0 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 1 Aug 2024 18:22:32 +0000 Subject: [PATCH 72/90] fix comments --- ush/python/pygfs/task/snowens_analysis.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 5eaf0b1d75..6ddc358334 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -159,11 +159,10 @@ def regridDetBkg(self) -> None: ] fregrid_exe = os.path.join(self.task_config.DATA, 'fregrid.x') exec_cmd = Executable(fregrid_exe) - exec_cmd(*arg_list) try: logger.debug(f"Executing {exec_cmd}") - exec_cmd() + exec_cmd(*arg_list) except OSError: raise OSError(f"Failed to execute {exec_cmd}") except Exception: @@ -197,11 +196,10 @@ def regridDetInc(self) -> None: ] fregrid_exe = os.path.join(self.task_config.DATA, 'fregrid.x') exec_cmd = Executable(fregrid_exe) - exec_cmd(*arg_list) try: logger.debug(f"Executing {exec_cmd}") - exec_cmd() + exec_cmd(*arg_list) except OSError: raise OSError(f"Failed to execute {exec_cmd}") except Exception: From df156fb8a4a5078712e15505bfe4f084eedb7f80 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 2 Aug 2024 13:39:07 +0000 Subject: [PATCH 73/90] Fix npe_apply_inc issue --- env/HERA.env | 6 +++--- env/HERCULES.env | 6 +++--- env/JET.env | 4 ++-- env/ORION.env | 4 ++-- env/S4.env | 6 +++--- env/WCOSS2.env | 4 ++-- 6 files changed, 15 insertions(+), 15 deletions(-) diff --git a/env/HERA.env b/env/HERA.env index 18c8d584dc..697cf21965 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -80,7 +80,7 @@ elif [[ "${step}" = "atmensanlletkf" ]]; then elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} - export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" + export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" elif [[ "${step}" = "aeroanlrun" ]]; then @@ -104,14 +104,14 @@ elif [[ "${step}" = "snowanl" ]]; then export NTHREADS_SNOWANL=${NTHREADSmax} export APRUN_SNOWANL="${APRUN} --cpus-per-task=${NTHREADS_SNOWANL}" - export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "esnowrecen" ]]; then export NTHREADS_ESNOWRECEN=${NTHREADSmax} export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" - export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "marinebmat" ]]; then diff --git a/env/HERCULES.env b/env/HERCULES.env index 99c64554a2..83d934c91a 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -103,16 +103,16 @@ case ${step} in export NTHREADS_SNOWANL=${NTHREADSmax} export APRUN_SNOWANL="${APRUN} --cpus-per-task=${NTHREADS_SNOWANL}" - export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" + export APRUN_APPLY_INCR="${launcher} -n 6" ;; "esnowrecen") export NTHREADS_ESNOWRECEN=${NTHREADSmax} export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" - export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" + export APRUN_APPLY_INCR="${launcher} -n 6" ;; - + "marinebmat") export APRUNCFP="${launcher} -n \$ncmd 
${mpmd_opt}" diff --git a/env/JET.env b/env/JET.env index 43b519ee8f..473539ded1 100755 --- a/env/JET.env +++ b/env/JET.env @@ -87,14 +87,14 @@ elif [[ "${step}" = "snowanl" ]]; then export NTHREADS_SNOWANL=${NTHREADSmax} export APRUN_SNOWANL="${APRUN}" - export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "esnowrecen" ]]; then export NTHREADS_ESNOWRECEN=${NTHREADSmax} export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" - export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "atmanlfv3inc" ]]; then diff --git a/env/ORION.env b/env/ORION.env index bc4eeec039..65a8871cdd 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -95,14 +95,14 @@ elif [[ "${step}" = "snowanl" ]]; then export NTHREADS_SNOWANL=${NTHREADSmax} export APRUN_SNOWANL="${APRUN} --cpus-per-task=${NTHREADS_SNOWANL}" - export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "esnowrecen" ]]; then export NTHREADS_ESNOWRECEN=${NTHREADSmax} export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" - export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "atmanlfv3inc" ]]; then diff --git a/env/S4.env b/env/S4.env index 7e0911012c..3d39fe4c75 100755 --- a/env/S4.env +++ b/env/S4.env @@ -68,7 +68,7 @@ elif [[ "${step}" = "atmensanlletkf" ]]; then elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} - export APRUN_ATMENSANLFV3INC="${APRUN}" + export APRUN_ATMENSANLFV3INC="${APRUN}" elif [[ "${step}" = "aeroanlrun" ]]; then @@ -87,14 +87,14 @@ elif [[ "${step}" = "snowanl" ]]; then export NTHREADS_SNOWANL=${NTHREADSmax} export APRUN_SNOWANL="${APRUN}" - export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" + export APRUN_APPLY_INCR="${launcher} -n 6 elif [[ "${step}" = "esnowrecen" ]]; then export NTHREADS_ESNOWRECEN=${NTHREADSmax} export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" - export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "atmanlfv3inc" ]]; then diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 39d787cc2b..572862ca63 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -80,14 +80,14 @@ elif [[ "${step}" = "snowanl" ]]; then export NTHREADS_SNOWANL=${NTHREADSmax} export APRUN_SNOWANL="${APRUN}" - export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "esnowrecen" ]]; then export NTHREADS_ESNOWRECEN=${NTHREADSmax} export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" - export APRUN_APPLY_INCR="${launcher} -n ${npe_apply_incr}" + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "atmanlfv3inc" ]]; then From bfc9e43361e73cd24e2aaa6abf0a755904844ff9 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 2 Aug 2024 13:41:20 +0000 Subject: [PATCH 74/90] fix missing quote --- env/S4.env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/env/S4.env b/env/S4.env index 3d39fe4c75..d0985e44ca 100755 --- a/env/S4.env +++ b/env/S4.env @@ -87,7 +87,7 @@ elif [[ "${step}" = "snowanl" ]]; then export NTHREADS_SNOWANL=${NTHREADSmax} export APRUN_SNOWANL="${APRUN}" - export APRUN_APPLY_INCR="${launcher} -n 6 + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = 
"esnowrecen" ]]; then From 09b11339fca5a06d52b0a86556fb3f1c81c3f8c7 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 2 Aug 2024 14:03:44 +0000 Subject: [PATCH 75/90] remove CDUMP --- env/HERA.env | 7 +- env/HERCULES.env | 7 +- env/JET.env | 11 +- env/ORION.env | 7 +- env/S4.env | 7 +- env/WCOSS2.env | 7 +- jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX | 46 +++ jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE | 16 +- jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE | 10 +- ..._RUN => JGLOBAL_AERO_ANALYSIS_VARIATIONAL} | 4 +- jobs/rocoto/aeroanlgenb.sh | 19 ++ jobs/rocoto/{aeroanlrun.sh => aeroanlvar.sh} | 4 +- parm/config/gfs/config.aeroanl | 24 +- parm/config/gfs/config.aeroanlgenb | 29 ++ parm/config/gfs/config.aeroanlrun | 11 - parm/config/gfs/config.aeroanlvar | 11 + parm/config/gfs/config.com | 1 + parm/config/gfs/config.resources | 54 +++- parm/gdas/aero_finalize_bmatrix_bkg.yaml.j2 | 19 ++ parm/gdas/aero_finalize_variational.yaml.j2 | 23 ++ parm/gdas/aero_stage_bmatrix_bkg.yaml.j2 | 38 +++ parm/gdas/aero_stage_variational.yaml.j2 | 50 +++ parm/ufs/gocart/ExtData.other | 20 +- .../exgdas_aero_analysis_generate_bmatrix.py | 27 ++ ... => exglobal_aero_analysis_variational.py} | 6 +- sorc/gdas.cd | 2 +- sorc/gfs_utils.fd | 2 +- sorc/link_workflow.sh | 2 + ush/forecast_postdet.sh | 12 + ush/python/pygfs/__init__.py | 2 + ush/python/pygfs/task/aero_analysis.py | 129 +------- ush/python/pygfs/task/aero_bmatrix.py | 294 ++++++++++++++++++ ush/python/pygfs/task/analysis.py | 2 +- ush/python/pygfs/task/bmatrix.py | 28 ++ workflow/applications/gfs_cycled.py | 6 +- workflow/rocoto/gfs_tasks.py | 51 ++- workflow/rocoto/tasks.py | 2 +- 37 files changed, 797 insertions(+), 193 deletions(-) create mode 100755 jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX rename jobs/{JGLOBAL_AERO_ANALYSIS_RUN => JGLOBAL_AERO_ANALYSIS_VARIATIONAL} (83%) create mode 100755 jobs/rocoto/aeroanlgenb.sh rename jobs/rocoto/{aeroanlrun.sh => aeroanlvar.sh} (83%) create mode 100644 parm/config/gfs/config.aeroanlgenb delete mode 100644 parm/config/gfs/config.aeroanlrun create mode 100644 parm/config/gfs/config.aeroanlvar create mode 100644 parm/gdas/aero_finalize_bmatrix_bkg.yaml.j2 create mode 100644 parm/gdas/aero_finalize_variational.yaml.j2 create mode 100644 parm/gdas/aero_stage_bmatrix_bkg.yaml.j2 create mode 100644 parm/gdas/aero_stage_variational.yaml.j2 create mode 100755 scripts/exgdas_aero_analysis_generate_bmatrix.py rename scripts/{exglobal_aero_analysis_run.py => exglobal_aero_analysis_variational.py} (84%) create mode 100644 ush/python/pygfs/task/aero_bmatrix.py create mode 100644 ush/python/pygfs/task/bmatrix.py diff --git a/env/HERA.env b/env/HERA.env index 3f0e7c9f36..ae82bfac02 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -82,13 +82,18 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" -elif [[ "${step}" = "aeroanlrun" ]]; then +elif [[ "${step}" = "aeroanlvar" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}" +elif [[ "${step}" = "aeroanlgenb" ]]; then + + export NTHREADS_AEROANLGENB=${NTHREADSmax} + export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" + elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} diff --git a/env/HERCULES.env b/env/HERCULES.env index 83fa1aadd1..151a2da251 100755 --- a/env/HERCULES.env +++ 
b/env/HERCULES.env @@ -86,12 +86,17 @@ case ${step} in export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" ;; - "aeroanlrun") + "aeroanlvar") export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}" + ;; + "aeroanlgenb") + + export NTHREADS_AEROANLGENB=${NTHREADSmax} + export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" ;; "prepobsaero") diff --git a/env/JET.env b/env/JET.env index 810a8cd501..d93d8438fc 100755 --- a/env/JET.env +++ b/env/JET.env @@ -70,13 +70,18 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${launcher} ${ntasks}" -elif [[ "${step}" = "aeroanlrun" ]]; then - - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" +elif [[ "${step}" = "aeroanlvar" ]]; then export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN}" +elif [[ "${step}" = "aeroanlgenb" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + export NTHREADS_AEROANLGENB=${NTHREADSmax} + export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" + elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} diff --git a/env/ORION.env b/env/ORION.env index bbbfb59182..25fba6cfa6 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -78,13 +78,18 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" -elif [[ "${step}" = "aeroanlrun" ]]; then +elif [[ "${step}" = "aeroanlvar" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}" +elif [[ "${step}" = "aeroanlgenb" ]]; then + + export NTHREADS_AEROANLGENB=${NTHREADSmax} + export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" + elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} diff --git a/env/S4.env b/env/S4.env index 840ca65898..5b7432104c 100755 --- a/env/S4.env +++ b/env/S4.env @@ -70,13 +70,18 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${APRUN}" -elif [[ "${step}" = "aeroanlrun" ]]; then +elif [[ "${step}" = "aeroanlvar" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN}" +elif [[ "${step}" = "aeroanlgenb" ]]; then + + export NTHREADS_AEROANLGENB=${NTHREADSmax} + export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" + elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 18caf1bc03..89cc51da97 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -63,13 +63,18 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${APRUN}" -elif [[ "${step}" = "aeroanlrun" ]]; then +elif [[ "${step}" = "aeroanlvar" ]]; then export APRUNCFP="${launcher} -np \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN}" +elif [[ "${step}" = "aeroanlgenb" ]]; then + + export NTHREADS_AEROANLGENB=${NTHREADSmax} + export APRUN_AEROANLGENB="${APRUN}" + elif [[ "${step}" = "prepobsaero" ]]; then 
export NTHREADS_PREPOBSAERO=${NTHREADS1} diff --git a/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX b/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX new file mode 100755 index 0000000000..81c89e9155 --- /dev/null +++ b/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX @@ -0,0 +1,46 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlgenb" -c "base aeroanl aeroanlgenb" + +############################################## +# Set variables used in the script +############################################## + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMIN_OBS:COM_OBS_TMPL \ + COMOUT_CHEM_BMAT:COM_CHEM_BMAT_TMPL \ + COMIN_ATMOS_RESTART:COM_ATMOS_RESTART_TMPL + +mkdir -p "${COMOUT_CHEM_BMAT}" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASAEROBMATPY:-${SCRgfs}/exgdas_aero_analysis_generate_bmatrix.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit 1 +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE b/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE index 455f572da5..b894b82531 100755 --- a/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE +++ b/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE @@ -8,25 +8,17 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlfinal" -c "base aeroanl aeroan ############################################## # Set variables used in the script ############################################## -# shellcheck disable=SC2153 -GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -gcyc=${GDATE:8:2} -GDUMP="gdas" - ############################################## # Begin JOB SPECIFIC work ############################################## # Generate COM variables from templates -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_CHEM_ANALYSIS - -RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ - COM_CHEM_ANALYSIS_PREV:COM_CHEM_ANALYSIS_TMPL \ - COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMOUT_CHEM_ANALYSIS:COM_CHEM_ANALYSIS_TMPL \ + COMOUT_ATMOS_RESTART:COM_ATMOS_RESTART_TMPL -mkdir -m 775 -p "${COM_CHEM_ANALYSIS}" +mkdir -p "${COMOUT_CHEM_ANALYSIS}" ############################################################### # Run relevant script diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE index b2a2893bc0..5be8767308 100755 --- a/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE +++ b/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE @@ -19,13 +19,15 @@ GDUMP="gdas" ############################################## # Generate COM variables from templates -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_CHEM_ANALYSIS +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COM_OBS:COM_OBS_TMPL \ + COMOUT_CHEM_ANALYSIS:COM_CHEM_ANALYSIS_TMPL RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ - 
COM_CHEM_ANALYSIS_PREV:COM_CHEM_ANALYSIS_TMPL \ - COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + COMIN_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL \ + COMIN_CHEM_BMAT_PREV:COM_CHEM_BMAT_TMPL -mkdir -m 775 -p "${COM_CHEM_ANALYSIS}" +mkdir -p "${COMOUT_CHEM_ANALYSIS}" ############################################################### # Run relevant script diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_RUN b/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL similarity index 83% rename from jobs/JGLOBAL_AERO_ANALYSIS_RUN rename to jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL index 43749b78c5..290d7225dd 100755 --- a/jobs/JGLOBAL_AERO_ANALYSIS_RUN +++ b/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL @@ -3,7 +3,7 @@ source "${HOMEgfs}/ush/preamble.sh" export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}aeroanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlrun" -c "base aeroanl aeroanlrun" +source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlvar" -c "base aeroanl aeroanlvar" ############################################## # Set variables used in the script @@ -16,7 +16,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlrun" -c "base aeroanl aeroanlr ############################################################### # Run relevant script -EXSCRIPT=${GDASAERORUNSH:-${SCRgfs}/exglobal_aero_analysis_run.py} +EXSCRIPT=${GDASAEROVARSH:-${SCRgfs}/exglobal_aero_analysis_variational.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/rocoto/aeroanlgenb.sh b/jobs/rocoto/aeroanlgenb.sh new file mode 100755 index 0000000000..d0bc5dda9b --- /dev/null +++ b/jobs/rocoto/aeroanlgenb.sh @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="aeroanlgenb" +export jobid="${job}.$$" + +############################################################### + +# Execute the JJOB +"${HOMEgfs}/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX" +status=$? +exit "${status}" diff --git a/jobs/rocoto/aeroanlrun.sh b/jobs/rocoto/aeroanlvar.sh similarity index 83% rename from jobs/rocoto/aeroanlrun.sh rename to jobs/rocoto/aeroanlvar.sh index 529bb2d7d1..7aa7d831f9 100755 --- a/jobs/rocoto/aeroanlrun.sh +++ b/jobs/rocoto/aeroanlvar.sh @@ -8,11 +8,11 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? [[ ${status} -ne 0 ]] && exit "${status}" -export job="aeroanlrun" +export job="aeroanlvar" export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN" +"${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL" status=$? 
exit "${status}" diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl index 24a5e92644..19766062d9 100644 --- a/parm/config/gfs/config.aeroanl +++ b/parm/config/gfs/config.aeroanl @@ -5,20 +5,36 @@ echo "BEGIN: config.aeroanl" -export CASE_ANL=${CASE} +# define analysis resolution based on deterministic res +case ${CASE} in + "C1152" | "C768" | "C384" | "C192") + CASE_ANL="C192" + ;; + "C96" | "C48") + CASE_ANL=${CASE} + ;; + *) + echo "FATAL ERROR: Aerosol DA not supported at ${CASE} resolution" + exit 4 +esac +export CASE_ANL export OBS_LIST="${PARMgfs}/gdas/aero/obs/lists/gdas_aero.yaml.j2" -export STATICB_TYPE='identity' +export STATICB_TYPE='diffusion' export BERROR_YAML="${PARMgfs}/gdas/aero/berror/staticb_${STATICB_TYPE}.yaml.j2" -export BERROR_DATA_DIR="${FIXgfs}/gdas/bump/aero/${CASE_ANL}/" -export BERROR_DATE="20160630.000000" +export BERROR_DATA_DIR="${FIXgfs}/gdas/aero/clim_b" export CRTM_FIX_YAML="${PARMgfs}/gdas/aero_crtm_coeff.yaml.j2" export JEDI_FIX_YAML="${PARMgfs}/gdas/aero_jedi_fix.yaml.j2" +export AERO_STAGE_VARIATIONAL_TMPL="${PARMgfs}/gdas/aero_stage_variational.yaml.j2" +export AERO_FINALIZE_VARIATIONAL_TMPL="${PARMgfs}/gdas/aero_finalize_variational.yaml.j2" + export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ export JEDIEXE="${EXECgfs}/gdas.x" +export BMATEXE="${EXECgfs}/gdasapp_chem_diagb.x" +export DIFFUSIONEXE="${EXECgfs}/gdas_fv3jedi_error_covariance_toolbox.x" if [[ "${DOIAU}" == "YES" ]]; then export aero_bkg_times="3,6,9" diff --git a/parm/config/gfs/config.aeroanlgenb b/parm/config/gfs/config.aeroanlgenb new file mode 100644 index 0000000000..b41b22a524 --- /dev/null +++ b/parm/config/gfs/config.aeroanlgenb @@ -0,0 +1,29 @@ +#!/bin/bash -x + +########## config.aeroanlgenb ########## +# Aerosol Variance specific + +echo "BEGIN: config.aeroanlgenb" + +# Get task specific resources +source "${EXPDIR}/config.resources" aeroanlgenb + +export BMATYAML="${PARMgfs}/gdas/aero/berror/aero_diagb.yaml.j2" +export DIFFUSIONYAML="${PARMgfs}/gdas/aero/berror/aero_diffusionparm.yaml.j2" +export INTERPYAML="${PARMgfs}/gdas/aero/berror/aero_interp.yaml.j2" +export AERO_BMATRIX_STAGE_TMPL="${PARMgfs}/gdas/aero_stage_bmatrix_bkg.yaml.j2" +export AERO_BMATRIX_FINALIZE_TMPL="${PARMgfs}/gdas/aero_finalize_bmatrix_bkg.yaml.j2" +export aero_diffusion_iter=10 +export aero_diffusion_horiz_len=2500e3 +export aero_diffusion_fixed_val=1.0 +export npx_clim_b=97 +export npy_clim_b=97 +export aero_diagb_weight=0.9 +export aero_staticb_rescaling_factor=2.0 +export aero_diagb_rescale=20.0 +export aero_diagb_n_halo=4 +export aero_diagb_n_neighbors=16 +export aero_diagb_smooth_horiz_iter=0 +export aero_diagb_smooth_vert_iter=0 + +echo "END: config.aeroanlgenb" diff --git a/parm/config/gfs/config.aeroanlrun b/parm/config/gfs/config.aeroanlrun deleted file mode 100644 index 012e5b79f3..0000000000 --- a/parm/config/gfs/config.aeroanlrun +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash -x - -########## config.aeroanlrun ########## -# Aerosol Analysis specific - -echo "BEGIN: config.aeroanlrun" - -# Get task specific resources -source "${EXPDIR}/config.resources" aeroanlrun - -echo "END: config.aeroanlrun" diff --git a/parm/config/gfs/config.aeroanlvar b/parm/config/gfs/config.aeroanlvar new file mode 100644 index 0000000000..4282b6c840 --- /dev/null +++ b/parm/config/gfs/config.aeroanlvar @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlvar ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlvar" + +# Get task specific resources 
+source "${EXPDIR}/config.resources" aeroanlvar + +echo "END: config.aeroanlvar" diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com index 222ffdae95..818ea38017 100644 --- a/parm/config/gfs/config.com +++ b/parm/config/gfs/config.com @@ -98,5 +98,6 @@ declare -rx COM_ICE_GRIB_GRID_TMPL=${COM_ICE_GRIB_TMPL}'/${GRID}' declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' +declare -rx COM_CHEM_BMAT_TMPL=${COM_CHEM_ANALYSIS_TMPL}'/bmatrix' declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 9ddb85a87a..9c576aa76b 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -16,7 +16,7 @@ if (( $# != 1 )); then echo "atmanlinit atmanlvar atmanlfv3inc atmanlfinal" echo "atmensanlinit atmensanlletkf atmensanlfv3inc atmensanlfinal" echo "snowanl" - echo "prepobsaero aeroanlinit aeroanlrun aeroanlfinal" + echo "prepobsaero aeroanlinit aeroanlvar aeroanlfinal" echo "anal sfcanl analcalc analdiag fcst echgres" echo "upp atmos_products" echo "tracker genesis genesis_fsu" @@ -355,12 +355,12 @@ case ${step} in layout_y=8 ;; "C384") - layout_x=8 - layout_y=8 + layout_x=6 + layout_y=6 ;; "C192" | "C96") - layout_x=8 - layout_y=8 + layout_x=4 + layout_y=4 ;; "C48" ) # this case is for testing only @@ -381,27 +381,61 @@ case ${step} in memory="3072M" ;; - "aeroanlrun") + "aeroanlvar") case ${CASE} in "C768") layout_x=8 layout_y=8 ;; "C384") - layout_x=8 - layout_y=8 + layout_x=6 + layout_y=6 ;; "C192" | "C96") + layout_x=4 + layout_y=4 + ;; + "C48" ) + # this case is for testing only + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}" + exit 4 + esac + + export layout_x + export layout_y + + walltime="00:30:00" + ntasks=$(( layout_x * layout_y * 6 )) + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + export is_exclusive=True + ;; + + "aeroanlgenb") + case ${CASE} in + "C768") layout_x=8 layout_y=8 ;; + "C384") + layout_x=6 + layout_y=6 + ;; + "C192" | "C96") + layout_x=4 + layout_y=4 + ;; "C48" ) # this case is for testing only layout_x=1 layout_y=1 ;; *) - echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}" + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" exit 4 esac @@ -413,8 +447,10 @@ case ${step} in threads_per_task=1 tasks_per_node=$(( max_tasks_per_node / threads_per_task )) export is_exclusive=True + ;; + "aeroanlfinal") walltime="00:10:00" ntasks=1 diff --git a/parm/gdas/aero_finalize_bmatrix_bkg.yaml.j2 b/parm/gdas/aero_finalize_bmatrix_bkg.yaml.j2 new file mode 100644 index 0000000000..b33f280945 --- /dev/null +++ b/parm/gdas/aero_finalize_bmatrix_bkg.yaml.j2 @@ -0,0 +1,19 @@ +{% set cycle_HH = current_cycle | strftime("%H") %} +{% set HEAD = RUN + ".t" + cycle_HH + "z." 
%} +{% set offset_td = "+6H" | to_timedelta %} +{% set background_time = current_cycle | add_to_datetime(offset_td) %} +copy: +### copy YAMLs used +{% set yaml_list = ['chem_diagb.yaml', 'chem_diffusion.yaml'] %} +{% for fname in yaml_list %} +- ["{{ DATA }}/{{ HEAD }}{{ fname }}", "{{ COMOUT_CHEM_BMAT }}/{{ HEAD }}{{ fname }}"] +{% endfor %} +### copy stddev files to ROTDIR +{% for tile in range(1, ntiles+1) %} +- ["{{ DATA }}/stddev/{{ background_time | to_fv3time }}.stddev.fv_tracer.res.tile{{ tile }}.nc", "{{ COMOUT_CHEM_BMAT }}/{{ background_time | to_fv3time }}.stddev.fv_tracer.res.tile{{ tile }}.nc"] +{% endfor %} +### copy coupler file +- ["{{ DATA }}/stddev/{{ background_time | to_fv3time }}.stddev.coupler.res", "{{ COMOUT_CHEM_BMAT }}/{{ background_time | to_fv3time }}.stddev.coupler.res"] +### copy diffusion files +- ["{{ DATA }}/diffusion/diffusion_hz.nc", "{{ COMOUT_CHEM_BMAT }}/{{ HEAD }}aero_diffusion_hz.nc"] +- ["{{ DATA }}/diffusion/diffusion_vt.nc", "{{ COMOUT_CHEM_BMAT }}/{{ HEAD }}aero_diffusion_vt.nc"] diff --git a/parm/gdas/aero_finalize_variational.yaml.j2 b/parm/gdas/aero_finalize_variational.yaml.j2 new file mode 100644 index 0000000000..b9247bcd62 --- /dev/null +++ b/parm/gdas/aero_finalize_variational.yaml.j2 @@ -0,0 +1,23 @@ +###################################### +# set some variables +###################################### +{% if DOIAU == True %} + {% set bkgtime = AERO_WINDOW_BEGIN %} +{% else %} + {% set bkgtime = current_cycle %} +{% endif %} +###################################### +mkdir: +- "{{ COMOUT_CHEM_ANALYSIS }}" +- "{{ COMOUT_ATMOS_RESTART }}" +copy: +## copy variational YAML to ROTDIR +- ["{{ DATA }}/{{ APREFIX }}aerovar.yaml", "{{ COMOUT_CHEM_ANALYSIS }}/{{ APREFIX }}aerovar.yaml"] +## copy increments +{% for tile in range(1,ntiles+1) %} +- ["{{ DATA }}/anl/aeroinc.{{ current_cycle | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc", "{{ COMOUT_CHEM_ANALYSIS }}/aeroinc.{{ current_cycle | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc"] +{% endfor %} +## copy analysis +{% for tile in range(1,ntiles+1) %} +- ["{{ DATA }}/anl/{{ bkgtime | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc", "{{ COMOUT_ATMOS_RESTART }}/{{ bkgtime | to_fv3time }}.aeroanl_fv_tracer.res.tile{{ tile }}.nc"] +{% endfor %} diff --git a/parm/gdas/aero_stage_bmatrix_bkg.yaml.j2 b/parm/gdas/aero_stage_bmatrix_bkg.yaml.j2 new file mode 100644 index 0000000000..9005b9ff12 --- /dev/null +++ b/parm/gdas/aero_stage_bmatrix_bkg.yaml.j2 @@ -0,0 +1,38 @@ +###################################### +# set some variables +###################################### +{% set offset_td = "+6H" | to_timedelta %} +{% set background_time = current_cycle | add_to_datetime(offset_td) %} +{% set ftype_list = ['fv_core.res', 'fv_tracer.res'] %} +###################################### +# create working directories +###################################### +mkdir: +- "{{ DATA }}/bkg" +- "{{ DATA }}/stddev" +- "{{ DATA }}/clm_stddev" +- "{{ DATA }}/diffusion" +copy: +###################################### +# copy deterministic background files +###################################### +# define variables +# Declare a dict of search and replace terms to run on each template +{% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':current_cycle | to_YMD, + '${HH}':current_cycle | strftime("%H"), + '${MEMDIR}':""} %} + +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ background_time | to_fv3time }}.coupler.res", "{{ DATA }}/bkg/{{ background_time | to_fv3time }}.coupler.res"] +{% for ftype in 
ftype_list %} + {% for tile in range(1, ntiles+1) %} +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ background_time | to_fv3time }}.{{ ftype }}.tile{{ tile }}.nc", "{{ DATA }}/bkg/{{ background_time | to_fv3time }}.{{ ftype }}.tile{{ tile }}.nc"] + {% endfor %} +{% endfor %} +# copy climatological stddev files +###################################### +{% for tile in range(1, ntiles+1) %} +- ["{{ BERROR_DATA_DIR }}/stddev.fv_tracer.res.tile{{ tile }}.nc", "{{ DATA }}/clm_stddev/stddev.fv_tracer.res.tile{{ tile }}.nc"] +{% endfor %} + diff --git a/parm/gdas/aero_stage_variational.yaml.j2 b/parm/gdas/aero_stage_variational.yaml.j2 new file mode 100644 index 0000000000..afd0e1b946 --- /dev/null +++ b/parm/gdas/aero_stage_variational.yaml.j2 @@ -0,0 +1,50 @@ +###################################### +# set some variables +###################################### +{% if DOIAU == True %} + {% set bkg_times = [] %} + {% for fh in range(0, 7, 3) %} + {% set offset = fh | string + "H" %} + {% set fcst_timedelta = offset | to_timedelta %} + {% set fcst_time = AERO_WINDOW_BEGIN | add_to_datetime(fcst_timedelta) %} + {% do bkg_times.append(fcst_time) %} + {% endfor %} +{% else %} + {% set bkg_times = [] %} + {% do bkg_times.append(current_cycle) %} +{% endif %} +{% set fvfiles = ['fv_core.res.', 'fv_tracer.res.'] %} +###################################### +mkdir: +- "{{ DATA }}/anl" +- "{{ DATA }}/diags" +- "{{ DATA }}/berror" +- "{{ DATA }}/bkg" +copy: +###################################### +## copy backgrounds +{% for bkgtime in bkg_times %} +- ["{{ COMIN_ATMOS_RESTART_PREV }}/{{ bkgtime | to_fv3time }}.coupler.res", "{{ DATA }}/bkg/{{ bkgtime | to_fv3time }}.coupler.res"] + {% for fvfile in fvfiles %} + {% for tile in range(1,ntiles+1) %} +- ["{{ COMIN_ATMOS_RESTART_PREV }}/{{ bkgtime | to_fv3time }}.{{ fvfile }}tile{{ tile }}.nc", "{{ DATA }}/bkg/{{ bkgtime | to_fv3time }}.{{ fvfile }}tile{{ tile }}.nc"] + {% endfor %} + {% endfor %} +{% endfor %} +###################################### +## copy backgrounds again for fv_tracer to create analysis files later +{% for tile in range(1,ntiles+1) %} +- ["{{ COMIN_ATMOS_RESTART_PREV }}/{{ bkg_times[0] | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc", "{{ DATA }}/anl/{{ bkg_times[0] | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc"] +{% endfor %} + +###################################### +## copy berror files from COMIN_CHEM_BMAT_PREV +## stddev files +{% for tile in range(1, ntiles+1) %} +- ["{{ COMIN_CHEM_BMAT_PREV }}/{{ current_cycle | to_fv3time }}.stddev.fv_tracer.res.tile{{ tile }}.nc", "{{ DATA }}/berror/{{ current_cycle | to_fv3time }}.stddev.fv_tracer.res.tile{{ tile }}.nc"] +{% endfor %} +### copy coupler file +- ["{{ COMIN_CHEM_BMAT_PREV }}/{{ current_cycle | to_fv3time }}.stddev.coupler.res", "{{ DATA }}/berror/{{ current_cycle | to_fv3time }}.stddev.coupler.res"] +### copy diffusion files +- ["{{ COMIN_CHEM_BMAT_PREV }}/{{ GPREFIX }}aero_diffusion_hz.nc", "{{ DATA }}/berror/diffusion_hz.nc"] +- ["{{ COMIN_CHEM_BMAT_PREV }}/{{ GPREFIX }}aero_diffusion_vt.nc", "{{ DATA }}/berror/diffusion_vt.nc"] diff --git a/parm/ufs/gocart/ExtData.other b/parm/ufs/gocart/ExtData.other index 7a0d63d6ca..5d2ddc5102 100644 --- a/parm/ufs/gocart/ExtData.other +++ b/parm/ufs/gocart/ExtData.other @@ -17,12 +17,12 @@ DU_UTHRES '1' Y E - none none uthres ExtData/n #====== Sulfate Sources ================================================= # Anthropogenic (BF & FF) emissions -- allowed to input as two layers -SU_ANTHROL1 NA N Y %y4-%m2-%d2t12:00:00 none none 
SO2 ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc -SU_ANTHROL2 NA N Y %y4-%m2-%d2t12:00:00 none none SO2_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +SU_ANTHROL1 NA Y Y %y4-%m2-%d2t12:00:00 none none SO2 ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +SU_ANTHROL2 NA Y Y %y4-%m2-%d2t12:00:00 none none SO2_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # Ship emissions -SU_SHIPSO2 NA N Y %y4-%m2-%d2t12:00:00 none none SO2_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc -SU_SHIPSO4 NA N Y %y4-%m2-%d2t12:00:00 none none SO4_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +SU_SHIPSO2 NA Y Y %y4-%m2-%d2t12:00:00 none none SO2_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +SU_SHIPSO4 NA Y Y %y4-%m2-%d2t12:00:00 none none SO4_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # Aircraft fuel consumption SU_AIRCRAFT NA Y Y %y4-%m2-%d2t12:00:00 none none none /dev/null @@ -63,11 +63,11 @@ OC_MTPO NA Y Y %y4-%m2-%d2t12:00:00 none none mtpo ExtData/nexus/MEGAN_ OC_BIOFUEL NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null # Anthropogenic (BF & FF) emissions -- allowed to input as two layers -OC_ANTEOC1 NA N Y %y4-%m2-%d2t12:00:00 none none OC ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc -OC_ANTEOC2 NA N Y %y4-%m2-%d2t12:00:00 none none OC_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +OC_ANTEOC1 NA Y Y %y4-%m2-%d2t12:00:00 none none OC ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +OC_ANTEOC2 NA Y Y %y4-%m2-%d2t12:00:00 none none OC_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # EDGAR based ship emissions -OC_SHIP NA N Y %y4-%m2-%d2t12:00:00 none none OC_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +OC_SHIP NA Y Y %y4-%m2-%d2t12:00:00 none none OC_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # Aircraft fuel consumption OC_AIRCRAFT NA N Y %y4-%m2-%d2t12:00:00 none none oc_aviation /dev/null @@ -88,11 +88,11 @@ pSOA_ANTHRO_VOC NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null BC_BIOFUEL NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null # Anthropogenic (BF & FF) emissions -- allowed to input as two layers -BC_ANTEBC1 NA N Y %y4-%m2-%d2t12:00:00 none none BC ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc -BC_ANTEBC2 NA N Y %y4-%m2-%d2t12:00:00 none none BC_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +BC_ANTEBC1 NA Y Y %y4-%m2-%d2t12:00:00 none none BC ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +BC_ANTEBC2 NA Y Y %y4-%m2-%d2t12:00:00 none none BC_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # EDGAR based ship emissions -BC_SHIP NA N Y %y4-%m2-%d2t12:00:00 none none BC_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +BC_SHIP NA Y Y %y4-%m2-%d2t12:00:00 none none BC_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # Aircraft fuel consumption BC_AIRCRAFT NA N Y %y4-%m2-%d2t12:00:00 none none bc_aviation /dev/null diff --git a/scripts/exgdas_aero_analysis_generate_bmatrix.py b/scripts/exgdas_aero_analysis_generate_bmatrix.py new file mode 100755 index 0000000000..0d8389c40d --- /dev/null +++ b/scripts/exgdas_aero_analysis_generate_bmatrix.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 +# exgdas_aero_analysis_generate_bmatrix.py +# This script creates an 
AerosolBMatrix object +# and runs the methods needed +# to stage files, compute the variance, and write to com +# files needed for the variational solver +import os + +from wxflow import Logger, cast_strdict_as_dtypedict +from pygfs.task.aero_bmatrix import AerosolBMatrix + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the aerosol variance and diffusion correlation tasks + AeroB = AerosolBMatrix(config) + AeroB.initialize() + AeroB.interpBackground() + AeroB.computeVariance() + AeroB.computeDiffusion() + AeroB.finalize() diff --git a/scripts/exglobal_aero_analysis_run.py b/scripts/exglobal_aero_analysis_variational.py similarity index 84% rename from scripts/exglobal_aero_analysis_run.py rename to scripts/exglobal_aero_analysis_variational.py index 85f4b963a4..dd5bb4f65a 100755 --- a/scripts/exglobal_aero_analysis_run.py +++ b/scripts/exglobal_aero_analysis_variational.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 -# exglobal_aero_analysis_run.py +# exglobal_aero_analysis_variational.py # This script creates an AerosolAnalysis object -# and runs the execute method +# and runs the variational method # which executes the global aerosol variational analysis import os @@ -19,4 +19,4 @@ # Instantiate the aerosol analysis task AeroAnl = AerosolAnalysis(config) - AeroAnl.execute() + AeroAnl.variational() diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 52f41a298b..5583fbb688 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 52f41a298b4c6b7bbf6f203b6579516819fbbf36 +Subproject commit 5583fbb6884a9b98fb3df76058ea2f51414299a2 diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd index 02ce084c24..279bbf2097 160000 --- a/sorc/gfs_utils.fd +++ b/sorc/gfs_utils.fd @@ -1 +1 @@ -Subproject commit 02ce084c244823e22661d493a50236b7d5eaf70a +Subproject commit 279bbf2097d87321294436d17bf5b73c4c07ab4a diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 9722f5a2b8..505e60b746 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -367,9 +367,11 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then declare -a JEDI_EXE=("gdas.x" \ "gdas_soca_gridgen.x" \ "gdas_soca_error_covariance_toolbox.x" \ + "gdas_fv3jedi_error_covariance_toolbox.x" \ "gdas_soca_setcorscales.x" \ "gdas_soca_diagb.x" \ "fv3jedi_plot_field.x" \ + "gdasapp_chem_diagb.x" \ "fv3jedi_fv3inc.x" \ "gdas_ens_handler.x" \ "gdas_incr_handler.x" \ diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 7de31d6235..bd15ec8f92 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -60,6 +60,18 @@ FV3_postdet() { break fi done + # Replace fv_tracer with aeroanl_fv_tracer restart files from current cycle (if found) + local nn + for (( nn = 1; nn <= ntiles; nn++ )); do + if [[ -f "${COMOUT_ATMOS_RESTART}/${restart_date:0:8}.${restart_date:8:2}0000.aeroanl_fv_tracer.res.tile${nn}.nc" ]]; then + rm -f "${DATA}/INPUT/fv_tracer.res.tile${nn}.nc" + ${NCP} "${COMOUT_ATMOS_RESTART}/${restart_date:0:8}.${restart_date:8:2}0000.aeroanl_fv_tracer.res.tile${nn}.nc" \ + "${DATA}/INPUT/fv_tracer.res.tile${nn}.nc" + else + echo "WARNING: 'aeroanl_fv_tracer.res.tile1.nc' not found in '${COMOUT_ATMOS_RESTART}', using 'fv_tracer.res.tile1.nc'" + break + fi + done fi # if [[ "${RERUN}" != "YES" ]]; then fi # if [[ "${warm_start}" == ".true." 
]]; then diff --git a/ush/python/pygfs/__init__.py b/ush/python/pygfs/__init__.py index c0b72bbc35..e1ff7e6862 100644 --- a/ush/python/pygfs/__init__.py +++ b/ush/python/pygfs/__init__.py @@ -2,8 +2,10 @@ import os from .task.analysis import Analysis +from .task.bmatrix import BMatrix from .task.aero_emissions import AerosolEmissions from .task.aero_analysis import AerosolAnalysis +from .task.aero_bmatrix import AerosolBMatrix from .task.atm_analysis import AtmAnalysis from .task.atmens_analysis import AtmEnsAnalysis from .task.marine_bmat import MarineBMat diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index 69a992d7d4..2a22bd5632 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ b/ush/python/pygfs/task/aero_analysis.py @@ -82,29 +82,18 @@ def initialize(self: Analysis) -> None: jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() - # stage berror files - # copy BUMP files, otherwise it will assume ID matrix - if self.task_config.get('STATICB_TYPE', 'identity') in ['bump']: - FileHandler(self.get_berror_dict(self.task_config)).sync() - - # stage backgrounds - FileHandler(self.get_bkg_dict(AttrDict(self.task_config, **self.task_config))).sync() + # stage files from COM and create working directories + logger.info(f"Staging files prescribed from {self.task_config.AERO_STAGE_VARIATIONAL_TMPL}") + aero_var_stage_list = parse_j2yaml(self.task_config.AERO_STAGE_VARIATIONAL_TMPL, self.task_config) + FileHandler(aero_var_stage_list).sync() # generate variational YAML file logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}") save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) logger.info(f"Wrote variational YAML to: {self.task_config.jedi_yaml}") - # need output dir for diags and anl - logger.debug("Create empty output [anl, diags] directories to receive output from executable") - newdirs = [ - os.path.join(self.task_config['DATA'], 'anl'), - os.path.join(self.task_config['DATA'], 'diags'), - ] - FileHandler({'mkdir': newdirs}).sync() - @logit(logger) - def execute(self: Analysis) -> None: + def variational(self: Analysis) -> None: chdir(self.task_config.DATA) @@ -140,13 +129,15 @@ def finalize(self: Analysis) -> None: """ # ---- tar up diags # path of output tar statfile - aerostat = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['APREFIX']}aerostat") + logger.info('Preparing observation space diagnostics for archiving') + aerostat = os.path.join(self.task_config.COMOUT_CHEM_ANALYSIS, f"{self.task_config['APREFIX']}aerostat") # get list of diag files to put in tarball diags = glob.glob(os.path.join(self.task_config['DATA'], 'diags', 'diag*nc4')) # gzip the files first for diagfile in diags: + logger.info(f'Adding {diagfile} to tar file') with open(diagfile, 'rb') as f_in, gzip.open(f"{diagfile}.gz", 'wb') as f_out: f_out.writelines(f_in) @@ -155,44 +146,16 @@ def finalize(self: Analysis) -> None: for diagfile in diags: diaggzip = f"{diagfile}.gz" archive.add(diaggzip, arcname=os.path.basename(diaggzip)) - - # copy full YAML from executable to ROTDIR - src = os.path.join(self.task_config['DATA'], f"{self.task_config['RUN']}.t{self.task_config['cyc']:02d}z.aerovar.yaml") - dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['RUN']}.t{self.task_config['cyc']:02d}z.aerovar.yaml") - yaml_copy = { - 'mkdir': [self.task_config.COM_CHEM_ANALYSIS], - 'copy': [[src, dest]] - } - FileHandler(yaml_copy).sync() 
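
[The reworked finalize() above keeps the gzip-then-tar treatment of the observation-space diagnostics while delegating file movement to the Jinja-templated FileHandler lists. The archiving idiom, isolated from the task class with illustrative stand-ins for the task_config paths:

    import glob
    import gzip
    import os
    import tarfile

    data_dir = "/path/to/DATA"                                     # illustrative
    aerostat = "/path/to/COMOUT_CHEM_ANALYSIS/gdas.t00z.aerostat"  # illustrative

    diags = glob.glob(os.path.join(data_dir, "diags", "diag*nc4"))
    for diagfile in diags:
        # gzip each diagnostic file next to the original
        with open(diagfile, "rb") as f_in, gzip.open(f"{diagfile}.gz", "wb") as f_out:
            f_out.writelines(f_in)

    # bundle the gzipped diags into the single aerostat file for archiving
    with tarfile.open(aerostat, "w") as archive:
        for diagfile in diags:
            archive.add(f"{diagfile}.gz", arcname=os.path.basename(f"{diagfile}.gz"))
]
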
- - # ---- copy RESTART fv_tracer files for future reference - if self.task_config.DOIAU: - bkgtime = self.task_config.AERO_WINDOW_BEGIN - else: - bkgtime = self.task_config.current_cycle - template = '{}.fv_tracer.res.tile{}.nc'.format(to_fv3time(bkgtime), '{tilenum}') - bkglist = [] - for itile in range(1, self.task_config.ntiles + 1): - tracer = template.format(tilenum=itile) - src = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, tracer) - dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f'aeroges.{tracer}') - bkglist.append([src, dest]) - FileHandler({'copy': bkglist}).sync() + logger.info(f'Saved diags to {aerostat}') # ---- add increments to RESTART files logger.info('Adding increments to RESTART files') self._add_fms_cube_sphere_increments() - # ---- move increments to ROTDIR - logger.info('Moving increments to ROTDIR') - template = f'aeroinc.{to_fv3time(self.task_config.current_cycle)}.fv_tracer.res.tile{{tilenum}}.nc' - inclist = [] - for itile in range(1, self.task_config.ntiles + 1): - tracer = template.format(tilenum=itile) - src = os.path.join(self.task_config.DATA, 'anl', tracer) - dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, tracer) - inclist.append([src, dest]) - FileHandler({'copy': inclist}).sync() + # copy files back to COM + logger.info(f"Copying files to COM based on {self.task_config.AERO_FINALIZE_VARIATIONAL_TMPL}") + aero_var_final_list = parse_j2yaml(self.task_config.AERO_FINALIZE_VARIATIONAL_TMPL, self.task_config) + FileHandler(aero_var_final_list).sync() def clean(self): super().clean() @@ -209,7 +172,7 @@ def _add_fms_cube_sphere_increments(self: Analysis) -> None: restart_template = f'{to_fv3time(bkgtime)}.fv_tracer.res.tile{{tilenum}}.nc' increment_template = f'{to_fv3time(self.task_config.current_cycle)}.fv_tracer.res.tile{{tilenum}}.nc' inc_template = os.path.join(self.task_config.DATA, 'anl', 'aeroinc.' 
+ increment_template) - bkg_template = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, restart_template) + bkg_template = os.path.join(self.task_config.DATA, 'anl', restart_template) # get list of increment vars incvars_list_path = os.path.join(self.task_config['PARMgfs'], 'gdas', 'aeroanl_inc_vars.yaml') incvars = YAMLFile(path=incvars_list_path)['incvars'] @@ -232,38 +195,7 @@ def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: bkg_dict: Dict a dictionary containing the list of model background files to copy for FileHandler """ - # NOTE for now this is FV3 RESTART files and just assumed to be fh006 - - # get FV3 RESTART files, this will be a lot simpler when using history files - rst_dir = task_config.COM_ATMOS_RESTART_PREV - run_dir = os.path.join(task_config['DATA'], 'bkg') - - # Start accumulating list of background files to copy - bkglist = [] - - # if using IAU, we can use FGAT - bkgtimes = [] - begintime = task_config.previous_cycle - for fcsthr in task_config.aero_bkg_fhr: - bkgtimes.append(add_to_datetime(begintime, to_timedelta(f"{fcsthr}H"))) - - # now loop over background times - for bkgtime in bkgtimes: - # aerosol DA needs coupler - basename = f'{to_fv3time(bkgtime)}.coupler.res' - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - # aerosol DA only needs core/tracer - for ftype in ['core', 'tracer']: - template = f'{to_fv3time(bkgtime)}.fv_{ftype}.res.tile{{tilenum}}.nc' - for itile in range(1, task_config.ntiles + 1): - basename = template.format(tilenum=itile) - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - bkg_dict = { - 'mkdir': [run_dir], - 'copy': bkglist, - } + bkg_dict = {} return bkg_dict @logit(logger) @@ -285,34 +217,5 @@ def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: berror_dict: Dict a dictionary containing the list of background error files to copy for FileHandler """ - # aerosol static-B needs nicas, cor_rh, cor_rv and stddev files. 
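
[The background-staging loop removed here from aero_analysis.py is carried over to the new aero_bmatrix.py later in this patch, and the retained _add_fms_cube_sphere_increments() uses the same idiom: a per-tile filename template expanded into [src, dest] pairs that FileHandler copies in one sync. A self-contained sketch of that idiom, with illustrative paths in place of the task_config values:

    import os

    ntiles = 6                                     # FV3 cubed-sphere tile count
    rst_dir = "/path/to/COM_ATMOS_RESTART_PREV"    # illustrative
    run_dir = "/path/to/DATA/bkg"                  # illustrative
    template = "20240802.000000.fv_tracer.res.tile{tilenum}.nc"

    # expand the template once per tile into FileHandler [src, dest] pairs
    bkglist = []
    for itile in range(1, ntiles + 1):
        basename = template.format(tilenum=itile)
        bkglist.append([os.path.join(rst_dir, basename),
                        os.path.join(run_dir, basename)])

    bkg_dict = {"mkdir": [run_dir], "copy": bkglist}
    # FileHandler(bkg_dict).sync() then creates run_dir and copies each pair.
]
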
- b_dir = config.BERROR_DATA_DIR - b_datestr = to_fv3time(config.BERROR_DATE) - berror_list = [] - - for ftype in ['stddev']: - coupler = f'{b_datestr}.{ftype}.coupler.res' - berror_list.append([ - os.path.join(b_dir, coupler), os.path.join(config.DATA, 'berror', coupler) - ]) - template = f'{b_datestr}.{ftype}.fv_tracer.res.tile{{tilenum}}.nc' - for itile in range(1, config.ntiles + 1): - tracer = template.format(tilenum=itile) - berror_list.append([ - os.path.join(b_dir, tracer), os.path.join(config.DATA, 'berror', tracer) - ]) - radius = 'cor_aero_universe_radius' - berror_list.append([ - os.path.join(b_dir, radius), os.path.join(config.DATA, 'berror', radius) - ]) - nproc = config.ntiles * config.layout_x * config.layout_y - for nn in range(1, nproc + 1): - berror_list.append([ - os.path.join(b_dir, f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc'), - os.path.join(config.DATA, 'berror', f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc') - ]) - berror_dict = { - 'mkdir': [os.path.join(config.DATA, 'berror')], - 'copy': berror_list, - } + berror_dict = {} return berror_dict diff --git a/ush/python/pygfs/task/aero_bmatrix.py b/ush/python/pygfs/task/aero_bmatrix.py new file mode 100644 index 0000000000..9431e45802 --- /dev/null +++ b/ush/python/pygfs/task/aero_bmatrix.py @@ -0,0 +1,294 @@ +#!/usr/bin/env python3 + +import os +from logging import getLogger +from typing import List, Dict, Any, Union + +from wxflow import (AttrDict, FileHandler, rm_p, + add_to_datetime, to_fv3time, to_timedelta, + to_fv3time, chdir, Executable, WorkflowException, + parse_j2yaml, save_as_yaml, logit) +from pygfs.task.bmatrix import BMatrix + +logger = getLogger(__name__.split('.')[-1]) + + +class AerosolBMatrix(BMatrix): + """ + Class for global aerosol BMatrix tasks + """ + @logit(logger, name="AerosolBMatrix") + def __init__(self, config: Dict[str, Any]) -> None: + super().__init__(config) + + _res = int(self.task_config['CASE'][1:]) + _res_anl = int(self.task_config['CASE_ANL'][1:]) + + _bmat_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.chem_diagb.yaml") + _diffusion_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.chem_diffusion.yaml") + _convertstate_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.chem_convertstate.yaml") + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'npx_ges': _res + 1, + 'npy_ges': _res + 1, + 'npz_ges': self.task_config.LEVS - 1, + 'npz': self.task_config.LEVS - 1, + 'npx_anl': _res_anl + 1, + 'npy_anl': _res_anl + 1, + 'npz_anl': self.task_config['LEVS'] - 1, + 'aero_bkg_fhr': map(int, str(self.task_config['aero_bkg_times']).split(',')), + 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", + 'bmat_yaml': _bmat_yaml, + 'diffusion_yaml': _diffusion_yaml, + 'convertstate_yaml': _convertstate_yaml, + } + ) + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.task_config, **local_dict) + + @logit(logger) + def initialize(self: BMatrix) -> None: + super().initialize() + # stage fix files + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) + FileHandler(jedi_fix_list).sync() + + # 
stage backgrounds + logger.info(f"Staging backgrounds prescribed from {self.task_config.AERO_BMATRIX_STAGE_TMPL}") + aero_bmat_stage_list = parse_j2yaml(self.task_config.AERO_BMATRIX_STAGE_TMPL, self.task_config) + FileHandler(aero_bmat_stage_list).sync() + + # generate convert state YAML file + logger.info(f"Generate convert state YAML file: {self.task_config.convertstate_yaml}") + self.task_config.convertstate_config = parse_j2yaml(self.task_config.INTERPYAML, + self.task_config, + searchpath=self.gdasapp_j2tmpl_dir) + save_as_yaml(self.task_config.convertstate_config, self.task_config.convertstate_yaml) + logger.info(f"Wrote convert state YAML to: {self.task_config.convertstate_yaml}") + + # generate diagb YAML file + logger.info(f"Generate bmat YAML file: {self.task_config.bmat_yaml}") + self.task_config.bmat_config = parse_j2yaml(self.task_config.BMATYAML, + self.task_config, + searchpath=self.gdasapp_j2tmpl_dir) + save_as_yaml(self.task_config.bmat_config, self.task_config.bmat_yaml) + logger.info(f"Wrote bmat YAML to: {self.task_config.bmat_yaml}") + + # generate diffusion parameters YAML file + logger.info(f"Generate diffusion YAML file: {self.task_config.diffusion_yaml}") + self.task_config.diffusion_config = parse_j2yaml(self.task_config.DIFFUSIONYAML, + self.task_config, + searchpath=self.gdasapp_j2tmpl_dir) + save_as_yaml(self.task_config.diffusion_config, self.task_config.diffusion_yaml) + logger.info(f"Wrote diffusion YAML to: {self.task_config.diffusion_yaml}") + + # link executable to run directory + self.link_bmatexe() + self.link_diffusion_exe() + self.link_jediexe() + + @logit(logger) + def interpBackground(self) -> None: + chdir(self.task_config.DATA) + + exec_cmd = Executable(self.task_config.APRUN_AEROGENB) + exec_name = os.path.join(self.task_config.DATA, 'gdas.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('fv3jedi') + exec_cmd.add_default_arg('convertstate') + exec_cmd.add_default_arg(self.task_config.convertstate_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + pass + + @logit(logger) + def computeVariance(self) -> None: + + chdir(self.task_config.DATA) + + exec_cmd = Executable(self.task_config.APRUN_AEROGENB) + exec_name = os.path.join(self.task_config.DATA, 'gdasapp_chem_diagb.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg(self.task_config.bmat_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + pass + + @logit(logger) + def computeDiffusion(self) -> None: + + chdir(self.task_config.DATA) + + exec_cmd_diffusion = Executable(self.task_config.APRUN_AEROGENB) + exec_name_diffusion = os.path.join(self.task_config.DATA, 'gdas_fv3jedi_error_covariance_toolbox.x') + exec_cmd_diffusion.add_default_arg(exec_name_diffusion) + exec_cmd_diffusion.add_default_arg(self.task_config.diffusion_yaml) + + try: + logger.debug(f"Executing {exec_cmd_diffusion}") + exec_cmd_diffusion() + except OSError: + raise OSError(f"Failed to execute {exec_cmd_diffusion}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd_diffusion}") + + pass + + @logit(logger) + def finalize(self) -> None: + super().finalize() + # save files to COMOUT + 
logger.info(f"Saving files to COMOUT based on {self.task_config.AERO_BMATRIX_FINALIZE_TMPL}") + aero_bmat_finalize_list = parse_j2yaml(self.task_config.AERO_BMATRIX_FINALIZE_TMPL, self.task_config) + FileHandler(aero_bmat_finalize_list).sync() + + @logit(logger) + def link_jediexe(self) -> None: + """ + + This method links a JEDI executable to the run directory + + Parameters + ---------- + Task: GDAS task + + Returns + ---------- + None + """ + exe_src = self.task_config.JEDIEXE + + # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. + logger.info(f"Link executable {exe_src} to DATA/") + logger.warn("Linking is not permitted per EE2.") + exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + return exe_dest + + @logit(logger) + def link_bmatexe(self) -> None: + """ + + This method links a JEDI executable to the run directory + + Parameters + ---------- + Task: GDAS task + + Returns + ---------- + None + """ + exe_src = self.task_config.BMATEXE + + # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. + logger.info(f"Link executable {exe_src} to DATA/") + logger.warn("Linking is not permitted per EE2.") + exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + return + + @logit(logger) + def link_diffusion_exe(self) -> None: + """ + + This method links a JEDI (fv3jedi_error_covariance_toolbox.x) + executable to the run directory + + Parameters + ---------- + Task: GDAS task + + Returns + ---------- + None + """ + + exe_src_diffusion = self.task_config.DIFFUSIONEXE + + # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. + logger.info(f"Link executable {exe_src_diffusion} to DATA/") + logger.warn("Linking is not permitted per EE2.") + exe_dest_diffusion = os.path.join(self.task_config.DATA, os.path.basename(exe_src_diffusion)) + if os.path.exists(exe_dest_diffusion): + rm_p(exe_dest_diffusion) + os.symlink(exe_src_diffusion, exe_dest_diffusion) + + return + + @logit(logger) + def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of model background files to copy + + This method constructs a dictionary of FV3 RESTART files (coupler, core, tracer) + that are needed for global aerosol DA and returns said dictionary for use by the FileHandler class. 
+ + Parameters + ---------- + task_config: Dict + a dictionary containing all of the configuration needed for the task + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + # NOTE for now this is FV3 RESTART files and just assumed to be fh006 + + # get FV3 RESTART files, this will be a lot simpler when using history files + rst_dir = task_config.COM_ATMOS_RESTART_PREV + run_dir = os.path.join(task_config['DATA'], 'bkg') + + # Start accumulating list of background files to copy + bkglist = [] + + # if using IAU, we can use FGAT + bkgtimes = [] + begintime = task_config.previous_cycle + for fcsthr in task_config.aero_bkg_fhr: + bkgtimes.append(add_to_datetime(begintime, to_timedelta(f"{fcsthr}H"))) + + # now loop over background times + for bkgtime in bkgtimes: + # aerosol DA needs coupler + basename = f'{to_fv3time(bkgtime)}.coupler.res' + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + # aerosol DA only needs core/tracer + for ftype in ['core', 'tracer']: + template = f'{to_fv3time(bkgtime)}.fv_{ftype}.res.tile{{tilenum}}.nc' + for itile in range(1, task_config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + bkg_dict = { + 'mkdir': [run_dir], + 'copy': bkglist, + } + return bkg_dict diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index e407cf1765..0fc07467a0 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -196,7 +196,7 @@ def add_fv3_increments(self, inc_file_tmpl: str, bkg_file_tmpl: str, incvars: Li @logit(logger) def link_jediexe(self) -> None: - """Compile a dictionary of background error files to copy + """ This method links a JEDI executable to the run directory diff --git a/ush/python/pygfs/task/bmatrix.py b/ush/python/pygfs/task/bmatrix.py new file mode 100644 index 0000000000..d0edba2358 --- /dev/null +++ b/ush/python/pygfs/task/bmatrix.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python3 + +import os +from logging import getLogger +from typing import List, Dict, Any, Union + +from wxflow import (parse_j2yaml, FileHandler, logit, + Task, Executable, WorkflowException) + +logger = getLogger(__name__.split('.')[-1]) + + +class BMatrix(Task): + """Parent class for GDAS BMatrix tasks + + The BMatrix class is the parent class for all + Global Data Assimilation System (GDAS) BMatrix tasks + """ + def __init__(self, config: Dict[str, Any]) -> None: + super().__init__(config) + # Store location of GDASApp jinja2 templates + self.gdasapp_j2tmpl_dir = os.path.join(self.task_config.PARMgfs, 'gdas') + + def initialize(self) -> None: + super().initialize() + + def finalize(self) -> None: + super().finalize() diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index e049a7d422..823031ce47 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -107,7 +107,7 @@ def _get_app_configs(self): configs += ['waveawipsbulls', 'waveawipsgridded'] if self.do_aero: - configs += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + configs += ['aeroanlgenb', 'aeroanlinit', 'aeroanlvar', 'aeroanlfinal'] if self.do_prep_obs_aero: configs += ['prepobsaero'] @@ -179,7 +179,7 @@ def get_task_names(self): gdas_tasks += wave_prep_tasks if self.do_aero and 'gdas' in self.aero_anl_runs: - gdas_tasks += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + gdas_tasks += 
['aeroanlgenb', 'aeroanlinit', 'aeroanlvar', 'aeroanlfinal'] if self.do_prep_obs_aero: gdas_tasks += ['prepobsaero'] @@ -218,7 +218,7 @@ def get_task_names(self): gfs_tasks += wave_prep_tasks if self.do_aero and 'gfs' in self.aero_anl_runs: - gfs_tasks += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + gfs_tasks += ['aeroanlinit', 'aeroanlvar', 'aeroanlfinal'] if self.do_prep_obs_aero: gfs_tasks += ['prepobsaero'] diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 1e4fbfc0fa..bc81ded69a 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -506,13 +506,41 @@ def prepobsaero(self): return task + def aeroanlgenb(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.run}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('aeroanlgenb') + task_name = f'{self.run}aeroanlgenb' + task_dict = {'task_name': task_name, + 'resources': resources, + 'dependency': dependencies, + 'envars': self.envars, + 'cycledef': 'gdas_half,gdas', + 'command': f'{self.HOMEgfs}/jobs/rocoto/aeroanlgenb.sh', + 'job_name': f'{self.pslot}_{task_name}_@H', + 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', + 'maxtries': '&MAXTRIES;' + } + + task = rocoto.create_task(task_dict) + + return task + def aeroanlinit(self): deps = [] + dep_dict = {'type': 'task', 'name': 'gdasaeroanlgenb', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.run}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_prep_obs_aero: dep_dict = {'type': 'task', 'name': f'{self.run}prepobsaero'} - deps.append(rocoto.add_dependency(dep_dict)) + deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('aeroanlinit') @@ -532,21 +560,28 @@ def aeroanlinit(self): return task - def aeroanlrun(self): + def aeroanlvar(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run}aeroanlinit'} + dep_dict = { + 'type': 'task', 'name': f'gdasaeroanlgenb', + 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}", + } deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) + dep_dict = { + 'type': 'task', 'name': f'{self.run}aeroanlinit', + } + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - resources = self.get_resource('aeroanlrun') - task_name = f'{self.run}aeroanlrun' + resources = self.get_resource('aeroanlvar') + task_name = f'{self.run}aeroanlvar' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/jobs/rocoto/aeroanlrun.sh', + 'command': f'{self.HOMEgfs}/jobs/rocoto/aeroanlvar.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -559,7 +594,7 @@ def aeroanlrun(self): def aeroanlfinal(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run}aeroanlrun'} + dep_dict = {'type': 'task', 'name': f'{self.run}aeroanlvar'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 72dfba3edf..0bd9a36282 100644 --- 
a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -20,7 +20,7 @@ class Tasks: 'earc', 'ecen', 'echgres', 'ediag', 'efcs', 'eobs', 'eomg', 'epos', 'esfc', 'eupd', 'atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', - 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', + 'aeroanlinit', 'aeroanlvar', 'aeroanlfinal', 'prepsnowobs', 'snowanl', 'fcst', 'atmanlupp', 'atmanlprod', 'atmupp', 'goesupp', From 92af67297913bcbf4e68d560358e14c1a3bfdd08 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 2 Aug 2024 17:11:16 +0000 Subject: [PATCH 76/90] update gdas hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 52f41a298b..cc4568df41 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 52f41a298b4c6b7bbf6f203b6579516819fbbf36 +Subproject commit cc4568df41cae28a9222ea7931bfee51466111c9 From 9679aa96e6aa40a18293c90d15069c317e670163 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 7 Aug 2024 17:14:22 +0000 Subject: [PATCH 77/90] update increment mask --- scripts/exgdas_enkf_snow_recenter.py | 1 + sorc/gdas.cd | 2 +- ush/python/pygfs/task/snowens_analysis.py | 36 ++++++++++++++++++++--- 3 files changed, 34 insertions(+), 5 deletions(-) diff --git a/scripts/exgdas_enkf_snow_recenter.py b/scripts/exgdas_enkf_snow_recenter.py index 5e831efedf..fcd501860c 100755 --- a/scripts/exgdas_enkf_snow_recenter.py +++ b/scripts/exgdas_enkf_snow_recenter.py @@ -22,6 +22,7 @@ anl = SnowEnsAnalysis(config) anl.initialize() anl.genWeights() + anl.genMask() anl.regridDetBkg() anl.regridDetInc() anl.recenterEns() diff --git a/sorc/gdas.cd b/sorc/gdas.cd index cc4568df41..f3fa26d4d6 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit cc4568df41cae28a9222ea7931bfee51466111c9 +Subproject commit f3fa26d4d6693fcf451184d5ecabb86c1b4190ca diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 6ddc358334..79fdfd1454 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -111,9 +111,9 @@ def genWeights(self) -> None: # loop through tiles for tile in range(1, self.task_config.ntiles + 1): - # open the sfc restart and get the soil moisture + # open the restart and get the vegetation type rst = nc.Dataset(f"./bkg/det/{to_fv3time(self.task_config.bkg_time)}.sfc_data.tile{tile}.nc") - smc = rst.variables['smc'][:] + vtype = rst.variables['vtype'][:] rst.close() # open the oro data and get the land fraction oro = nc.Dataset(f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data.tile{tile}.nc") @@ -125,12 +125,40 @@ def genWeights(self) -> None: lon = ncfile.createDimension('lon', case_int) lat = ncfile.createDimension('lat', case_int) lsm_frac_out = ncfile.createVariable('lsm_frac', np.float32, ('lon', 'lat')) - # mask the land fraction where soil moisture is less than 1 - land_frac[np.where(smc[0, 0, ...] == 1)] = 0 + # set the land fraction to 0 on glaciers to not interpolate that snow + glacier = 15 + land_frac[np.where(vtype[0, ...] 
== glacier)] = 0
         lsm_frac_out[:] = land_frac
         # write out and close the file
         ncfile.close()

+    @logit(logger)
+    def genMask(self) -> None:
+        """Create a mask for use by JEDI
+        to mask out snow increments on non-LSM gridpoints
+
+        Parameters
+        ----------
+        self : SnowEnsAnalysis
+            Instance of the SnowEnsAnalysis object
+        """
+
+        chdir(self.task_config.DATA)
+
+        # loop through tiles
+        for tile in range(1, self.task_config.ntiles + 1):
+            # open the restart and get the vegetation type
+            rst = nc.Dataset(f"./bkg/mem001/{to_fv3time(self.task_config.bkg_time)}.sfc_data.tile{tile}.nc", mode="r+")
+            vtype = rst.variables['vtype'][:]
+            slmsk = rst.variables['slmsk'][:]
+            # slmsk(Time, yaxis_1, xaxis_1)
+            # set the mask to 3 on glaciers
+            glacier = 15
+            slmsk[np.where(vtype == glacier)] = 3
+            # write out and close the file
+            rst.variables['slmsk'][:] = slmsk
+            rst.close()
+
     @logit(logger)
     def regridDetBkg(self) -> None:
         """Run fregrid to regrid the deterministic snow background
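Aside: genWeights and genMask above share the same in-place NetCDF edit pattern. A minimal self-contained sketch of that pattern follows; the glacier index 15 and variable names come from the diff, while the file name and single-tile scope are illustrative assumptions.

#!/usr/bin/env python3
# Minimal sketch of the in-place restart edit used by genMask above.
# Assumes a local copy of one tile's surface restart; the real task loops
# over all tiles under DATA as shown in the patch.
import netCDF4 as nc
import numpy as np

glacier = 15  # glacier vegetation-type index, as used in the patch

with nc.Dataset("sfc_data.tile1.nc", mode="r+") as rst:
    vtype = rst.variables['vtype'][:]
    slmsk = rst.variables['slmsk'][:]
    # set the land-sea mask to 3 on glacier points so JEDI masks them out
    slmsk[np.where(vtype == glacier)] = 3
    rst.variables['slmsk'][:] = slmsk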
From 56b940c6d7c2789a729e74cd8bfa99c174c989dd Mon Sep 17 00:00:00 2001
From: Guillaume Vernieres
Date: Wed, 7 Aug 2024 14:09:30 -0400
Subject: [PATCH 78/90] Marine DA updates (#2802)

Companion PR to GDASApp#1226
Addresses a few issues described in:
- GDASApp#1219
- GDASApp#1217
- GDASApp#1232
---
 ci/cases/gfsv17/ocnanal.yaml              |  2 +-
 jobs/rocoto/prepoceanobs.sh               |  6 ++++--
 parm/config/gfs/config.resources          |  2 +-
 sorc/gdas.cd                              |  2 +-
 ush/python/pygfs/task/marine_bmat.py      | 11 +++++------
 ush/python/pygfs/utils/marine_da_utils.py | 23 -----------------------
 versions/fix.ver                          |  2 +-
 7 files changed, 13 insertions(+), 35 deletions(-)

diff --git a/ci/cases/gfsv17/ocnanal.yaml b/ci/cases/gfsv17/ocnanal.yaml
index a2d7363c18..483250db10 100644
--- a/ci/cases/gfsv17/ocnanal.yaml
+++ b/ci/cases/gfsv17/ocnanal.yaml
@@ -17,7 +17,7 @@ base:
   ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }}

 ocnanal:
-  SOCA_INPUT_FIX_DIR: {{ FIXgfs }}/gdas/soca/1440x1080x75/soca
+  SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca
   SOCA_OBS_LIST: {{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml
   SOCA_NINNER: 100

diff --git a/jobs/rocoto/prepoceanobs.sh b/jobs/rocoto/prepoceanobs.sh
index d8626f5518..20aca4f15a 100755
--- a/jobs/rocoto/prepoceanobs.sh
+++ b/jobs/rocoto/prepoceanobs.sh
@@ -14,8 +14,10 @@ export jobid="${job}.$$"

 ###############################################################
 # setup python path for class defs and utils
-
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush"
+# shellcheck disable=SC2311
+pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/"
+PYTHONPATH="${pyiodaPATH}:${PYTHONPATH}"
+export PYTHONPATH

 ###############################################################
 # Execute the JJOB
diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources
index ecf942864f..b84784e183 100644
--- a/parm/config/gfs/config.resources
+++ b/parm/config/gfs/config.resources
@@ -580,7 +580,7 @@ case ${step} in
     case ${OCNRES} in
       "025")
          memory="128GB"
-         ntasks=40;;
+         ntasks=16;;
       "050")
          memory="32GB"
          ntasks=16;;
diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index 5583fbb688..f62b9128a9 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 5583fbb6884a9b98fb3df76058ea2f51414299a2
+Subproject commit f62b9128a98cce3d800dd90ad85753b6e178665f
diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py
index 9d64e621c9..4770583934 100644
--- a/ush/python/pygfs/task/marine_bmat.py
+++ b/ush/python/pygfs/task/marine_bmat.py
@@ -43,7 +43,7 @@ def __init__(self, config):
                 'MARINE_WINDOW_END': _window_end,
                 'MARINE_WINDOW_MIDDLE': self.task_config.current_cycle,
                 'BERROR_YAML_DIR': os.path.join(_home_gdas, 'parm', 'soca', 'berror'),
-                'GRID_GEN_YAML': os.path.join(_home_gdas, 'parm', 'soca', 'gridgen', 'gridgen.yaml'),
+                'UTILITY_YAML_TMPL': os.path.join(_home_gdas, 'parm', 'soca', 'soca_utils_stage.yaml.j2'),
                 'MARINE_ENSDA_STAGE_BKG_YAML_TMPL': os.path.join(_home_gdas, 'parm', 'soca', 'ensda', 'stage_ens_mem.yaml.j2'),
                 'MARINE_DET_STAGE_BKG_YAML_TMPL': os.path.join(_home_gdas, 'parm', 'soca', 'soca_det_bkg_stage.yaml.j2'),
                 'ENSPERT_RELPATH': _enspert_relpath,
@@ -82,12 +82,11 @@ def initialize(self: Task) -> None:
         # TODO(G): Check ocean backgrounds dates for consistency
         bkg_list = parse_j2yaml(self.task_config.MARINE_DET_STAGE_BKG_YAML_TMPL, self.task_config)
         FileHandler(bkg_list).sync()
-        for cice_fname in ['./INPUT/cice.res.nc', './bkg/ice.bkg.f006.nc', './bkg/ice.bkg.f009.nc']:
-            mdau.cice_hist2fms(cice_fname, cice_fname)

-        # stage the grid generation yaml
-        FileHandler({'copy': [[self.task_config.GRID_GEN_YAML,
-                               os.path.join(self.task_config.DATA, 'gridgen.yaml')]]}).sync()
+        # stage the soca utility yamls (gridgen, fields and ufo mapping yamls)
+        logger.info(f"Staging SOCA utility yaml files from {self.task_config.HOMEgfs}/parm/gdas/soca")
+        soca_utility_list = parse_j2yaml(self.task_config.UTILITY_YAML_TMPL, self.task_config)
+        FileHandler(soca_utility_list).sync()

         # generate the variance partitioning YAML file
         logger.debug("Generate variance partitioning YAML file")
diff --git a/ush/python/pygfs/utils/marine_da_utils.py b/ush/python/pygfs/utils/marine_da_utils.py
index 016551878b..2be76ac028 100644
--- a/ush/python/pygfs/utils/marine_da_utils.py
+++ b/ush/python/pygfs/utils/marine_da_utils.py
@@ -61,29 +61,6 @@ def prep_input_nml(task_config: AttrDict) -> None:
     nml.write('mom_input.nml')


-@logit(logger)
-def cice_hist2fms(input_filename: str, output_filename: str) -> None:
-    """ Reformat the CICE history file so it can be read by SOCA/FMS
-    Simple reformatting utility to allow soca/fms to read the CICE history files
-    """
-
-    # open the CICE history file
-    ds = xr.open_dataset(input_filename)
-
-    if 'aicen' in ds.variables and 'hicen' in ds.variables and 'hsnon' in ds.variables:
-        logger.info(f"*** Already reformatted, skipping.")
-        return
-
-    # rename the dimensions to xaxis_1 and yaxis_1
-    ds = ds.rename({'ni': 'xaxis_1', 'nj': 'yaxis_1'})
-
-    # rename the variables
-    ds = ds.rename({'aice_h': 'aicen', 'hi_h': 'hicen', 'hs_h': 'hsnon'})
-
-    # Save the new netCDF file
-    ds.to_netcdf(output_filename, mode='w')
-
-
 @logit(logger)
 def stage_ens_mem(task_config: AttrDict) -> None:
     """ Copy the ensemble members to the DATA directory
diff --git a/versions/fix.ver b/versions/fix.ver
index c77b0a22c1..3f85a45fee 100644
--- a/versions/fix.ver
+++ b/versions/fix.ver
@@ -9,7 +9,7 @@ export cpl_ver=20230526
 export datm_ver=20220805
 export gdas_crtm_ver=20220805
 export gdas_fv3jedi_ver=20220805
-export gdas_soca_ver=20240624
+export gdas_soca_ver=20240802
 export gdas_gsibec_ver=20240416
 export gdas_obs_ver=20240213
 export glwu_ver=20220805
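Note on the prepoceanobs.sh change in this patch: pyiodaPATH targets the GDASApp build tree for one specific MAJOR.MINOR interpreter version, which the shell helper detect_py_ver presumably reports. A sketch of the same probe in Python, as an assumption about the helper's output rather than its implementation:

#!/usr/bin/env python3
# Sketch: derive the MAJOR.MINOR string used to locate build/lib/pythonX.Y/.
import sys

py_ver = f"{sys.version_info.major}.{sys.version_info.minor}"
pyioda_path = f"sorc/gdas.cd/build/lib/python{py_ver}/"  # mirrors pyiodaPATH
print(pyioda_path)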
From efb7804b64533d81ee8109e15a9d778b589e651c Mon Sep 17 00:00:00 2001
From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com>
Date: Wed, 7 Aug 2024 14:10:49 -0400
Subject: [PATCH 79/90] Feature/get arch adds an empty archive job to GEFS system (#2772)

- This feature adds an empty archive task to the GEFS system. The task
  runs in the service queue on a single processor and completes
  successfully.
- Only the XML entry is generated for now.

Refs #832
Refs #2698
---
 jobs/rocoto/arch_test.sh          |  3 +++
 parm/config/gefs/config.arch      | 15 +++++++++++
 parm/config/gefs/config.resources |  7 ++++++
 workflow/applications/gefs.py     |  4 ++-
 workflow/rocoto/gefs_tasks.py     | 41 +++++++++++++++++++++++++++++++
 5 files changed, 69 insertions(+), 1 deletion(-)
 create mode 100755 jobs/rocoto/arch_test.sh
 create mode 100644 parm/config/gefs/config.arch

diff --git a/jobs/rocoto/arch_test.sh b/jobs/rocoto/arch_test.sh
new file mode 100755
index 0000000000..c723c842aa
--- /dev/null
+++ b/jobs/rocoto/arch_test.sh
@@ -0,0 +1,3 @@
+#! /usr/bin/env bash
+###############################################################
+exit 0
diff --git a/parm/config/gefs/config.arch b/parm/config/gefs/config.arch
new file mode 100644
index 0000000000..a23bcce6ae
--- /dev/null
+++ b/parm/config/gefs/config.arch
@@ -0,0 +1,15 @@
+#! /usr/bin/env bash
+
+########## config.arch ##########
+# Archive specific
+
+echo "BEGIN: config.arch"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" arch
+
+export ARCH_GAUSSIAN="YES"
+export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS}
+export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS}
+
+echo "END: config.arch"
diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources
index 81d2a20635..8c3ba88940 100644
--- a/parm/config/gefs/config.resources
+++ b/parm/config/gefs/config.resources
@@ -280,6 +280,13 @@ case ${step} in
     export is_exclusive=False
     ;;

+  "arch")
+    export walltime="06:00:00"
+    export ntasks=1
+    export tasks_per_node=1
+    export threads_per_task=1
+    export memory="4096M"
+    ;;
   *)
     echo "FATAL ERROR: Invalid job ${step} passed to ${BASH_SOURCE[0]}"
     exit 1
diff --git a/workflow/applications/gefs.py b/workflow/applications/gefs.py
index 364ee2c48b..c1e001c171 100644
--- a/workflow/applications/gefs.py
+++ b/workflow/applications/gefs.py
@@ -14,7 +14,7 @@ def _get_app_configs(self):
         """
         Returns the config_files that are involved in gefs
         """
-        configs = ['stage_ic', 'fcst', 'atmos_products']
+        configs = ['stage_ic', 'fcst', 'atmos_products', 'arch']

         if self.nens > 0:
             configs += ['efcs', 'atmos_ensstat']
@@ -79,4 +79,6 @@ def get_task_names(self):
             if self.do_extractvars:
                 tasks += ['extractvars']

+        tasks += ['arch']
+
         return {f"{self._base['RUN']}": tasks}
diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py
index 1b357d8ee3..e78ac96d83 100644
--- a/workflow/rocoto/gefs_tasks.py
+++ b/workflow/rocoto/gefs_tasks.py
@@ -546,3 +546,44 @@ def extractvars(self):
         task = rocoto.create_task(member_metatask_dict)

         return task
+
+    def arch(self):
+        deps = []
+        dep_dict = {'type': 'metatask', 'name': 'atmos_prod'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dep_dict = {'type': 'metatask', 'name': 'atmos_ensstat'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        if self.app_config.do_ice:
+            dep_dict = {'type': 'metatask', 'name': 'ice_prod'}
+            deps.append(rocoto.add_dependency(dep_dict))
+        if self.app_config.do_ocean:
+            dep_dict = {'type': 'metatask', 'name': 'ocean_prod'}
+            deps.append(rocoto.add_dependency(dep_dict))
+        if self.app_config.do_wave:
+            dep_dict = {'type': 'metatask', 'name': 'wave_post_grid'}
+            deps.append(rocoto.add_dependency(dep_dict))
+            dep_dict = {'type': 'metatask', 'name': 'wave_post_pnt'}
+            deps.append(rocoto.add_dependency(dep_dict))
+            if self.app_config.do_wave_bnd:
+                dep_dict = {'type': 'metatask', 'name': 'wave_post_bndpnt'}
+                deps.append(rocoto.add_dependency(dep_dict))
+                dep_dict = {'type': 'metatask', 'name': 'wave_post_bndpnt_bull'}
+                deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep=deps, dep_condition='and')
+
+        resources = self.get_resource('arch')
+        task_name = 'arch'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'envars': self.envars,
+                     'cycledef': 'gefs',
+                     'dependency': dependencies,
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/arch_test.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'
+                     }
+
+        task = rocoto.create_task(task_dict)
+
+        return task
From cdac7bbe6cab69a3d18776f4e74db2efbdeb5ee2 Mon Sep 17 00:00:00 2001
From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com>
Date: Wed, 7 Aug 2024 14:41:45 -0400
Subject: [PATCH 80/90] Check that a PR driver is still running before trying
 to kill it (#2799)

Adds a check to the SSH command used to kill child PIDs of a defunct
driver instance on a different head node, preventing invalid kill
commands and the CI failures they caused.

Resolves #2798
---
 ci/scripts/check_ci.sh |  8 ++++----
 ci/scripts/driver.sh   | 11 +++++++----
 2 files changed, 11 insertions(+), 8 deletions(-)

diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh
index 04dd92f4a6..24c5e242c3 100755
--- a/ci/scripts/check_ci.sh
+++ b/ci/scripts/check_ci.sh
@@ -50,14 +50,14 @@ fi
 export GH

 rocotostat=$(command -v rocotostat)
-if [[ -z ${rocotostat+x} ]]; then
+if [[ -z ${rocotostat} ]]; then
   echo "rocotostat not found on system"
   exit 1
 else
   echo "rocotostat being used from ${rocotostat}"
 fi
 rocotocheck=$(command -v rocotocheck)
-if [[ -z ${rocotocheck+x} ]]; then
+if [[ -z ${rocotocheck} ]]; then
   echo "rocotocheck not found on system"
   exit 1
 else
@@ -70,7 +70,7 @@ pr_list=""
 if [[ -f "${pr_list_dbfile}" ]]; then
   pr_list=$("${HOMEgfs}/ci/scripts/utils/pr_list_database.py" --dbfile "${pr_list_dbfile}" --list Open Running) || true
 fi
-if [[ -z "${pr_list+x}" ]]; then
+if [[ -z "${pr_list}" ]]; then
   echo "no PRs open and ready to run cases on .. exiting"
   exit 0
 fi
@@ -124,7 +124,7 @@ for pr in ${pr_list}; do

   for pslot_dir in "${pr_dir}/RUNTESTS/EXPDIR/"*; do
     pslot=$(basename "${pslot_dir}") || true
-    if [[ -z "${pslot+x}" ]]; then
+    if [[ -z "${pslot}" ]]; then
       echo "No experiments found in ${pslot_dir} .. exiting"
       exit 0
     fi
diff --git a/ci/scripts/driver.sh b/ci/scripts/driver.sh
index 0f53ebff6f..8a99817325 100755
--- a/ci/scripts/driver.sh
+++ b/ci/scripts/driver.sh
@@ -77,8 +77,9 @@ pr_list=$(${GH} pr list --repo "${REPO_URL}" --label "CI-${MACHINE_ID^}-Ready" -

 for pr in ${pr_list}; do
   pr_dir="${GFS_CI_ROOT}/PR/${pr}"
+  [[ ! -d ${pr_dir} ]] && mkdir -p "${pr_dir}"
   db_list=$("${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --add_pr "${pr}" --dbfile "${pr_list_dbfile}")
-  output_ci_single="${GFS_CI_ROOT}/PR/${pr}/output_single.log"
+  output_ci_single="${pr_dir}/output_single.log"
   #############################################################
   # Check if a Ready labeled PR has changed back from once set
   # and in that case completely kill the previous driver.sh cron
@@ -107,7 +108,9 @@ for pr in ${pr_list}; do
       echo -e "${pstree_out}" | grep -Pow "(?<=\()[0-9]+(?=\))" | xargs kill
     fi
   else
-    ssh "${driver_HOST}" 'pstree -A -p "${driver_PID}" | grep -Eow "[0-9]+" | xargs kill'
+    # Check if the driver is still running on the head node; if so, kill it and all child processes
+    #shellcheck disable=SC2029
+    ssh "${driver_HOST}" "pstree -A -p \"${driver_PID}\" | grep -Eow \"[0-9]+\" | xargs kill || echo \"Failed to kill process with PID: ${driver_PID}, it may not be valid.\""
   fi
   {
     echo "Driver PID: Requested termination of ${driver_PID} and children on ${driver_HOST}"
@@ -141,7 +144,7 @@ pr_list=""
 if [[ -f "${pr_list_dbfile}" ]]; then
   pr_list=$("${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --dbfile "${pr_list_dbfile}" --list Open Ready) || true
 fi
-if [[ -z "${pr_list+x}" ]]; then
+if [[ -z "${pr_list}" ]]; then
   echo "no PRs open and ready for checkout/build .. exiting"
   exit 0
 fi
@@ -155,7 +158,7 @@ fi
 for pr in ${pr_list}; do
   # Skip pr's that are currently Building for when overlapping driver scripts are being called from within cron
   pr_building=$("${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --display "${pr}" --dbfile "${pr_list_dbfile}" | grep Building) || true
-  if [[ -z "${pr_building+x}" ]]; then
+  if [[ -n "${pr_building}" ]]; then
     continue
   fi
   id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id')
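The guard above is pure shell (pstree piped to kill over ssh). As a rough Python illustration of the same "verify the PID before killing its process tree" logic, using the third-party psutil package (an assumption; the repository itself does not use psutil):

#!/usr/bin/env python3
# Illustrative only: check that a driver PID is alive before killing its tree.
# The CI scripts do this with pstree/kill over ssh as shown in the patch above.
import psutil


def kill_driver_tree(driver_pid: int) -> None:
    try:
        driver = psutil.Process(driver_pid)
    except psutil.NoSuchProcess:
        # mirrors the "|| echo Failed to kill ..." branch in driver.sh
        print(f"PID {driver_pid} is not running; nothing to kill")
        return
    # terminate children first, then the driver itself
    for child in driver.children(recursive=True):
        child.terminate()
    driver.terminate()


if __name__ == "__main__":
    kill_driver_tree(12345)  # hypothetical PID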
From 03aee25e8e8268847437de8c3270c55dd15240af Mon Sep 17 00:00:00 2001
From: CoryMartin-NOAA
Date: Wed, 7 Aug 2024 19:40:57 +0000
Subject: [PATCH 81/90] Revert "remove CDUMP"

This reverts commit f82dc68f585cda20a2ecdbf9cf0841ed57209e94.
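Context for the revert that follows: among other things, it swaps the template-driven staging in aero_analysis.py back to hand-built copy lists. For reference, a minimal sketch of the template-driven wxflow idiom used throughout this series, with a placeholder template path and config:

#!/usr/bin/env python3
# Sketch of the wxflow staging idiom: render a Jinja2-templated YAML into a
# dict like {'mkdir': [...], 'copy': [[src, dst], ...]} and let FileHandler
# create the directories and copy the files. Paths below are hypothetical.
from wxflow import FileHandler, parse_j2yaml

task_config = {'DATA': '/tmp/rundir', 'ntiles': 6}  # stand-in for task_config
stage_list = parse_j2yaml('/path/to/aero_stage.yaml.j2', task_config)
FileHandler(stage_list).sync()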
--- env/HERA.env | 7 +- env/HERCULES.env | 7 +- env/JET.env | 11 +- env/ORION.env | 7 +- env/S4.env | 7 +- env/WCOSS2.env | 7 +- jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX | 46 --- jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE | 16 +- jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE | 10 +- ..._VARIATIONAL => JGLOBAL_AERO_ANALYSIS_RUN} | 4 +- jobs/rocoto/aeroanlgenb.sh | 19 -- jobs/rocoto/{aeroanlvar.sh => aeroanlrun.sh} | 4 +- parm/config/gfs/config.aeroanl | 24 +- parm/config/gfs/config.aeroanlgenb | 29 -- parm/config/gfs/config.aeroanlrun | 11 + parm/config/gfs/config.aeroanlvar | 11 - parm/config/gfs/config.com | 1 - parm/config/gfs/config.resources | 54 +--- parm/gdas/aero_finalize_bmatrix_bkg.yaml.j2 | 19 -- parm/gdas/aero_finalize_variational.yaml.j2 | 23 -- parm/gdas/aero_stage_bmatrix_bkg.yaml.j2 | 38 --- parm/gdas/aero_stage_variational.yaml.j2 | 50 --- parm/ufs/gocart/ExtData.other | 20 +- .../exgdas_aero_analysis_generate_bmatrix.py | 27 -- ...ional.py => exglobal_aero_analysis_run.py} | 6 +- sorc/link_workflow.sh | 2 - ush/forecast_postdet.sh | 12 - ush/python/pygfs/__init__.py | 2 - ush/python/pygfs/task/aero_analysis.py | 129 +++++++- ush/python/pygfs/task/aero_bmatrix.py | 294 ------------------ ush/python/pygfs/task/analysis.py | 2 +- ush/python/pygfs/task/bmatrix.py | 28 -- workflow/applications/gfs_cycled.py | 6 +- workflow/rocoto/gfs_tasks.py | 51 +-- workflow/rocoto/tasks.py | 2 +- 35 files changed, 191 insertions(+), 795 deletions(-) delete mode 100755 jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX rename jobs/{JGLOBAL_AERO_ANALYSIS_VARIATIONAL => JGLOBAL_AERO_ANALYSIS_RUN} (83%) delete mode 100755 jobs/rocoto/aeroanlgenb.sh rename jobs/rocoto/{aeroanlvar.sh => aeroanlrun.sh} (83%) delete mode 100644 parm/config/gfs/config.aeroanlgenb create mode 100644 parm/config/gfs/config.aeroanlrun delete mode 100644 parm/config/gfs/config.aeroanlvar delete mode 100644 parm/gdas/aero_finalize_bmatrix_bkg.yaml.j2 delete mode 100644 parm/gdas/aero_finalize_variational.yaml.j2 delete mode 100644 parm/gdas/aero_stage_bmatrix_bkg.yaml.j2 delete mode 100644 parm/gdas/aero_stage_variational.yaml.j2 delete mode 100755 scripts/exgdas_aero_analysis_generate_bmatrix.py rename scripts/{exglobal_aero_analysis_variational.py => exglobal_aero_analysis_run.py} (84%) delete mode 100644 ush/python/pygfs/task/aero_bmatrix.py delete mode 100644 ush/python/pygfs/task/bmatrix.py diff --git a/env/HERA.env b/env/HERA.env index ae82bfac02..3f0e7c9f36 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -82,18 +82,13 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" -elif [[ "${step}" = "aeroanlvar" ]]; then +elif [[ "${step}" = "aeroanlrun" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}" -elif [[ "${step}" = "aeroanlgenb" ]]; then - - export NTHREADS_AEROANLGENB=${NTHREADSmax} - export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" - elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} diff --git a/env/HERCULES.env b/env/HERCULES.env index 151a2da251..83fa1aadd1 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -86,17 +86,12 @@ case ${step} in export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" ;; - "aeroanlvar") + "aeroanlrun") export 
APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}" - ;; - "aeroanlgenb") - - export NTHREADS_AEROANLGENB=${NTHREADSmax} - export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" ;; "prepobsaero") diff --git a/env/JET.env b/env/JET.env index d93d8438fc..810a8cd501 100755 --- a/env/JET.env +++ b/env/JET.env @@ -70,17 +70,12 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${launcher} ${ntasks}" -elif [[ "${step}" = "aeroanlvar" ]]; then - - export NTHREADS_AEROANL=${NTHREADSmax} - export APRUN_AEROANL="${APRUN}" - -elif [[ "${step}" = "aeroanlgenb" ]]; then +elif [[ "${step}" = "aeroanlrun" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - export NTHREADS_AEROANLGENB=${NTHREADSmax} - export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" + export NTHREADS_AEROANL=${NTHREADSmax} + export APRUN_AEROANL="${APRUN}" elif [[ "${step}" = "prepobsaero" ]]; then diff --git a/env/ORION.env b/env/ORION.env index 25fba6cfa6..bbbfb59182 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -78,18 +78,13 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" -elif [[ "${step}" = "aeroanlvar" ]]; then +elif [[ "${step}" = "aeroanlrun" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}" -elif [[ "${step}" = "aeroanlgenb" ]]; then - - export NTHREADS_AEROANLGENB=${NTHREADSmax} - export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" - elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} diff --git a/env/S4.env b/env/S4.env index 5b7432104c..840ca65898 100755 --- a/env/S4.env +++ b/env/S4.env @@ -70,18 +70,13 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${APRUN}" -elif [[ "${step}" = "aeroanlvar" ]]; then +elif [[ "${step}" = "aeroanlrun" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN}" -elif [[ "${step}" = "aeroanlgenb" ]]; then - - export NTHREADS_AEROANLGENB=${NTHREADSmax} - export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" - elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 89cc51da97..18caf1bc03 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -63,18 +63,13 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${APRUN}" -elif [[ "${step}" = "aeroanlvar" ]]; then +elif [[ "${step}" = "aeroanlrun" ]]; then export APRUNCFP="${launcher} -np \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN}" -elif [[ "${step}" = "aeroanlgenb" ]]; then - - export NTHREADS_AEROANLGENB=${NTHREADSmax} - export APRUN_AEROANLGENB="${APRUN}" - elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} diff --git a/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX b/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX deleted file mode 100755 index 81c89e9155..0000000000 --- a/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX +++ 
/dev/null @@ -1,46 +0,0 @@ -#! /usr/bin/env bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlgenb" -c "base aeroanl aeroanlgenb" - -############################################## -# Set variables used in the script -############################################## - -############################################## -# Begin JOB SPECIFIC work -############################################## - -# Generate COM variables from templates -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMIN_OBS:COM_OBS_TMPL \ - COMOUT_CHEM_BMAT:COM_CHEM_BMAT_TMPL \ - COMIN_ATMOS_RESTART:COM_ATMOS_RESTART_TMPL - -mkdir -p "${COMOUT_CHEM_BMAT}" - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASAEROBMATPY:-${SCRgfs}/exgdas_aero_analysis_generate_bmatrix.py} -${EXSCRIPT} -status=$? -[[ ${status} -ne 0 ]] && exit "${status}" - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [[ -e "${pgmout}" ]] ; then - cat "${pgmout}" -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd "${DATAROOT}" || exit 1 -[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" - -exit 0 diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE b/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE index b894b82531..455f572da5 100755 --- a/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE +++ b/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE @@ -8,17 +8,25 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlfinal" -c "base aeroanl aeroan ############################################## # Set variables used in the script ############################################## +# shellcheck disable=SC2153 +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + ############################################## # Begin JOB SPECIFIC work ############################################## # Generate COM variables from templates -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ - COMOUT_CHEM_ANALYSIS:COM_CHEM_ANALYSIS_TMPL \ - COMOUT_ATMOS_RESTART:COM_ATMOS_RESTART_TMPL +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_CHEM_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ + COM_CHEM_ANALYSIS_PREV:COM_CHEM_ANALYSIS_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL -mkdir -p "${COMOUT_CHEM_ANALYSIS}" +mkdir -m 775 -p "${COM_CHEM_ANALYSIS}" ############################################################### # Run relevant script diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE index 5be8767308..b2a2893bc0 100755 --- a/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE +++ b/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE @@ -19,15 +19,13 @@ GDUMP="gdas" ############################################## # Generate COM variables from templates -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ - COM_OBS:COM_OBS_TMPL \ - COMOUT_CHEM_ANALYSIS:COM_CHEM_ANALYSIS_TMPL +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_CHEM_ANALYSIS RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ - COMIN_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL \ - COMIN_CHEM_BMAT_PREV:COM_CHEM_BMAT_TMPL + COM_CHEM_ANALYSIS_PREV:COM_CHEM_ANALYSIS_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL -mkdir -p "${COMOUT_CHEM_ANALYSIS}" +mkdir -m 775 -p "${COM_CHEM_ANALYSIS}" 
############################################################### # Run relevant script diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL b/jobs/JGLOBAL_AERO_ANALYSIS_RUN similarity index 83% rename from jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL rename to jobs/JGLOBAL_AERO_ANALYSIS_RUN index 290d7225dd..43749b78c5 100755 --- a/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL +++ b/jobs/JGLOBAL_AERO_ANALYSIS_RUN @@ -3,7 +3,7 @@ source "${HOMEgfs}/ush/preamble.sh" export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}aeroanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlvar" -c "base aeroanl aeroanlvar" +source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlrun" -c "base aeroanl aeroanlrun" ############################################## # Set variables used in the script @@ -16,7 +16,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlvar" -c "base aeroanl aeroanlv ############################################################### # Run relevant script -EXSCRIPT=${GDASAEROVARSH:-${SCRgfs}/exglobal_aero_analysis_variational.py} +EXSCRIPT=${GDASAERORUNSH:-${SCRgfs}/exglobal_aero_analysis_run.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/rocoto/aeroanlgenb.sh b/jobs/rocoto/aeroanlgenb.sh deleted file mode 100755 index d0bc5dda9b..0000000000 --- a/jobs/rocoto/aeroanlgenb.sh +++ /dev/null @@ -1,19 +0,0 @@ -#! /usr/bin/env bash - -source "${HOMEgfs}/ush/preamble.sh" - -############################################################### -# Source UFSDA workflow modules -. "${HOMEgfs}/ush/load_ufsda_modules.sh" -status=$? -[[ ${status} -ne 0 ]] && exit "${status}" - -export job="aeroanlgenb" -export jobid="${job}.$$" - -############################################################### - -# Execute the JJOB -"${HOMEgfs}/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX" -status=$? -exit "${status}" diff --git a/jobs/rocoto/aeroanlvar.sh b/jobs/rocoto/aeroanlrun.sh similarity index 83% rename from jobs/rocoto/aeroanlvar.sh rename to jobs/rocoto/aeroanlrun.sh index 7aa7d831f9..529bb2d7d1 100755 --- a/jobs/rocoto/aeroanlvar.sh +++ b/jobs/rocoto/aeroanlrun.sh @@ -8,11 +8,11 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? [[ ${status} -ne 0 ]] && exit "${status}" -export job="aeroanlvar" +export job="aeroanlrun" export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL" +"${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN" status=$? 
exit "${status}" diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl index 19766062d9..24a5e92644 100644 --- a/parm/config/gfs/config.aeroanl +++ b/parm/config/gfs/config.aeroanl @@ -5,36 +5,20 @@ echo "BEGIN: config.aeroanl" -# define analysis resolution based on deterministic res -case ${CASE} in - "C1152" | "C768" | "C384" | "C192") - CASE_ANL="C192" - ;; - "C96" | "C48") - CASE_ANL=${CASE} - ;; - *) - echo "FATAL ERROR: Aerosol DA not supported at ${CASE} resolution" - exit 4 -esac -export CASE_ANL +export CASE_ANL=${CASE} export OBS_LIST="${PARMgfs}/gdas/aero/obs/lists/gdas_aero.yaml.j2" -export STATICB_TYPE='diffusion' +export STATICB_TYPE='identity' export BERROR_YAML="${PARMgfs}/gdas/aero/berror/staticb_${STATICB_TYPE}.yaml.j2" -export BERROR_DATA_DIR="${FIXgfs}/gdas/aero/clim_b" +export BERROR_DATA_DIR="${FIXgfs}/gdas/bump/aero/${CASE_ANL}/" +export BERROR_DATE="20160630.000000" export CRTM_FIX_YAML="${PARMgfs}/gdas/aero_crtm_coeff.yaml.j2" export JEDI_FIX_YAML="${PARMgfs}/gdas/aero_jedi_fix.yaml.j2" -export AERO_STAGE_VARIATIONAL_TMPL="${PARMgfs}/gdas/aero_stage_variational.yaml.j2" -export AERO_FINALIZE_VARIATIONAL_TMPL="${PARMgfs}/gdas/aero_finalize_variational.yaml.j2" - export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ export JEDIEXE="${EXECgfs}/gdas.x" -export BMATEXE="${EXECgfs}/gdasapp_chem_diagb.x" -export DIFFUSIONEXE="${EXECgfs}/gdas_fv3jedi_error_covariance_toolbox.x" if [[ "${DOIAU}" == "YES" ]]; then export aero_bkg_times="3,6,9" diff --git a/parm/config/gfs/config.aeroanlgenb b/parm/config/gfs/config.aeroanlgenb deleted file mode 100644 index b41b22a524..0000000000 --- a/parm/config/gfs/config.aeroanlgenb +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -x - -########## config.aeroanlgenb ########## -# Aerosol Variance specific - -echo "BEGIN: config.aeroanlgenb" - -# Get task specific resources -source "${EXPDIR}/config.resources" aeroanlgenb - -export BMATYAML="${PARMgfs}/gdas/aero/berror/aero_diagb.yaml.j2" -export DIFFUSIONYAML="${PARMgfs}/gdas/aero/berror/aero_diffusionparm.yaml.j2" -export INTERPYAML="${PARMgfs}/gdas/aero/berror/aero_interp.yaml.j2" -export AERO_BMATRIX_STAGE_TMPL="${PARMgfs}/gdas/aero_stage_bmatrix_bkg.yaml.j2" -export AERO_BMATRIX_FINALIZE_TMPL="${PARMgfs}/gdas/aero_finalize_bmatrix_bkg.yaml.j2" -export aero_diffusion_iter=10 -export aero_diffusion_horiz_len=2500e3 -export aero_diffusion_fixed_val=1.0 -export npx_clim_b=97 -export npy_clim_b=97 -export aero_diagb_weight=0.9 -export aero_staticb_rescaling_factor=2.0 -export aero_diagb_rescale=20.0 -export aero_diagb_n_halo=4 -export aero_diagb_n_neighbors=16 -export aero_diagb_smooth_horiz_iter=0 -export aero_diagb_smooth_vert_iter=0 - -echo "END: config.aeroanlgenb" diff --git a/parm/config/gfs/config.aeroanlrun b/parm/config/gfs/config.aeroanlrun new file mode 100644 index 0000000000..012e5b79f3 --- /dev/null +++ b/parm/config/gfs/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +source "${EXPDIR}/config.resources" aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/parm/config/gfs/config.aeroanlvar b/parm/config/gfs/config.aeroanlvar deleted file mode 100644 index 4282b6c840..0000000000 --- a/parm/config/gfs/config.aeroanlvar +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash -x - -########## config.aeroanlvar ########## -# Aerosol Analysis specific - -echo "BEGIN: config.aeroanlvar" - -# Get task specific resources 
-source "${EXPDIR}/config.resources" aeroanlvar - -echo "END: config.aeroanlvar" diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com index 818ea38017..222ffdae95 100644 --- a/parm/config/gfs/config.com +++ b/parm/config/gfs/config.com @@ -98,6 +98,5 @@ declare -rx COM_ICE_GRIB_GRID_TMPL=${COM_ICE_GRIB_TMPL}'/${GRID}' declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' -declare -rx COM_CHEM_BMAT_TMPL=${COM_CHEM_ANALYSIS_TMPL}'/bmatrix' declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index b84784e183..a596629e76 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -16,7 +16,7 @@ if (( $# != 1 )); then echo "atmanlinit atmanlvar atmanlfv3inc atmanlfinal" echo "atmensanlinit atmensanlletkf atmensanlfv3inc atmensanlfinal" echo "snowanl" - echo "prepobsaero aeroanlinit aeroanlvar aeroanlfinal" + echo "prepobsaero aeroanlinit aeroanlrun aeroanlfinal" echo "anal sfcanl analcalc analdiag fcst echgres" echo "upp atmos_products" echo "tracker genesis genesis_fsu" @@ -355,12 +355,12 @@ case ${step} in layout_y=8 ;; "C384") - layout_x=6 - layout_y=6 + layout_x=8 + layout_y=8 ;; "C192" | "C96") - layout_x=4 - layout_y=4 + layout_x=8 + layout_y=8 ;; "C48" ) # this case is for testing only @@ -381,53 +381,19 @@ case ${step} in memory="3072M" ;; - "aeroanlvar") + "aeroanlrun") case ${CASE} in "C768") layout_x=8 layout_y=8 ;; "C384") - layout_x=6 - layout_y=6 - ;; - "C192" | "C96") - layout_x=4 - layout_y=4 - ;; - "C48" ) - # this case is for testing only - layout_x=1 - layout_y=1 - ;; - *) - echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}" - exit 4 - esac - - export layout_x - export layout_y - - walltime="00:30:00" - ntasks=$(( layout_x * layout_y * 6 )) - threads_per_task=1 - tasks_per_node=$(( max_tasks_per_node / threads_per_task )) - export is_exclusive=True - ;; - - "aeroanlgenb") - case ${CASE} in - "C768") layout_x=8 layout_y=8 ;; - "C384") - layout_x=6 - layout_y=6 - ;; "C192" | "C96") - layout_x=4 - layout_y=4 + layout_x=8 + layout_y=8 ;; "C48" ) # this case is for testing only @@ -435,7 +401,7 @@ case ${step} in layout_y=1 ;; *) - echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}" exit 4 esac @@ -447,10 +413,8 @@ case ${step} in threads_per_task=1 tasks_per_node=$(( max_tasks_per_node / threads_per_task )) export is_exclusive=True - ;; - "aeroanlfinal") walltime="00:10:00" ntasks=1 diff --git a/parm/gdas/aero_finalize_bmatrix_bkg.yaml.j2 b/parm/gdas/aero_finalize_bmatrix_bkg.yaml.j2 deleted file mode 100644 index b33f280945..0000000000 --- a/parm/gdas/aero_finalize_bmatrix_bkg.yaml.j2 +++ /dev/null @@ -1,19 +0,0 @@ -{% set cycle_HH = current_cycle | strftime("%H") %} -{% set HEAD = RUN + ".t" + cycle_HH + "z." 
%} -{% set offset_td = "+6H" | to_timedelta %} -{% set background_time = current_cycle | add_to_datetime(offset_td) %} -copy: -### copy YAMLs used -{% set yaml_list = ['chem_diagb.yaml', 'chem_diffusion.yaml'] %} -{% for fname in yaml_list %} -- ["{{ DATA }}/{{ HEAD }}{{ fname }}", "{{ COMOUT_CHEM_BMAT }}/{{ HEAD }}{{ fname }}"] -{% endfor %} -### copy stddev files to ROTDIR -{% for tile in range(1, ntiles+1) %} -- ["{{ DATA }}/stddev/{{ background_time | to_fv3time }}.stddev.fv_tracer.res.tile{{ tile }}.nc", "{{ COMOUT_CHEM_BMAT }}/{{ background_time | to_fv3time }}.stddev.fv_tracer.res.tile{{ tile }}.nc"] -{% endfor %} -### copy coupler file -- ["{{ DATA }}/stddev/{{ background_time | to_fv3time }}.stddev.coupler.res", "{{ COMOUT_CHEM_BMAT }}/{{ background_time | to_fv3time }}.stddev.coupler.res"] -### copy diffusion files -- ["{{ DATA }}/diffusion/diffusion_hz.nc", "{{ COMOUT_CHEM_BMAT }}/{{ HEAD }}aero_diffusion_hz.nc"] -- ["{{ DATA }}/diffusion/diffusion_vt.nc", "{{ COMOUT_CHEM_BMAT }}/{{ HEAD }}aero_diffusion_vt.nc"] diff --git a/parm/gdas/aero_finalize_variational.yaml.j2 b/parm/gdas/aero_finalize_variational.yaml.j2 deleted file mode 100644 index b9247bcd62..0000000000 --- a/parm/gdas/aero_finalize_variational.yaml.j2 +++ /dev/null @@ -1,23 +0,0 @@ -###################################### -# set some variables -###################################### -{% if DOIAU == True %} - {% set bkgtime = AERO_WINDOW_BEGIN %} -{% else %} - {% set bkgtime = current_cycle %} -{% endif %} -###################################### -mkdir: -- "{{ COMOUT_CHEM_ANALYSIS }}" -- "{{ COMOUT_ATMOS_RESTART }}" -copy: -## copy variational YAML to ROTDIR -- ["{{ DATA }}/{{ APREFIX }}aerovar.yaml", "{{ COMOUT_CHEM_ANALYSIS }}/{{ APREFIX }}aerovar.yaml"] -## copy increments -{% for tile in range(1,ntiles+1) %} -- ["{{ DATA }}/anl/aeroinc.{{ current_cycle | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc", "{{ COMOUT_CHEM_ANALYSIS }}/aeroinc.{{ current_cycle | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc"] -{% endfor %} -## copy analysis -{% for tile in range(1,ntiles+1) %} -- ["{{ DATA }}/anl/{{ bkgtime | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc", "{{ COMOUT_ATMOS_RESTART }}/{{ bkgtime | to_fv3time }}.aeroanl_fv_tracer.res.tile{{ tile }}.nc"] -{% endfor %} diff --git a/parm/gdas/aero_stage_bmatrix_bkg.yaml.j2 b/parm/gdas/aero_stage_bmatrix_bkg.yaml.j2 deleted file mode 100644 index 9005b9ff12..0000000000 --- a/parm/gdas/aero_stage_bmatrix_bkg.yaml.j2 +++ /dev/null @@ -1,38 +0,0 @@ -###################################### -# set some variables -###################################### -{% set offset_td = "+6H" | to_timedelta %} -{% set background_time = current_cycle | add_to_datetime(offset_td) %} -{% set ftype_list = ['fv_core.res', 'fv_tracer.res'] %} -###################################### -# create working directories -###################################### -mkdir: -- "{{ DATA }}/bkg" -- "{{ DATA }}/stddev" -- "{{ DATA }}/clm_stddev" -- "{{ DATA }}/diffusion" -copy: -###################################### -# copy deterministic background files -###################################### -# define variables -# Declare a dict of search and replace terms to run on each template -{% set tmpl_dict = {'${ROTDIR}':ROTDIR, - '${RUN}':RUN, - '${YMD}':current_cycle | to_YMD, - '${HH}':current_cycle | strftime("%H"), - '${MEMDIR}':""} %} - -- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ background_time | to_fv3time }}.coupler.res", "{{ DATA }}/bkg/{{ background_time | to_fv3time }}.coupler.res"] -{% for ftype 
in ftype_list %} - {% for tile in range(1, ntiles+1) %} -- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ background_time | to_fv3time }}.{{ ftype }}.tile{{ tile }}.nc", "{{ DATA }}/bkg/{{ background_time | to_fv3time }}.{{ ftype }}.tile{{ tile }}.nc"] - {% endfor %} -{% endfor %} -# copy climatological stddev files -###################################### -{% for tile in range(1, ntiles+1) %} -- ["{{ BERROR_DATA_DIR }}/stddev.fv_tracer.res.tile{{ tile }}.nc", "{{ DATA }}/clm_stddev/stddev.fv_tracer.res.tile{{ tile }}.nc"] -{% endfor %} - diff --git a/parm/gdas/aero_stage_variational.yaml.j2 b/parm/gdas/aero_stage_variational.yaml.j2 deleted file mode 100644 index afd0e1b946..0000000000 --- a/parm/gdas/aero_stage_variational.yaml.j2 +++ /dev/null @@ -1,50 +0,0 @@ -###################################### -# set some variables -###################################### -{% if DOIAU == True %} - {% set bkg_times = [] %} - {% for fh in range(0, 7, 3) %} - {% set offset = fh | string + "H" %} - {% set fcst_timedelta = offset | to_timedelta %} - {% set fcst_time = AERO_WINDOW_BEGIN | add_to_datetime(fcst_timedelta) %} - {% do bkg_times.append(fcst_time) %} - {% endfor %} -{% else %} - {% set bkg_times = [] %} - {% do bkg_times.append(current_cycle) %} -{% endif %} -{% set fvfiles = ['fv_core.res.', 'fv_tracer.res.'] %} -###################################### -mkdir: -- "{{ DATA }}/anl" -- "{{ DATA }}/diags" -- "{{ DATA }}/berror" -- "{{ DATA }}/bkg" -copy: -###################################### -## copy backgrounds -{% for bkgtime in bkg_times %} -- ["{{ COMIN_ATMOS_RESTART_PREV }}/{{ bkgtime | to_fv3time }}.coupler.res", "{{ DATA }}/bkg/{{ bkgtime | to_fv3time }}.coupler.res"] - {% for fvfile in fvfiles %} - {% for tile in range(1,ntiles+1) %} -- ["{{ COMIN_ATMOS_RESTART_PREV }}/{{ bkgtime | to_fv3time }}.{{ fvfile }}tile{{ tile }}.nc", "{{ DATA }}/bkg/{{ bkgtime | to_fv3time }}.{{ fvfile }}tile{{ tile }}.nc"] - {% endfor %} - {% endfor %} -{% endfor %} -###################################### -## copy backgrounds again for fv_tracer to create analysis files later -{% for tile in range(1,ntiles+1) %} -- ["{{ COMIN_ATMOS_RESTART_PREV }}/{{ bkg_times[0] | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc", "{{ DATA }}/anl/{{ bkg_times[0] | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc"] -{% endfor %} - -###################################### -## copy berror files from COMIN_CHEM_BMAT_PREV -## stddev files -{% for tile in range(1, ntiles+1) %} -- ["{{ COMIN_CHEM_BMAT_PREV }}/{{ current_cycle | to_fv3time }}.stddev.fv_tracer.res.tile{{ tile }}.nc", "{{ DATA }}/berror/{{ current_cycle | to_fv3time }}.stddev.fv_tracer.res.tile{{ tile }}.nc"] -{% endfor %} -### copy coupler file -- ["{{ COMIN_CHEM_BMAT_PREV }}/{{ current_cycle | to_fv3time }}.stddev.coupler.res", "{{ DATA }}/berror/{{ current_cycle | to_fv3time }}.stddev.coupler.res"] -### copy diffusion files -- ["{{ COMIN_CHEM_BMAT_PREV }}/{{ GPREFIX }}aero_diffusion_hz.nc", "{{ DATA }}/berror/diffusion_hz.nc"] -- ["{{ COMIN_CHEM_BMAT_PREV }}/{{ GPREFIX }}aero_diffusion_vt.nc", "{{ DATA }}/berror/diffusion_vt.nc"] diff --git a/parm/ufs/gocart/ExtData.other b/parm/ufs/gocart/ExtData.other index 5d2ddc5102..7a0d63d6ca 100644 --- a/parm/ufs/gocart/ExtData.other +++ b/parm/ufs/gocart/ExtData.other @@ -17,12 +17,12 @@ DU_UTHRES '1' Y E - none none uthres ExtData/n #====== Sulfate Sources ================================================= # Anthropogenic (BF & FF) emissions -- allowed to input as two layers -SU_ANTHROL1 NA Y Y %y4-%m2-%d2t12:00:00 none 
none SO2 ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc -SU_ANTHROL2 NA Y Y %y4-%m2-%d2t12:00:00 none none SO2_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +SU_ANTHROL1 NA N Y %y4-%m2-%d2t12:00:00 none none SO2 ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +SU_ANTHROL2 NA N Y %y4-%m2-%d2t12:00:00 none none SO2_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # Ship emissions -SU_SHIPSO2 NA Y Y %y4-%m2-%d2t12:00:00 none none SO2_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc -SU_SHIPSO4 NA Y Y %y4-%m2-%d2t12:00:00 none none SO4_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +SU_SHIPSO2 NA N Y %y4-%m2-%d2t12:00:00 none none SO2_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +SU_SHIPSO4 NA N Y %y4-%m2-%d2t12:00:00 none none SO4_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # Aircraft fuel consumption SU_AIRCRAFT NA Y Y %y4-%m2-%d2t12:00:00 none none none /dev/null @@ -63,11 +63,11 @@ OC_MTPO NA Y Y %y4-%m2-%d2t12:00:00 none none mtpo ExtData/nexus/MEGAN_ OC_BIOFUEL NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null # Anthropogenic (BF & FF) emissions -- allowed to input as two layers -OC_ANTEOC1 NA Y Y %y4-%m2-%d2t12:00:00 none none OC ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc -OC_ANTEOC2 NA Y Y %y4-%m2-%d2t12:00:00 none none OC_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +OC_ANTEOC1 NA N Y %y4-%m2-%d2t12:00:00 none none OC ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +OC_ANTEOC2 NA N Y %y4-%m2-%d2t12:00:00 none none OC_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # EDGAR based ship emissions -OC_SHIP NA Y Y %y4-%m2-%d2t12:00:00 none none OC_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +OC_SHIP NA N Y %y4-%m2-%d2t12:00:00 none none OC_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # Aircraft fuel consumption OC_AIRCRAFT NA N Y %y4-%m2-%d2t12:00:00 none none oc_aviation /dev/null @@ -88,11 +88,11 @@ pSOA_ANTHRO_VOC NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null BC_BIOFUEL NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null # Anthropogenic (BF & FF) emissions -- allowed to input as two layers -BC_ANTEBC1 NA Y Y %y4-%m2-%d2t12:00:00 none none BC ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc -BC_ANTEBC2 NA Y Y %y4-%m2-%d2t12:00:00 none none BC_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +BC_ANTEBC1 NA N Y %y4-%m2-%d2t12:00:00 none none BC ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +BC_ANTEBC2 NA N Y %y4-%m2-%d2t12:00:00 none none BC_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # EDGAR based ship emissions -BC_SHIP NA Y Y %y4-%m2-%d2t12:00:00 none none BC_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +BC_SHIP NA N Y %y4-%m2-%d2t12:00:00 none none BC_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # Aircraft fuel consumption BC_AIRCRAFT NA N Y %y4-%m2-%d2t12:00:00 none none bc_aviation /dev/null diff --git a/scripts/exgdas_aero_analysis_generate_bmatrix.py b/scripts/exgdas_aero_analysis_generate_bmatrix.py deleted file mode 100755 index 0d8389c40d..0000000000 --- a/scripts/exgdas_aero_analysis_generate_bmatrix.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env python3 -# exgdas_aero_analysis_generate_bmatrix.py -# This script creates 
an AerosolBMatrix object -# and runs the methods needed -# to stage files, compute the variance, and write to com -# files needed for the variational solver -import os - -from wxflow import Logger, cast_strdict_as_dtypedict -from pygfs.task.aero_bmatrix import AerosolBMatrix - -# Initialize root logger -logger = Logger(level='DEBUG', colored_log=True) - - -if __name__ == '__main__': - - # Take configuration from environment and cast it as python dictionary - config = cast_strdict_as_dtypedict(os.environ) - - # Instantiate the aerosol variance and diffusion correlation tasks - AeroB = AerosolBMatrix(config) - AeroB.initialize() - AeroB.interpBackground() - AeroB.computeVariance() - AeroB.computeDiffusion() - AeroB.finalize() diff --git a/scripts/exglobal_aero_analysis_variational.py b/scripts/exglobal_aero_analysis_run.py similarity index 84% rename from scripts/exglobal_aero_analysis_variational.py rename to scripts/exglobal_aero_analysis_run.py index dd5bb4f65a..85f4b963a4 100755 --- a/scripts/exglobal_aero_analysis_variational.py +++ b/scripts/exglobal_aero_analysis_run.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 -# exglobal_aero_analysis_variational.py +# exglobal_aero_analysis_run.py # This script creates an AerosolAnalysis object -# and runs the variational method +# and runs the execute method # which executes the global aerosol variational analysis import os @@ -19,4 +19,4 @@ # Instantiate the aerosol analysis task AeroAnl = AerosolAnalysis(config) - AeroAnl.variational() + AeroAnl.execute() diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index dae98bbd65..be912292fe 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -367,11 +367,9 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then declare -a JEDI_EXE=("gdas.x" \ "gdas_soca_gridgen.x" \ "gdas_soca_error_covariance_toolbox.x" \ - "gdas_fv3jedi_error_covariance_toolbox.x" \ "gdas_soca_setcorscales.x" \ "gdas_soca_diagb.x" \ "fv3jedi_plot_field.x" \ - "gdasapp_chem_diagb.x" \ "fv3jedi_fv3inc.x" \ "gdas_ens_handler.x" \ "gdas_incr_handler.x" \ diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index bd15ec8f92..7de31d6235 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -60,18 +60,6 @@ FV3_postdet() { break fi done - # Replace fv_tracer with aeroanl_fv_tracer restart files from current cycle (if found) - local nn - for (( nn = 1; nn <= ntiles; nn++ )); do - if [[ -f "${COMOUT_ATMOS_RESTART}/${restart_date:0:8}.${restart_date:8:2}0000.aeroanl_fv_tracer.res.tile${nn}.nc" ]]; then - rm -f "${DATA}/INPUT/fv_tracer.res.tile${nn}.nc" - ${NCP} "${COMOUT_ATMOS_RESTART}/${restart_date:0:8}.${restart_date:8:2}0000.aeroanl_fv_tracer.res.tile${nn}.nc" \ - "${DATA}/INPUT/fv_tracer.res.tile${nn}.nc" - else - echo "WARNING: 'aeroanl_fv_tracer.res.tile1.nc' not found in '${COMOUT_ATMOS_RESTART}', using 'fv_tracer.res.tile1.nc'" - break - fi - done fi # if [[ "${RERUN}" != "YES" ]]; then fi # if [[ "${warm_start}" == ".true." 
]]; then diff --git a/ush/python/pygfs/__init__.py b/ush/python/pygfs/__init__.py index e1ff7e6862..c0b72bbc35 100644 --- a/ush/python/pygfs/__init__.py +++ b/ush/python/pygfs/__init__.py @@ -2,10 +2,8 @@ import os from .task.analysis import Analysis -from .task.bmatrix import BMatrix from .task.aero_emissions import AerosolEmissions from .task.aero_analysis import AerosolAnalysis -from .task.aero_bmatrix import AerosolBMatrix from .task.atm_analysis import AtmAnalysis from .task.atmens_analysis import AtmEnsAnalysis from .task.marine_bmat import MarineBMat diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index 2a22bd5632..69a992d7d4 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ b/ush/python/pygfs/task/aero_analysis.py @@ -82,18 +82,29 @@ def initialize(self: Analysis) -> None: jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() - # stage files from COM and create working directories - logger.info(f"Staging files prescribed from {self.task_config.AERO_STAGE_VARIATIONAL_TMPL}") - aero_var_stage_list = parse_j2yaml(self.task_config.AERO_STAGE_VARIATIONAL_TMPL, self.task_config) - FileHandler(aero_var_stage_list).sync() + # stage berror files + # copy BUMP files, otherwise it will assume ID matrix + if self.task_config.get('STATICB_TYPE', 'identity') in ['bump']: + FileHandler(self.get_berror_dict(self.task_config)).sync() + + # stage backgrounds + FileHandler(self.get_bkg_dict(AttrDict(self.task_config, **self.task_config))).sync() # generate variational YAML file logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}") save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) logger.info(f"Wrote variational YAML to: {self.task_config.jedi_yaml}") + # need output dir for diags and anl + logger.debug("Create empty output [anl, diags] directories to receive output from executable") + newdirs = [ + os.path.join(self.task_config['DATA'], 'anl'), + os.path.join(self.task_config['DATA'], 'diags'), + ] + FileHandler({'mkdir': newdirs}).sync() + @logit(logger) - def variational(self: Analysis) -> None: + def execute(self: Analysis) -> None: chdir(self.task_config.DATA) @@ -129,15 +140,13 @@ def finalize(self: Analysis) -> None: """ # ---- tar up diags # path of output tar statfile - logger.info('Preparing observation space diagnostics for archiving') - aerostat = os.path.join(self.task_config.COMOUT_CHEM_ANALYSIS, f"{self.task_config['APREFIX']}aerostat") + aerostat = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['APREFIX']}aerostat") # get list of diag files to put in tarball diags = glob.glob(os.path.join(self.task_config['DATA'], 'diags', 'diag*nc4')) # gzip the files first for diagfile in diags: - logger.info(f'Adding {diagfile} to tar file') with open(diagfile, 'rb') as f_in, gzip.open(f"{diagfile}.gz", 'wb') as f_out: f_out.writelines(f_in) @@ -146,16 +155,44 @@ def finalize(self: Analysis) -> None: for diagfile in diags: diaggzip = f"{diagfile}.gz" archive.add(diaggzip, arcname=os.path.basename(diaggzip)) - logger.info(f'Saved diags to {aerostat}') + + # copy full YAML from executable to ROTDIR + src = os.path.join(self.task_config['DATA'], f"{self.task_config['RUN']}.t{self.task_config['cyc']:02d}z.aerovar.yaml") + dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['RUN']}.t{self.task_config['cyc']:02d}z.aerovar.yaml") + yaml_copy = { + 'mkdir': [self.task_config.COM_CHEM_ANALYSIS], + 'copy': 
[[src, dest]] + } + FileHandler(yaml_copy).sync() + + # ---- copy RESTART fv_tracer files for future reference + if self.task_config.DOIAU: + bkgtime = self.task_config.AERO_WINDOW_BEGIN + else: + bkgtime = self.task_config.current_cycle + template = '{}.fv_tracer.res.tile{}.nc'.format(to_fv3time(bkgtime), '{tilenum}') + bkglist = [] + for itile in range(1, self.task_config.ntiles + 1): + tracer = template.format(tilenum=itile) + src = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, tracer) + dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f'aeroges.{tracer}') + bkglist.append([src, dest]) + FileHandler({'copy': bkglist}).sync() # ---- add increments to RESTART files logger.info('Adding increments to RESTART files') self._add_fms_cube_sphere_increments() - # copy files back to COM - logger.info(f"Copying files to COM based on {self.task_config.AERO_FINALIZE_VARIATIONAL_TMPL}") - aero_var_final_list = parse_j2yaml(self.task_config.AERO_FINALIZE_VARIATIONAL_TMPL, self.task_config) - FileHandler(aero_var_final_list).sync() + # ---- move increments to ROTDIR + logger.info('Moving increments to ROTDIR') + template = f'aeroinc.{to_fv3time(self.task_config.current_cycle)}.fv_tracer.res.tile{{tilenum}}.nc' + inclist = [] + for itile in range(1, self.task_config.ntiles + 1): + tracer = template.format(tilenum=itile) + src = os.path.join(self.task_config.DATA, 'anl', tracer) + dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, tracer) + inclist.append([src, dest]) + FileHandler({'copy': inclist}).sync() def clean(self): super().clean() @@ -172,7 +209,7 @@ def _add_fms_cube_sphere_increments(self: Analysis) -> None: restart_template = f'{to_fv3time(bkgtime)}.fv_tracer.res.tile{{tilenum}}.nc' increment_template = f'{to_fv3time(self.task_config.current_cycle)}.fv_tracer.res.tile{{tilenum}}.nc' inc_template = os.path.join(self.task_config.DATA, 'anl', 'aeroinc.' 
+ increment_template) - bkg_template = os.path.join(self.task_config.DATA, 'anl', restart_template) + bkg_template = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, restart_template) # get list of increment vars incvars_list_path = os.path.join(self.task_config['PARMgfs'], 'gdas', 'aeroanl_inc_vars.yaml') incvars = YAMLFile(path=incvars_list_path)['incvars'] @@ -195,7 +232,38 @@ def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: bkg_dict: Dict a dictionary containing the list of model background files to copy for FileHandler """ - bkg_dict = {} + # NOTE for now this is FV3 RESTART files and just assumed to be fh006 + + # get FV3 RESTART files, this will be a lot simpler when using history files + rst_dir = task_config.COM_ATMOS_RESTART_PREV + run_dir = os.path.join(task_config['DATA'], 'bkg') + + # Start accumulating list of background files to copy + bkglist = [] + + # if using IAU, we can use FGAT + bkgtimes = [] + begintime = task_config.previous_cycle + for fcsthr in task_config.aero_bkg_fhr: + bkgtimes.append(add_to_datetime(begintime, to_timedelta(f"{fcsthr}H"))) + + # now loop over background times + for bkgtime in bkgtimes: + # aerosol DA needs coupler + basename = f'{to_fv3time(bkgtime)}.coupler.res' + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + # aerosol DA only needs core/tracer + for ftype in ['core', 'tracer']: + template = f'{to_fv3time(bkgtime)}.fv_{ftype}.res.tile{{tilenum}}.nc' + for itile in range(1, task_config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + bkg_dict = { + 'mkdir': [run_dir], + 'copy': bkglist, + } return bkg_dict @logit(logger) @@ -217,5 +285,34 @@ def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: berror_dict: Dict a dictionary containing the list of background error files to copy for FileHandler """ - berror_dict = {} + # aerosol static-B needs nicas, cor_rh, cor_rv and stddev files. 
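
The staging list that follows is sized by the model's MPI decomposition: with `layout_x` by `layout_y` subdomains on each of the `ntiles` cube-sphere tiles, there is one `nicas_aero_nicas_local_*` file per task. A minimal, self-contained sketch of that naming scheme (the 8x8 per-tile layout is an assumed example, not a value taken from this patch):

```python
# Illustrative sketch of the per-rank NICAS file naming used in get_berror_dict.
# The tile count is fixed for FV3's cube sphere; the layout is assumed for the example.
ntiles = 6                 # cube-sphere tiles
layout_x = layout_y = 8    # assumed per-tile MPI layout

nproc = ntiles * layout_x * layout_y  # total MPI tasks, one NICAS file each
names = [f"nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc" for nn in range(1, nproc + 1)]

print(len(names))   # 384
print(names[0])     # nicas_aero_nicas_local_000384-000001.nc
print(names[-1])    # nicas_aero_nicas_local_000384-000384.nc
```

The zero-padded `{nproc:06}-{nn:06}` pair in each name matches the pattern staged below.
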
+ b_dir = config.BERROR_DATA_DIR + b_datestr = to_fv3time(config.BERROR_DATE) + berror_list = [] + + for ftype in ['stddev']: + coupler = f'{b_datestr}.{ftype}.coupler.res' + berror_list.append([ + os.path.join(b_dir, coupler), os.path.join(config.DATA, 'berror', coupler) + ]) + template = f'{b_datestr}.{ftype}.fv_tracer.res.tile{{tilenum}}.nc' + for itile in range(1, config.ntiles + 1): + tracer = template.format(tilenum=itile) + berror_list.append([ + os.path.join(b_dir, tracer), os.path.join(config.DATA, 'berror', tracer) + ]) + radius = 'cor_aero_universe_radius' + berror_list.append([ + os.path.join(b_dir, radius), os.path.join(config.DATA, 'berror', radius) + ]) + nproc = config.ntiles * config.layout_x * config.layout_y + for nn in range(1, nproc + 1): + berror_list.append([ + os.path.join(b_dir, f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc'), + os.path.join(config.DATA, 'berror', f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc') + ]) + berror_dict = { + 'mkdir': [os.path.join(config.DATA, 'berror')], + 'copy': berror_list, + } return berror_dict diff --git a/ush/python/pygfs/task/aero_bmatrix.py b/ush/python/pygfs/task/aero_bmatrix.py deleted file mode 100644 index 9431e45802..0000000000 --- a/ush/python/pygfs/task/aero_bmatrix.py +++ /dev/null @@ -1,294 +0,0 @@ -#!/usr/bin/env python3 - -import os -from logging import getLogger -from typing import List, Dict, Any, Union - -from wxflow import (AttrDict, FileHandler, rm_p, - add_to_datetime, to_fv3time, to_timedelta, - to_fv3time, chdir, Executable, WorkflowException, - parse_j2yaml, save_as_yaml, logit) -from pygfs.task.bmatrix import BMatrix - -logger = getLogger(__name__.split('.')[-1]) - - -class AerosolBMatrix(BMatrix): - """ - Class for global aerosol BMatrix tasks - """ - @logit(logger, name="AerosolBMatrix") - def __init__(self, config: Dict[str, Any]) -> None: - super().__init__(config) - - _res = int(self.task_config['CASE'][1:]) - _res_anl = int(self.task_config['CASE_ANL'][1:]) - - _bmat_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.chem_diagb.yaml") - _diffusion_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.chem_diffusion.yaml") - _convertstate_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.chem_convertstate.yaml") - - # Create a local dictionary that is repeatedly used across this class - local_dict = AttrDict( - { - 'npx_ges': _res + 1, - 'npy_ges': _res + 1, - 'npz_ges': self.task_config.LEVS - 1, - 'npz': self.task_config.LEVS - 1, - 'npx_anl': _res_anl + 1, - 'npy_anl': _res_anl + 1, - 'npz_anl': self.task_config['LEVS'] - 1, - 'aero_bkg_fhr': map(int, str(self.task_config['aero_bkg_times']).split(',')), - 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", - 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", - 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", - 'bmat_yaml': _bmat_yaml, - 'diffusion_yaml': _diffusion_yaml, - 'convertstate_yaml': _convertstate_yaml, - } - ) - - # task_config is everything that this task should need - self.task_config = AttrDict(**self.task_config, **local_dict) - - @logit(logger) - def initialize(self: BMatrix) -> None: - super().initialize() - # stage fix files - logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") - jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) - FileHandler(jedi_fix_list).sync() - - # stage 
backgrounds - logger.info(f"Staging backgrounds prescribed from {self.task_config.AERO_BMATRIX_STAGE_TMPL}") - aero_bmat_stage_list = parse_j2yaml(self.task_config.AERO_BMATRIX_STAGE_TMPL, self.task_config) - FileHandler(aero_bmat_stage_list).sync() - - # generate convert state YAML file - logger.info(f"Generate convert state YAML file: {self.task_config.convertstate_yaml}") - self.task_config.convertstate_config = parse_j2yaml(self.task_config.INTERPYAML, - self.task_config, - searchpath=self.gdasapp_j2tmpl_dir) - save_as_yaml(self.task_config.convertstate_config, self.task_config.convertstate_yaml) - logger.info(f"Wrote convert state YAML to: {self.task_config.convertstate_yaml}") - - # generate diagb YAML file - logger.info(f"Generate bmat YAML file: {self.task_config.bmat_yaml}") - self.task_config.bmat_config = parse_j2yaml(self.task_config.BMATYAML, - self.task_config, - searchpath=self.gdasapp_j2tmpl_dir) - save_as_yaml(self.task_config.bmat_config, self.task_config.bmat_yaml) - logger.info(f"Wrote bmat YAML to: {self.task_config.bmat_yaml}") - - # generate diffusion parameters YAML file - logger.info(f"Generate diffusion YAML file: {self.task_config.diffusion_yaml}") - self.task_config.diffusion_config = parse_j2yaml(self.task_config.DIFFUSIONYAML, - self.task_config, - searchpath=self.gdasapp_j2tmpl_dir) - save_as_yaml(self.task_config.diffusion_config, self.task_config.diffusion_yaml) - logger.info(f"Wrote diffusion YAML to: {self.task_config.diffusion_yaml}") - - # link executable to run directory - self.link_bmatexe() - self.link_diffusion_exe() - self.link_jediexe() - - @logit(logger) - def interpBackground(self) -> None: - chdir(self.task_config.DATA) - - exec_cmd = Executable(self.task_config.APRUN_AEROGENB) - exec_name = os.path.join(self.task_config.DATA, 'gdas.x') - exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg('fv3jedi') - exec_cmd.add_default_arg('convertstate') - exec_cmd.add_default_arg(self.task_config.convertstate_yaml) - - try: - logger.debug(f"Executing {exec_cmd}") - exec_cmd() - except OSError: - raise OSError(f"Failed to execute {exec_cmd}") - except Exception: - raise WorkflowException(f"An error occured during execution of {exec_cmd}") - - pass - - @logit(logger) - def computeVariance(self) -> None: - - chdir(self.task_config.DATA) - - exec_cmd = Executable(self.task_config.APRUN_AEROGENB) - exec_name = os.path.join(self.task_config.DATA, 'gdasapp_chem_diagb.x') - exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg(self.task_config.bmat_yaml) - - try: - logger.debug(f"Executing {exec_cmd}") - exec_cmd() - except OSError: - raise OSError(f"Failed to execute {exec_cmd}") - except Exception: - raise WorkflowException(f"An error occured during execution of {exec_cmd}") - - pass - - @logit(logger) - def computeDiffusion(self) -> None: - - chdir(self.task_config.DATA) - - exec_cmd_diffusion = Executable(self.task_config.APRUN_AEROGENB) - exec_name_diffusion = os.path.join(self.task_config.DATA, 'gdas_fv3jedi_error_covariance_toolbox.x') - exec_cmd_diffusion.add_default_arg(exec_name_diffusion) - exec_cmd_diffusion.add_default_arg(self.task_config.diffusion_yaml) - - try: - logger.debug(f"Executing {exec_cmd_diffusion}") - exec_cmd_diffusion() - except OSError: - raise OSError(f"Failed to execute {exec_cmd_diffusion}") - except Exception: - raise WorkflowException(f"An error occured during execution of {exec_cmd_diffusion}") - - pass - - @logit(logger) - def finalize(self) -> None: - super().finalize() - # save files to COMOUT - 
logger.info(f"Saving files to COMOUT based on {self.task_config.AERO_BMATRIX_FINALIZE_TMPL}") - aero_bmat_finalize_list = parse_j2yaml(self.task_config.AERO_BMATRIX_FINALIZE_TMPL, self.task_config) - FileHandler(aero_bmat_finalize_list).sync() - - @logit(logger) - def link_jediexe(self) -> None: - """ - - This method links a JEDI executable to the run directory - - Parameters - ---------- - Task: GDAS task - - Returns - ---------- - None - """ - exe_src = self.task_config.JEDIEXE - - # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. - logger.info(f"Link executable {exe_src} to DATA/") - logger.warn("Linking is not permitted per EE2.") - exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) - if os.path.exists(exe_dest): - rm_p(exe_dest) - os.symlink(exe_src, exe_dest) - - return exe_dest - - @logit(logger) - def link_bmatexe(self) -> None: - """ - - This method links a JEDI executable to the run directory - - Parameters - ---------- - Task: GDAS task - - Returns - ---------- - None - """ - exe_src = self.task_config.BMATEXE - - # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. - logger.info(f"Link executable {exe_src} to DATA/") - logger.warn("Linking is not permitted per EE2.") - exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) - if os.path.exists(exe_dest): - rm_p(exe_dest) - os.symlink(exe_src, exe_dest) - - return - - @logit(logger) - def link_diffusion_exe(self) -> None: - """ - - This method links a JEDI (fv3jedi_error_covariance_toolbox.x) - executable to the run directory - - Parameters - ---------- - Task: GDAS task - - Returns - ---------- - None - """ - - exe_src_diffusion = self.task_config.DIFFUSIONEXE - - # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. - logger.info(f"Link executable {exe_src_diffusion} to DATA/") - logger.warn("Linking is not permitted per EE2.") - exe_dest_diffusion = os.path.join(self.task_config.DATA, os.path.basename(exe_src_diffusion)) - if os.path.exists(exe_dest_diffusion): - rm_p(exe_dest_diffusion) - os.symlink(exe_src_diffusion, exe_dest_diffusion) - - return - - @logit(logger) - def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: - """Compile a dictionary of model background files to copy - - This method constructs a dictionary of FV3 RESTART files (coupler, core, tracer) - that are needed for global aerosol DA and returns said dictionary for use by the FileHandler class. 
-
-        Parameters
-        ----------
-        task_config: Dict
-            a dictionary containing all of the configuration needed for the task
-
-        Returns
-        ----------
-        bkg_dict: Dict
-            a dictionary containing the list of model background files to copy for FileHandler
-        """
-        # NOTE for now this is FV3 RESTART files and just assumed to be fh006
-
-        # get FV3 RESTART files, this will be a lot simpler when using history files
-        rst_dir = task_config.COM_ATMOS_RESTART_PREV
-        run_dir = os.path.join(task_config['DATA'], 'bkg')
-
-        # Start accumulating list of background files to copy
-        bkglist = []
-
-        # if using IAU, we can use FGAT
-        bkgtimes = []
-        begintime = task_config.previous_cycle
-        for fcsthr in task_config.aero_bkg_fhr:
-            bkgtimes.append(add_to_datetime(begintime, to_timedelta(f"{fcsthr}H")))
-
-        # now loop over background times
-        for bkgtime in bkgtimes:
-            # aerosol DA needs coupler
-            basename = f'{to_fv3time(bkgtime)}.coupler.res'
-            bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
-
-            # aerosol DA only needs core/tracer
-            for ftype in ['core', 'tracer']:
-                template = f'{to_fv3time(bkgtime)}.fv_{ftype}.res.tile{{tilenum}}.nc'
-                for itile in range(1, task_config.ntiles + 1):
-                    basename = template.format(tilenum=itile)
-                    bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
-
-        bkg_dict = {
-            'mkdir': [run_dir],
-            'copy': bkglist,
-        }
-        return bkg_dict
diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py
index 0fc07467a0..e407cf1765 100644
--- a/ush/python/pygfs/task/analysis.py
+++ b/ush/python/pygfs/task/analysis.py
@@ -196,7 +196,7 @@ def add_fv3_increments(self, inc_file_tmpl: str, bkg_file_tmpl: str, incvars: Li
 
     @logit(logger)
     def link_jediexe(self) -> None:
-        """
+        """Link the JEDI executable to the run directory
 
         This method links a JEDI executable to the run directory
diff --git a/ush/python/pygfs/task/bmatrix.py b/ush/python/pygfs/task/bmatrix.py
deleted file mode 100644
index d0edba2358..0000000000
--- a/ush/python/pygfs/task/bmatrix.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-from logging import getLogger
-from typing import List, Dict, Any, Union
-
-from wxflow import (parse_j2yaml, FileHandler, logit,
-                    Task, Executable, WorkflowException)
-
-logger = getLogger(__name__.split('.')[-1])
-
-
-class BMatrix(Task):
-    """Parent class for GDAS BMatrix tasks
-
-    The BMatrix class is the parent class for all
-    Global Data Assimilation System (GDAS) BMatrix tasks
-    """
-    def __init__(self, config: Dict[str, Any]) -> None:
-        super().__init__(config)
-        # Store location of GDASApp jinja2 templates
-        self.gdasapp_j2tmpl_dir = os.path.join(self.task_config.PARMgfs, 'gdas')
-
-    def initialize(self) -> None:
-        super().initialize()
-
-    def finalize(self) -> None:
-        super().finalize()
diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py
index 823031ce47..e049a7d422 100644
--- a/workflow/applications/gfs_cycled.py
+++ b/workflow/applications/gfs_cycled.py
@@ -107,7 +107,7 @@ def _get_app_configs(self):
             configs += ['waveawipsbulls', 'waveawipsgridded']
 
         if self.do_aero:
-            configs += ['aeroanlgenb', 'aeroanlinit', 'aeroanlvar', 'aeroanlfinal']
+            configs += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal']
 
         if self.do_prep_obs_aero:
             configs += ['prepobsaero']
@@ -179,7 +179,7 @@ def get_task_names(self):
             gdas_tasks += wave_prep_tasks
 
         if self.do_aero and 'gdas' in self.aero_anl_runs:
-            gdas_tasks += ['aeroanlgenb', 'aeroanlinit', 'aeroanlvar', 'aeroanlfinal']
+
gdas_tasks += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] if self.do_prep_obs_aero: gdas_tasks += ['prepobsaero'] @@ -218,7 +218,7 @@ def get_task_names(self): gfs_tasks += wave_prep_tasks if self.do_aero and 'gfs' in self.aero_anl_runs: - gfs_tasks += ['aeroanlinit', 'aeroanlvar', 'aeroanlfinal'] + gfs_tasks += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] if self.do_prep_obs_aero: gfs_tasks += ['prepobsaero'] diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 72f1f78edf..960a7548ab 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -506,41 +506,13 @@ def prepobsaero(self): return task - def aeroanlgenb(self): - - deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run}fcst'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - - resources = self.get_resource('aeroanlgenb') - task_name = f'{self.run}aeroanlgenb' - task_dict = {'task_name': task_name, - 'resources': resources, - 'dependency': dependencies, - 'envars': self.envars, - 'cycledef': 'gdas_half,gdas', - 'command': f'{self.HOMEgfs}/jobs/rocoto/aeroanlgenb.sh', - 'job_name': f'{self.pslot}_{task_name}_@H', - 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', - 'maxtries': '&MAXTRIES;' - } - - task = rocoto.create_task(task_dict) - - return task - def aeroanlinit(self): deps = [] - dep_dict = {'type': 'task', 'name': 'gdasaeroanlgenb', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} - deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.run}prep'} - deps.append(rocoto.add_dependency(dep_dict)) - if self.app_config.do_prep_obs_aero: dep_dict = {'type': 'task', 'name': f'{self.run}prepobsaero'} - deps.append(rocoto.add_dependency(dep_dict)) + deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('aeroanlinit') @@ -560,28 +532,21 @@ def aeroanlinit(self): return task - def aeroanlvar(self): + def aeroanlrun(self): deps = [] - dep_dict = { - 'type': 'task', 'name': f'gdasaeroanlgenb', - 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}", - } + dep_dict = {'type': 'task', 'name': f'{self.run}aeroanlinit'} deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = { - 'type': 'task', 'name': f'{self.run}aeroanlinit', - } - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + dependencies = rocoto.create_dependency(dep=deps) - resources = self.get_resource('aeroanlvar') - task_name = f'{self.run}aeroanlvar' + resources = self.get_resource('aeroanlrun') + task_name = f'{self.run}aeroanlrun' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/jobs/rocoto/aeroanlvar.sh', + 'command': f'{self.HOMEgfs}/jobs/rocoto/aeroanlrun.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -594,7 +559,7 @@ def aeroanlvar(self): def aeroanlfinal(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run}aeroanlvar'} + dep_dict = {'type': 'task', 'name': f'{self.run}aeroanlrun'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 3b2fbf5420..353d2aa943 100644 --- 
a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -20,7 +20,7 @@ class Tasks: 'earc', 'ecen', 'echgres', 'ediag', 'efcs', 'eobs', 'eomg', 'epos', 'esfc', 'eupd', 'atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', - 'aeroanlinit', 'aeroanlvar', 'aeroanlfinal', + 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', 'prepsnowobs', 'snowanl', 'fcst', 'atmanlupp', 'atmanlprod', 'atmupp', 'goesupp', From 960b8306cd2ae0e2f7714522e58b9869a988f538 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 7 Aug 2024 19:53:01 +0000 Subject: [PATCH 82/90] update gdas hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index f3fa26d4d6..67e8c5071d 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit f3fa26d4d6693fcf451184d5ecabb86c1b4190ca +Subproject commit 67e8c5071d8bc37df1a862baaa4bbcd53ac63a7d From 6cb41781e1c4ec77e75816b714f93d9ad27880ff Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 7 Aug 2024 20:23:19 +0000 Subject: [PATCH 83/90] change hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 67e8c5071d..f62b9128a9 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 67e8c5071d8bc37df1a862baaa4bbcd53ac63a7d +Subproject commit f62b9128a98cce3d800dd90ad85753b6e178665f From 48b1cca7c577633a5d6b48b108580827c6d42234 Mon Sep 17 00:00:00 2001 From: Anil Kumar <108816337+AnilKumar-NOAA@users.noreply.github.com> Date: Tue, 13 Aug 2024 09:25:21 -0400 Subject: [PATCH 84/90] Add Gaea C5 to CI (#2814) CI Testing and Jenkinsfile for Gaea C5 --- ci/cases/pr/C48mx500_3DVarAOWCDA.yaml | 1 + ci/cases/pr/C96C48_hybatmaerosnowDA.yaml | 1 + ci/cases/pr/C96C48_ufs_hybatmDA.yaml | 1 + ci/cases/pr/C96_atm3DVar_extended.yaml | 1 + ci/platforms/config.gaea | 8 ++++++++ ci/scripts/check_ci.sh | 2 +- ci/scripts/driver.sh | 2 +- ci/scripts/driver_weekly.sh | 2 +- ci/scripts/run_ci.sh | 2 +- ci/scripts/utils/launch_java_agent.sh | 2 +- 10 files changed, 17 insertions(+), 5 deletions(-) create mode 100644 ci/platforms/config.gaea diff --git a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml index fd056cf895..c8365e12a0 100644 --- a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml +++ b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml @@ -19,5 +19,6 @@ arguments: skip_ci_on_hosts: - wcoss2 + - gaea - orion - hercules diff --git a/ci/cases/pr/C96C48_hybatmaerosnowDA.yaml b/ci/cases/pr/C96C48_hybatmaerosnowDA.yaml index 02c2e8d3ae..cf629fa7a1 100644 --- a/ci/cases/pr/C96C48_hybatmaerosnowDA.yaml +++ b/ci/cases/pr/C96C48_hybatmaerosnowDA.yaml @@ -19,4 +19,5 @@ arguments: skip_ci_on_hosts: - orion + - gaea - hercules diff --git a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml index d1556dc1d0..b5634642f3 100644 --- a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml +++ b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml @@ -19,6 +19,7 @@ arguments: skip_ci_on_hosts: - hera + - gaea - orion - hercules diff --git a/ci/cases/pr/C96_atm3DVar_extended.yaml b/ci/cases/pr/C96_atm3DVar_extended.yaml index 994d3ef3a0..a1ebab7b44 100644 --- a/ci/cases/pr/C96_atm3DVar_extended.yaml +++ b/ci/cases/pr/C96_atm3DVar_extended.yaml @@ -18,5 +18,6 @@ arguments: skip_ci_on_hosts: - hera + - gaea - orion - hercules diff --git a/ci/platforms/config.gaea b/ci/platforms/config.gaea new file mode 100644 index 0000000000..cce109d494 --- /dev/null +++ b/ci/platforms/config.gaea @@ -0,0 +1,8 @@ +#!/usr/bin/bash + +export 
GFS_CI_ROOT=/gpfs/f5/epic/proj-shared/global/GFS_CI_ROOT
+export ICSDIR_ROOT=/gpfs/f5/epic/proj-shared/global/glopara/data/ICSDIR
+export STMP="/gpfs/f5/epic/scratch/${USER}"
+export SLURM_ACCOUNT=ufs-ard
+export max_concurrent_cases=5
+export max_concurrent_pr=4
diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh
index 24c5e242c3..825d8f5e8b 100755
--- a/ci/scripts/check_ci.sh
+++ b/ci/scripts/check_ci.sh
@@ -21,7 +21,7 @@ REPO_URL=${REPO_URL:-"git@github.com:NOAA-EMC/global-workflow.git"}
 
 source "${HOMEgfs}/ush/detect_machine.sh"
 case ${MACHINE_ID} in
-  hera | orion | hercules | wcoss2)
+  hera | orion | hercules | wcoss2 | gaea)
    echo "Running Automated Testing on ${MACHINE_ID}"
    source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}"
    ;;
diff --git a/ci/scripts/driver.sh b/ci/scripts/driver.sh
index 8a99817325..acf54381b8 100755
--- a/ci/scripts/driver.sh
+++ b/ci/scripts/driver.sh
@@ -30,7 +30,7 @@ export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
 
 source "${ROOT_DIR}/ush/detect_machine.sh"
 case ${MACHINE_ID} in
-  hera | orion | hercules | wcoss2)
+  hera | orion | hercules | wcoss2 | gaea)
    echo "Running Automated Testing on ${MACHINE_ID}"
    source "${ROOT_DIR}/ci/platforms/config.${MACHINE_ID}"
    ;;
diff --git a/ci/scripts/driver_weekly.sh b/ci/scripts/driver_weekly.sh
index 6cd2493769..3193cc98ed 100755
--- a/ci/scripts/driver_weekly.sh
+++ b/ci/scripts/driver_weekly.sh
@@ -38,7 +38,7 @@ export PS4='+ $(basename ${BASH_SOURCE[0]})[${LINENO}]'
 
 source "${ROOT_DIR}/ush/detect_machine.sh"
 case ${MACHINE_ID} in
-  hera | orion | hercules | wcoss2)
+  hera | orion | hercules | wcoss2 | gaea)
    echo "Running Automated Testing on ${MACHINE_ID}"
    source "${ROOT_DIR}/ci/platforms/config.${MACHINE_ID}"
    ;;
diff --git a/ci/scripts/run_ci.sh b/ci/scripts/run_ci.sh
index f109aa83d4..2da5fa2681 100755
--- a/ci/scripts/run_ci.sh
+++ b/ci/scripts/run_ci.sh
@@ -20,7 +20,7 @@ export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
 
 source "${HOMEgfs}/ush/detect_machine.sh"
 case ${MACHINE_ID} in
-  hera | orion | hercules | wcoss2)
+  hera | orion | hercules | wcoss2 | gaea)
    echo "Running Automated Testing on ${MACHINE_ID}"
    source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}"
    ;;
diff --git a/ci/scripts/utils/launch_java_agent.sh b/ci/scripts/utils/launch_java_agent.sh
index 81dbe002b6..183e671b9d 100755
--- a/ci/scripts/utils/launch_java_agent.sh
+++ b/ci/scripts/utils/launch_java_agent.sh
@@ -74,7 +74,7 @@ host=$(hostname)
 
 source "${HOMEgfs}/ush/detect_machine.sh"
 case ${MACHINE_ID} in
-  hera | orion | hercules | wcoss2)
+  hera | orion | hercules | wcoss2 | gaea)
    echo "Launch Jenkins Java Controler on ${MACHINE_ID}";;
  *)
    echo "Unsupported platform. Exiting with error."

From 9280086bfc31ebab0ae3115792c8bde6682ab072 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA
Date: Tue, 13 Aug 2024 09:28:53 -0400
Subject: [PATCH 85/90] Jenkins Pipeline Updates (#2815)

Pipeline updates:
- Get the `gh` location on the remote machine and define the global bash environment variable `$GH` for the GitHub CLI
- Failed cases are now displayed as failed in the Jenkins dashboard (see NOTE below)
- Added the Build # to messaging for clarity when re-runs occur.
- Replaced the Matrix construct for concurrency with the parallel method, which can use dynamic case lists
- Removed the hard-coded list of cases; the list of cases is now derived dynamically from the PR case directory
- See the new look of the dashboard below (more annotations; only the cases actually used are displayed)

NOTE: **failFast** (quitting all cases when one fails) still does not work because it does not quit the running remote shells. We can make this a configurable capability in a future feature request with some custom code. With the current behavior, the remaining cases continue to run after a FAIL label has been issued, and it is incumbent on the code manager to kill the CI job in the controller before resetting another Ready label.

---------

Co-authored-by: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com>
---
 ci/Jenkinsfile | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile
index 3049abd26b..38faadb1f0 100644
--- a/ci/Jenkinsfile
+++ b/ci/Jenkinsfile
@@ -1,7 +1,8 @@
 def Machine = 'none'
 def machine = 'none'
 def CUSTOM_WORKSPACE = 'none'
-def caseList = ''
+def cases = ''
+def GH = 'none'
 // Location of the custom workspaces for each machine in the CI system. They are persitent for each iteration of the PR.
 def NodeName = [hera: 'Hera-EMC', orion: 'Orion-EMC', hercules: 'Hercules-EMC', gaea: 'Gaea']
 def custom_workspace = [hera: '/scratch1/NCEPDEV/global/CI', orion: '/work2/noaa/stmp/CI/ORION', hercules: '/work2/noaa/stmp/CI/HERCULES', gaea: '/gpfs/f5/epic/proj-shared/global/CI']
@@ -78,6 +79,7 @@ pipeline {
                 echo "Getting Common Workspace for ${Machine}"
                 ws("${custom_workspace[machine]}/${env.CHANGE_ID}") {
                     properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hercules-EMC', 'Hera-EMC', 'Orion-EMC'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])])
+                    GH = sh(script: "which gh || echo '~/bin/gh'", returnStdout: true).trim()
                     CUSTOM_WORKSPACE = "${WORKSPACE}"
                     sh(script: "mkdir -p ${CUSTOM_WORKSPACE}/RUNTESTS;rm -Rf ${CUSTOM_WORKSPACE}/RUNTESTS/*")
                     sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-Building" --remove-label "CI-${Machine}-Ready" """)
@@ -97,7 +99,7 @@
             }
         }
         stages {
-            stage('build system') {
+            stage('Building') {
                 steps {
                     catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
                         script {
@@ -116,7 +118,7 @@
                             checkout scm
                         } catch (Exception e) {
                             if (env.CHANGE_ID) {
-                                sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Checkout **Failed** on ${Machine}: ${e.getMessage()}" """)
+                                sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Checkout **Failed** on ${Machine} in Build# ${env.BUILD_NUMBER}: ${e.getMessage()}" """)
                             }
                             STATUS = 'Failed'
                             error("Failed to checkout: ${e.getMessage()}")
@@ -149,7 +151,7 @@
                             try {
                                 sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_BUILD_${env.CHANGE_ID}")
                                 gist_url=sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_BUILD_${env.CHANGE_ID}", returnStdout: true).trim()
-                                sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body
"Build **FAILED** on **${Machine}** in Build# ${env.BUILD_NUMBER} with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """) } catch (Exception error_comment) { echo "Failed to comment on PR: ${error_comment.getMessage()}" } @@ -169,7 +171,7 @@ pipeline { } } if (system == 'gfs') { - caseList = sh(script: "${HOMEgfs}/ci/scripts/utils/get_host_case_list.py ${machine}", returnStdout: true).trim().split() + cases = sh(script: "${HOMEgfs}/ci/scripts/utils/get_host_case_list.py ${machine}", returnStdout: true).trim().split() } } } @@ -276,6 +278,7 @@ pipeline { } } } + stage( '5. FINALIZE' ) { agent { label NodeName[machine].toLowerCase() } @@ -291,7 +294,7 @@ pipeline { """, returnStatus: true) sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-${STATUS}" """, returnStatus: true) if (fileExists("${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log")) { - sh(script: """echo "**CI ${STATUS}** ${Machine} at
Built and ran in directory \\`${CUSTOM_WORKSPACE}\\`\n\\`\\`\\`\n" | cat - ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log > temp && mv temp ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log""", returnStatus: true) + sh(script: """echo "**CI ${STATUS}** on ${Machine} in Build# ${env.BUILD_NUMBER}
Built and ran in directory \\`${CUSTOM_WORKSPACE}\\`\n\\`\\`\\`\n" | cat - ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log > temp && mv temp ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log""", returnStatus: true) sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body-file ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log """, returnStatus: true) } if (STATUS == 'Passed') { From d8e0e829496c146b6313dceee43303e63edb48a3 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Tue, 13 Aug 2024 17:35:59 +0000 Subject: [PATCH 86/90] remove to_ymdh --- ush/python/pygfs/task/snowens_analysis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 79fdfd1454..982f74130c 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -8,7 +8,7 @@ from wxflow import (AttrDict, FileHandler, - to_fv3time, to_YMD, to_timedelta, add_to_datetime, + to_fv3time, to_timedelta, add_to_datetime, rm_p, chdir, parse_j2yaml, save_as_yaml, Jinja, From 6f732a19c326222dd3bacca110eaad75b2bf57be Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 14 Aug 2024 15:11:14 +0000 Subject: [PATCH 87/90] update gdas hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index f3fa26d4d6..c5794ab2ed 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit f3fa26d4d6693fcf451184d5ecabb86c1b4190ca +Subproject commit c5794ab2ed16442f9bbef90a546aa7aa3c928666 From 886a9f167effa7f80845701dad46b950fdb1a3fc Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 15 Aug 2024 15:40:00 +0000 Subject: [PATCH 88/90] not worry about threads on wcoss --- env/WCOSS2.env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 572862ca63..cf9feeca83 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -85,7 +85,7 @@ elif [[ "${step}" = "snowanl" ]]; then elif [[ "${step}" = "esnowrecen" ]]; then export NTHREADS_ESNOWRECEN=${NTHREADSmax} - export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + export APRUN_ESNOWRECEN="${APRUN}" export APRUN_APPLY_INCR="${launcher} -n 6" From 60ccca1ceafb0c6fd5b5d3b6e935336183d43478 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 21 Aug 2024 13:51:03 +0000 Subject: [PATCH 89/90] update gdas hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index c5794ab2ed..0431b26650 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit c5794ab2ed16442f9bbef90a546aa7aa3c928666 +Subproject commit 0431b26650c5e5d4eb741304a05c841d3fda0ddc From 1cb436e77b925ec51d15e279d788a671dba332df Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 22 Aug 2024 18:40:11 +0000 Subject: [PATCH 90/90] skip wcoss2 for ci for now --- ci/cases/pr/C96C48_ufs_hybatmDA.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml index 1f7179d8d1..f835b34593 100644 --- a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml +++ b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml @@ -22,4 +22,5 @@ skip_ci_on_hosts: - gaea - orion - hercules + - wcoss2
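
The `skip_ci_on_hosts` lists edited throughout these patches are what the dynamically built case list from patch 85 consumes: every YAML under `ci/cases/pr` either runs on a machine or opts out of it. Below is a minimal sketch of that selection logic, assuming PyYAML is available; the production implementation is `ci/scripts/utils/get_host_case_list.py`, which is not part of this series, so treat the details here as an approximation.

```python
# Approximate the host-based case selection implied by skip_ci_on_hosts.
# The real logic lives in ci/scripts/utils/get_host_case_list.py (not shown here).
import glob
import os

import yaml  # PyYAML, assumed available


def host_case_list(homegfs: str, machine: str) -> list:
    """Return the ci/cases/pr case names that should run on `machine`."""
    cases = []
    for path in sorted(glob.glob(os.path.join(homegfs, "ci", "cases", "pr", "*.yaml"))):
        with open(path) as f:
            config = yaml.safe_load(f)
        # Skip the case when the current machine appears in its opt-out list.
        if machine in (config.get("skip_ci_on_hosts") or []):
            continue
        cases.append(os.path.splitext(os.path.basename(path))[0])
    return cases


print(host_case_list(os.environ.get("HOMEgfs", "."), "gaea"))
```

With patch 90 applied, for example, `C96C48_ufs_hybatmDA.yaml` opts out of hera, gaea, orion, hercules, and wcoss2, so it would drop out of the selected list on every current CI host.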