From a1bd5ca75b468e912df016e665175b8aeb4303b7 Mon Sep 17 00:00:00 2001 From: "Chan-Hoo.Jeon-NOAA" <60152248+chan-hoo@users.noreply.github.com> Date: Tue, 23 Nov 2021 13:45:42 -0500 Subject: [PATCH 01/15] Update template namelist files for newer ufs weather model (#642) * Update templates for new ufs weather model * Add option for pressure tendency diagnostic * Remove unnecessary namelist parameters * Modify model_configure * change nfhout to output_fh --- scripts/exregional_run_post.sh | 4 +- ush/create_model_configure_file.sh | 22 ++-- ush/templates/FV3.input.yml | 5 - ush/templates/input.nml.FV3 | 5 +- ush/templates/model_configure | 156 +++++++++-------------------- 5 files changed, 61 insertions(+), 131 deletions(-) diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 2632d7d55..025edd46e 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -234,8 +234,8 @@ tmmark="tm00" # minutes and seconds of the corresponding output forecast time. # # Note that if the forecast model is instructed to output at some hourly -# interval (via the nfhout and nfhout_hf parameters in the MODEL_CONFIG_FN -# file, with nsout set to a non-positive value), then the write-component +# interval (via the output_fh parameter in the MODEL_CONFIG_FN file, +# with nsout set to a non-positive value), then the write-component # output file names will not contain any suffix for the minutes and seconds. # For this reason, when SUB_HOURLY_POST is not set to "TRUE", mnts_sec_str # must be set to a null string. 
diff --git a/ush/create_model_configure_file.sh b/ush/create_model_configure_file.sh index ccd55d00f..a5d6c5d6f 100644 --- a/ush/create_model_configure_file.sh +++ b/ush/create_model_configure_file.sh @@ -114,6 +114,7 @@ run directory (run_dir): # settings="\ 'PE_MEMBER01': ${PE_MEMBER01} + 'print_esmf': ${dot_print_esmf_dot} 'start_year': $yyyy 'start_month': $mm 'start_day': $dd @@ -125,7 +126,6 @@ run directory (run_dir): 'restart_interval': ${RESTART_INTERVAL} 'write_dopost': ${dot_write_dopost} 'quilting': ${dot_quilting_dot} - 'print_esmf': ${dot_print_esmf_dot} 'output_grid': ${WRTCMP_output_grid}" # 'output_grid': \'${WRTCMP_output_grid}\'" # @@ -185,17 +185,15 @@ run directory (run_dir): # main time step dt_atmos (in units of seconds). Note that nsout is # guaranteed to be an integer because the experiment generation scripts # require that dt_subhourly_post_mnts (after conversion to seconds) be -# evenly divisible by dt_atmos. Also, in this case, the variable nfhout -# [which specifies the (low-frequency) output interval in hours after -# forecast hour nfhmax_hf; see the jinja model_config template file] is -# set to 0, although this doesn't matter because any positive of nsout -# will override nfhout. +# evenly divisible by dt_atmos. Also, in this case, the variable output_fh +# [which specifies the output interval in hours; +# see the jinja model_config template file] is set to 0, although this +# doesn't matter because any positive of nsout will override output_fh. # # If sub_hourly_post is set to "FALSE", then the workflow is hard-coded # (in the jinja model_config template file) to direct the forecast model -# to output files every hour. This is done by setting (1) nfhout_hf to -# 1 in that jinja template file, (2) nfhout to 1 here, and (3) nsout to -# -1 here which turns off output by time step interval. +# to output files every hour. 
This is done by setting (1) output_fh to 1 +# here, and (2) nsout to -1 here which turns off output by time step interval. # # Note that the approach used here of separating how hourly and subhourly # output is handled should be changed/generalized/simplified such that @@ -208,13 +206,13 @@ run directory (run_dir): # if [ "${sub_hourly_post}" = "TRUE" ]; then nsout=$(( dt_subhourly_post_mnts*60 / dt_atmos )) - nfhout=0 + output_fh=0 else - nfhout=1 + output_fh=1 nsout=-1 fi settings="${settings} - 'nfhout': ${nfhout} + 'output_fh': ${output_fh} 'nsout': ${nsout}" print_info_msg $VERBOSE " diff --git a/ush/templates/FV3.input.yml b/ush/templates/FV3.input.yml index 8e9b43fe5..ce8f833f5 100644 --- a/ush/templates/FV3.input.yml +++ b/ush/templates/FV3.input.yml @@ -288,11 +288,6 @@ FV3_GFS_v15p2: surf_map_nml: FV3_GFS_v16: - atmos_model_nml: - fhmax: 240 - fhmaxhf: 0 - fhout: 3 - fhouthf: 1 cires_ugwp_nml: launch_level: 27 fv_core_nml: diff --git a/ush/templates/input.nml.FV3 b/ush/templates/input.nml.FV3 index c4cec3329..2d35ca477 100644 --- a/ush/templates/input.nml.FV3 +++ b/ush/templates/input.nml.FV3 @@ -10,7 +10,6 @@ &atmos_model_nml chksum_debug = .false. dycore_only = .false. - fdiag = 1 / &cires_ugwp_nml @@ -71,6 +70,7 @@ do_schmidt = .true. do_vort_damp = .true. dwind_2d = .false. + dz_min = 2 external_eta = .true. external_ic = .true. fill = .true. @@ -109,6 +109,7 @@ p_fac = 0.1 phys_hydrostatic = .false. print_freq = 6 + psm_bc = 1 range_warn = .true. read_increment = .false. regional = .true. @@ -174,7 +175,6 @@ ltaerosol = .true. lwhtr = .true. n_var_lndp = 0 - ncld = 5 nsradar_reset = 3600 nst_anl = .true. nstf_name = 2,1,0,0,0 @@ -182,6 +182,7 @@ oz_phys_2015 = .true. pdfcld = .false. pre_rad = .false. + print_diff_pgr = .false. prslrd0 = 0.0 random_clds = .false. redrag = .true. 
diff --git a/ush/templates/model_configure b/ush/templates/model_configure index f26b9dd15..1b0f7a815 100644 --- a/ush/templates/model_configure +++ b/ush/templates/model_configure @@ -1,5 +1,6 @@ total_member: 1 PE_MEMBER01: {{ PE_MEMBER01 }} +print_esmf: {{ print_esmf }} start_year: {{ start_year }} start_month: {{ start_month }} start_day: {{ start_day }} @@ -14,14 +15,12 @@ cpl: {{ cpl }} calendar: 'julian' memuse_verbose: .false. atmos_nthreads: {{ atmos_nthreads }} -use_hyper_thread: .false. -debug_affinity: .true. restart_interval: {{ restart_interval }} output_1st_tstep_rst: .false. -print_esmf: {{ print_esmf }} write_dopost: {{ write_dopost }} +ideflate: 0 +nbits: 0 quilting: {{ quilting }} - {% if quilting %} # # Write-component (quilting) computational parameters. @@ -29,29 +28,15 @@ quilting: {{ quilting }} write_groups: {{ write_groups }} write_tasks_per_group: {{ write_tasks_per_group }} num_files: 2 -filename_base: 'dyn''phy' -output_file: 'netcdf' -write_nemsioflip: .false. -write_fsyncflag: .false. +filename_base: 'dyn' 'phy' +output_file: 'netcdf' 'netcdf' # # Write-component output frequency parameter definitions: # -# nfhout: -# Output frequency in hours after forecast hour "nfhmax_hf". -# -# nfhmax_hf: -# Number of forecast hours until output frequency "nfhout" takes affect. +# output_fh: Output frequency in hours. +# nsout: Output frequency in time steps (positive values override "output_fh"). # -# nfhout_hf: -# Output frequency in hours until forecast hour "nfhmax_hf". -# -# nsout: -# Output frequency in time steps (positive values override "nfhout" and -# "nfhout_hf"). -# -nfhout: {{ nfhout }} -nfhmax_hf: 60 -nfhout_hf: 1 +output_fh: {{ output_fh }} -1 nsout: {{ nsout }} # # Coordinate system used by the output grid. @@ -61,97 +46,48 @@ output_grid: '{{ output_grid }}' # Parameter definitions for an output grid of type "{{ output_grid }}": # {%- if output_grid == "lambert_conformal" %} -# cen_lon: -# Longitude of center of grid (degrees). 
-# -# cen_lat: -# Latitude of center of grid (degrees). -# -# stdlat1: -# Latitude of first standard parallel (degrees). -# -# stdlat2: -# Latitude of second standard parallel (degrees). -# -# nx: -# Number of grid cells along x-axis in Lambert conformal (x,y) plane. -# -# ny: -# Number of grid cells along y-axis in Lambert conformal (x,y) plane. -# -# lon1: -# Longitude of center of grid cell at bottom-left corner of grid (degrees). -# -# lat1: -# Latitude of center of grid cell at bottom-left corner of grid (degrees). -# -# dx: -# Grid cell size in x direction (meters). -# -# dy: -# Grid cell size in y direction (meters). +# cen_lon: Longitude of center of grid (degrees). +# cen_lat: Latitude of center of grid (degrees). +# stdlat1: Latitude of first standard parallel (degrees). +# stdlat2: Latitude of second standard parallel (degrees). +# nx: Number of grid cells along x-axis in Lambert conformal (x,y) plane. +# ny: Number of grid cells along y-axis in Lambert conformal (x,y) plane. +# lon1: Longitude of center of grid cell at bottom-left corner of grid (degrees). +# lat1: Latitude of center of grid cell at bottom-left corner of grid (degrees). +# dx: Grid cell size in x direction (meters). +# dy: Grid cell size in y direction (meters). # {%- elif output_grid == "regional_latlon" %} -# cen_lon: -# Longitude of center of grid (degrees). -# -# cen_lat: -# Latitude of center of grid (degrees). -# -# lon1: -# Longitude of center of lower-left (southwest) grid cell (degrees). -# -# lat1: -# Latitude of center of lower-left (southwest) grid cell (degrees). -# -# lon2: -# Longitude of center of upper-right (northeast) grid cell (degrees). -# -# lat2: -# Latitude of center of upper-right (northeast) grid cell (degrees). -# -# dlon: -# Longitudinal grid size (degrees). -# -# dlat: -# Latitudinal grid size (degrees). +# cen_lon: Longitude of center of grid (degrees). +# cen_lat: Latitude of center of grid (degrees). 
+# lon1: Longitude of center of lower-left (southwest) grid cell (degrees). +# lat1: Latitude of center of lower-left (southwest) grid cell (degrees). +# lon2: Longitude of center of upper-right (northeast) grid cell (degrees). +# lat2: Latitude of center of upper-right (northeast) grid cell (degrees). +# dlon: Longitudinal grid size (degrees). +# dlat: Latitudinal grid size (degrees). # {%- elif output_grid == "rotated_latlon" %} -# cen_lon: -# Longitude of center of grid, expressed in the NON-ROTATED latlon -# coordinate system (degrees). This is also the longitude of the point -# at which the equator and prime meridian of the ROTATED coordinate -# system intersect (i.e. the point at which the longitude and latitude -# in the ROTATED latlon coordinate system are both 0). -# -# cen_lat: -# Latitude of center of grid, expressed in the NON-ROTATED latlon -# coordinate system (degrees). This is also the latitude of the point -# at which the equator and prime meridian of the ROTATED coordinate system -# intersect (i.e. the point at which the longitude and latitude in the -# ROTATED latlon coordinate system are both 0). -# -# lon1: -# Longitude of center of lower-left grid cell, expressed in the ROTATED -# latlon coordinate system (degrees). -# -# lat1: -# Latitude of center of lower-left grid cell, expressed in the ROTATED -# latlon coordinate system (degrees). -# -# lon2: -# Longitude of center of upper-right grid cell, expressed in the ROTATED -# latlon coordinate system (degrees). -# -# lat2: -# Latitude of center of upper-right grid cell, expressed in the ROTATED -# latlon coordinate system (degrees). -# -# dlon: -# Longitudinal grid size in the ROTATED latlon coordinate system (degrees). -# -# dlat: -# Latitudinal grid size in the ROTATED latlon coordinate system (degrees). +# cen_lon: Longitude of center of grid, expressed in the NON-ROTATED latlon coordinate +# system (degrees). 
This is also the longitude of the point at which the +# equator and prime meridian of the ROTATED coordinate system intersect (i.e. +# the point at which the longitude and latitude in the ROTATED latlon +# coordinate system are both 0). +# cen_lat: Latitude of center of grid, expressed in the NON-ROTATED latlon coordinate +# system (degrees). This is also the latitude of the point at which the +# equator and prime meridian of the ROTATED coordinate system intersect (i.e. +# the point at which the longitude and latitude in the ROTATED latlon +# coordinate system are both 0). +# lon1: Longitude of center of lower-left grid cell, expressed in the ROTATED latlon +# coordinate system (degrees). +# lat1: Latitude of center of lower-left grid cell, expressed in the ROTATED latlon +# coordinate system (degrees). +# lon2: Longitude of center of upper-right grid cell, expressed in the ROTATED latlon +# coordinate system (degrees). +# lat2: Latitude of center of upper-right grid cell, expressed in the ROTATED latlon +# coordinate system (degrees). +# dlon: Longitudinal grid size in the ROTATED latlon coordinate system (degrees). +# dlat: Latitudinal grid size in the ROTATED latlon coordinate system (degrees). # {%- endif %} {%- if output_grid == "lambert_conformal" %} From c96e0e6f88f61ba6dd0b974c21a303e1bdae6f5a Mon Sep 17 00:00:00 2001 From: gsketefian <31046882+gsketefian@users.noreply.github.com> Date: Mon, 6 Dec 2021 10:34:05 -0700 Subject: [PATCH 02/15] Add new DEBUG variable (#639) ## DESCRIPTION OF CHANGES: 1. Add a new experiment configuration variable named `DEBUG` to enable more in-depth debugging output from workflow scripts. Set default value of `DEBUG` in `config_defaults.sh` to `"FALSE"`. 2. In experiment generation scripts, change circumstances under which different messages are printed to screen (e.g. when `VERBOSE` is `"TRUE"`, when `DEBUG` is `"TRUE"`, or always). 3. 
In experiment generation scripts, for clarity add new informational messages and modify some existing ones. 4. In various scripts, change "set -x" to "set +x" to reduce output clutter. This can be changed back as necessary (e.g. for debugging). Note that if `DEBUG` is set to `"TRUE"`, `VERBOSE` will get reset to `"TRUE"` if necessary in order to also print out all the `VERBOSE` messages. ## TESTS CONDUCTED: Ran the WE2E test `grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2` as-is as well as with modifications to the default values of `VERBOSE` and `DEBUG`, as follows: 1. `grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2` as-is, i.e. using default values `VERBOSE="TRUE"` and `DEBUG="FALSE"`. 2. `grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2` modified with `VERBOSE="FALSE"` (and with default of `DEBUG="FALSE"`). 3. `grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2` modified with `DEBUG="TRUE"` (and with default of `VERBOSE="TRUE"`). 4. `grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2` modified with `DEBUG="TRUE"` and `VERBOSE="FALSE"` (which should get reset to `"TRUE"`). All tests were successful. The experiment generation log files (`log.generate_FV3LAM_wflow.sh`) were compared and differed in the expected ways. ## DOCUMENTATION: Necessary documentation of `DEBUG` is in `config_defaults.sh`. Created Issue #[640 ](https://github.com/NOAA-EMC/regional_workflow/issues/640)to also update rst documentation. 
--- jobs/JREGIONAL_GET_OBS_CCPA | 2 +- jobs/JREGIONAL_GET_OBS_MRMS | 2 +- jobs/JREGIONAL_GET_OBS_NDAS | 2 +- jobs/JREGIONAL_MAKE_ICS | 2 +- jobs/JREGIONAL_RUN_POST | 2 +- jobs/JREGIONAL_RUN_VX_ENSGRID | 2 +- jobs/JREGIONAL_RUN_VX_ENSGRID_MEAN | 2 +- jobs/JREGIONAL_RUN_VX_ENSGRID_PROB | 2 +- jobs/JREGIONAL_RUN_VX_ENSPOINT | 2 +- jobs/JREGIONAL_RUN_VX_ENSPOINT_MEAN | 2 +- jobs/JREGIONAL_RUN_VX_ENSPOINT_PROB | 2 +- jobs/JREGIONAL_RUN_VX_GRIDSTAT | 2 +- jobs/JREGIONAL_RUN_VX_POINTSTAT | 2 +- ush/bash_utils/print_input_args.sh | 9 ++- ush/bash_utils/set_file_param.sh | 22 +++---- ush/check_ruc_lsm.sh | 2 +- ush/config_defaults.sh | 14 +++- ush/create_diag_table_file.sh | 2 +- ush/create_model_configure_file.sh | 2 +- ush/generate_FV3LAM_wflow.sh | 95 ++++++++++++++++----------- ush/link_fix.sh | 2 +- ush/make_grid_mosaic_file.sh | 2 +- ush/set_FV3nml_sfc_climo_filenames.sh | 2 +- ush/set_FV3nml_stoch_params.sh | 2 +- ush/set_ozone_param.sh | 4 +- ush/set_thompson_mp_fix_files.sh | 2 +- ush/setup.sh | 70 ++++++++++++++++---- ush/valid_param_vals.sh | 1 + 28 files changed, 164 insertions(+), 93 deletions(-) diff --git a/jobs/JREGIONAL_GET_OBS_CCPA b/jobs/JREGIONAL_GET_OBS_CCPA index e63010d0d..bd1760ee0 100755 --- a/jobs/JREGIONAL_GET_OBS_CCPA +++ b/jobs/JREGIONAL_GET_OBS_CCPA @@ -29,7 +29,7 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_GET_OBS_MRMS b/jobs/JREGIONAL_GET_OBS_MRMS index dfe1f68a6..f5d634974 100755 --- a/jobs/JREGIONAL_GET_OBS_MRMS +++ b/jobs/JREGIONAL_GET_OBS_MRMS @@ -25,7 +25,7 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # 
#----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_GET_OBS_NDAS b/jobs/JREGIONAL_GET_OBS_NDAS index 9514931cc..ab44e21b8 100755 --- a/jobs/JREGIONAL_GET_OBS_NDAS +++ b/jobs/JREGIONAL_GET_OBS_NDAS @@ -25,7 +25,7 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_MAKE_ICS b/jobs/JREGIONAL_MAKE_ICS index 9c5125f41..a5e5894f1 100755 --- a/jobs/JREGIONAL_MAKE_ICS +++ b/jobs/JREGIONAL_MAKE_ICS @@ -17,7 +17,7 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST index ed469dbe2..86a30470b 100755 --- a/jobs/JREGIONAL_RUN_POST +++ b/jobs/JREGIONAL_RUN_POST @@ -26,7 +26,7 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_VX_ENSGRID b/jobs/JREGIONAL_RUN_VX_ENSGRID index 641df6f66..867c75fc8 100755 --- a/jobs/JREGIONAL_RUN_VX_ENSGRID +++ b/jobs/JREGIONAL_RUN_VX_ENSGRID @@ -26,7 +26,7 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_VX_ENSGRID_MEAN b/jobs/JREGIONAL_RUN_VX_ENSGRID_MEAN index d8c4dcea0..a75558d92 100755 --- a/jobs/JREGIONAL_RUN_VX_ENSGRID_MEAN +++ 
b/jobs/JREGIONAL_RUN_VX_ENSGRID_MEAN @@ -26,7 +26,7 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_VX_ENSGRID_PROB b/jobs/JREGIONAL_RUN_VX_ENSGRID_PROB index 7ae543314..a8a2c43be 100755 --- a/jobs/JREGIONAL_RUN_VX_ENSGRID_PROB +++ b/jobs/JREGIONAL_RUN_VX_ENSGRID_PROB @@ -26,7 +26,7 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_VX_ENSPOINT b/jobs/JREGIONAL_RUN_VX_ENSPOINT index c240d7abe..fc30f076f 100755 --- a/jobs/JREGIONAL_RUN_VX_ENSPOINT +++ b/jobs/JREGIONAL_RUN_VX_ENSPOINT @@ -24,7 +24,7 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_VX_ENSPOINT_MEAN b/jobs/JREGIONAL_RUN_VX_ENSPOINT_MEAN index 376d07b99..a7d937c12 100755 --- a/jobs/JREGIONAL_RUN_VX_ENSPOINT_MEAN +++ b/jobs/JREGIONAL_RUN_VX_ENSPOINT_MEAN @@ -24,7 +24,7 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_VX_ENSPOINT_PROB b/jobs/JREGIONAL_RUN_VX_ENSPOINT_PROB index 1a47cfd12..818e588e5 100755 --- a/jobs/JREGIONAL_RUN_VX_ENSPOINT_PROB +++ b/jobs/JREGIONAL_RUN_VX_ENSPOINT_PROB @@ -24,7 +24,7 @@ # 
#----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_VX_GRIDSTAT b/jobs/JREGIONAL_RUN_VX_GRIDSTAT index b17fd2960..8537cb3ec 100755 --- a/jobs/JREGIONAL_RUN_VX_GRIDSTAT +++ b/jobs/JREGIONAL_RUN_VX_GRIDSTAT @@ -26,7 +26,7 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_VX_POINTSTAT b/jobs/JREGIONAL_RUN_VX_POINTSTAT index f995045ae..084543695 100755 --- a/jobs/JREGIONAL_RUN_VX_POINTSTAT +++ b/jobs/JREGIONAL_RUN_VX_POINTSTAT @@ -24,7 +24,7 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/ush/bash_utils/print_input_args.sh b/ush/bash_utils/print_input_args.sh index 72b7744b3..957cec524 100644 --- a/ush/bash_utils/print_input_args.sh +++ b/ush/bash_utils/print_input_args.sh @@ -168,16 +168,15 @@ have been set as follows: # #----------------------------------------------------------------------- # -# If a global variable named VERBOSE is not defined, print out the mes- -# sage. If it is defined, print out the message only if VERBOSE is set -# to TRUE. +# If a global variable named DEBUG is not defined, print out the message. +# If it is defined, print out the message only if DEBUG is set to TRUE. 
# #----------------------------------------------------------------------- # - if [ -z ${VERBOSE+x} ]; then + if [ -z ${DEBUG+x} ]; then print_info_msg "$msg" else - print_info_msg "$VERBOSE" "$msg" + print_info_msg "$DEBUG" "$msg" fi # #----------------------------------------------------------------------- diff --git a/ush/bash_utils/set_file_param.sh b/ush/bash_utils/set_file_param.sh index d45451483..3e0d13349 100644 --- a/ush/bash_utils/set_file_param.sh +++ b/ush/bash_utils/set_file_param.sh @@ -53,11 +53,11 @@ Incorrect number of arguments specified: Usage: - ${func_name} file_full_path param value + ${func_name} file_fp param value where the arguments are defined as follows: - file_full_path: + file_fp: Full path to the file in which the specified parameter's value will be set. @@ -76,7 +76,7 @@ where the arguments are defined as follows: # #----------------------------------------------------------------------- # - local file_full_path="$1" + local file_fp="$1" local param="$2" local value="$3" # @@ -86,7 +86,7 @@ where the arguments are defined as follows: # #----------------------------------------------------------------------- # - local file="${file_full_path##*/}" + local file="${file_fp##*/}" # #----------------------------------------------------------------------- # @@ -94,7 +94,7 @@ where the arguments are defined as follows: # #----------------------------------------------------------------------- # - print_info_msg "$VERBOSE" "\ + print_info_msg "$DEBUG" "\ Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." # #----------------------------------------------------------------------- @@ -141,7 +141,7 @@ Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." "${GLOBAL_VAR_DEFNS_FN}") regex_search="(^\s*$param=)(\".*\")?([^ \"]*)?(\(.*\))?(\s*[#].*)?" 
regex_replace="\1$value\5" -# set_bash_param "${file_full_path}" "$param" "$value" +# set_bash_param "${file_fp}" "$param" "$value" ;; # #----------------------------------------------------------------------- @@ -168,15 +168,15 @@ specified for this file: # #----------------------------------------------------------------------- # - grep -q -E "${regex_search}" "${file_full_path}" + grep -q -E "${regex_search}" "${file_fp}" if [ $? -eq 0 ]; then - $SED -i -r -e "s%${regex_search}%${regex_replace}%" "${file_full_path}" + $SED -i -r -e "s%${regex_search}%${regex_replace}%" "${file_fp}" else print_err_msg_exit "\ -Specified file (file_full_path) does not contain the searched-for regu- -lar expression (regex_search): - file_full_path = \"${file_full_path}\" +The specified file (file_fp) does not contain the searched-for regular +expression (regex_search): + file_fp = \"${file_fp}\" param = \"$param\" value = \"$value\" regex_search = ${regex_search}" diff --git a/ush/check_ruc_lsm.sh b/ush/check_ruc_lsm.sh index 8d51cc01b..35e4db195 100644 --- a/ush/check_ruc_lsm.sh +++ b/ush/check_ruc_lsm.sh @@ -20,7 +20,7 @@ function check_ruc_lsm() { # #----------------------------------------------------------------------- # - { save_shell_opts; set -u -x; } > /dev/null 2>&1 + { save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index ba3711f90..4bdfdfc6e 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -1171,13 +1171,21 @@ PREEXISTING_DIR_METHOD="delete" # #----------------------------------------------------------------------- # -# Set VERBOSE. This is a flag that determines whether or not the experiment -# generation and workflow task scripts tend to print out more informational -# messages. +# Set flags for more detailed messages. 
Defintitions: +# +# VERBOSE: +# This is a flag that determines whether or not the experiment generation +# and workflow task scripts tend to print out more informational messages. +# +# DEBUG: +# This is a flag that determines whether or not very detailed debugging +# messages are printed to out. Note that if DEBUG is set to TRUE, then +# VERBOSE will also get reset to TRUE if it isn't already. # #----------------------------------------------------------------------- # VERBOSE="TRUE" +DEBUG="FALSE" # #----------------------------------------------------------------------- # diff --git a/ush/create_diag_table_file.sh b/ush/create_diag_table_file.sh index 3c409031a..41ca941bf 100644 --- a/ush/create_diag_table_file.sh +++ b/ush/create_diag_table_file.sh @@ -15,7 +15,7 @@ function create_diag_table_file() { # #----------------------------------------------------------------------- # - { save_shell_opts; set -u -x; } > /dev/null 2>&1 + { save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/ush/create_model_configure_file.sh b/ush/create_model_configure_file.sh index a5d6c5d6f..814187765 100644 --- a/ush/create_model_configure_file.sh +++ b/ush/create_model_configure_file.sh @@ -15,7 +15,7 @@ function create_model_configure_file() { # #----------------------------------------------------------------------- # - { save_shell_opts; set -u -x; } > /dev/null 2>&1 + { save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/ush/generate_FV3LAM_wflow.sh b/ush/generate_FV3LAM_wflow.sh index 0ee8eee0b..a9fc3a817 100755 --- a/ush/generate_FV3LAM_wflow.sh +++ b/ush/generate_FV3LAM_wflow.sh @@ -10,6 +10,15 @@ #----------------------------------------------------------------------- # function generate_FV3LAM_wflow() { +printf "\ +======================================================================== 
+======================================================================== + +Starting experiment generation... + +======================================================================== +======================================================================== +" # #----------------------------------------------------------------------- # @@ -20,10 +29,10 @@ function generate_FV3LAM_wflow() { #----------------------------------------------------------------------- # if [[ $(uname -s) == Darwin ]]; then - local scrfunc_fp=$( greadlink -f "${BASH_SOURCE[0]}" ) - else - local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) - fi + local scrfunc_fp=$( greadlink -f "${BASH_SOURCE[0]}" ) +else + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +fi local scrfunc_fn=$( basename "${scrfunc_fp}" ) local scrfunc_dir=$( dirname "${scrfunc_fp}" ) # @@ -108,7 +117,6 @@ if [ $pyerrors -gt 0 ];then " fi - # #----------------------------------------------------------------------- # @@ -152,16 +160,26 @@ WFLOW_XML_FP="$EXPTDIR/${WFLOW_XML_FN}" # #----------------------------------------------------------------------- # -ensmem_indx_name="\"\"" -uscore_ensmem_name="\"\"" -slash_ensmem_subdir="\"\"" -if [ "${DO_ENSEMBLE}" = "TRUE" ]; then - ensmem_indx_name="mem" - uscore_ensmem_name="_mem#${ensmem_indx_name}#" - slash_ensmem_subdir="/mem#${ensmem_indx_name}#" -fi +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then -settings="\ + template_xml_fp="${TEMPLATE_DIR}/${WFLOW_XML_FN}" + + print_info_msg " +Creating rocoto workflow XML file (WFLOW_XML_FP) from jinja template XML +file (template_xml_fp): + template_xml_fp = \"${template_xml_fp}\" + WFLOW_XML_FP = \"${WFLOW_XML_FP}\"" + + ensmem_indx_name="\"\"" + uscore_ensmem_name="\"\"" + slash_ensmem_subdir="\"\"" + if [ "${DO_ENSEMBLE}" = "TRUE" ]; then + ensmem_indx_name="mem" + uscore_ensmem_name="_mem#${ensmem_indx_name}#" + slash_ensmem_subdir="/mem#${ensmem_indx_name}#" + fi + + settings="\ # # Parameters needed by the job scheduler. 
# @@ -415,7 +433,7 @@ settings="\ 'first_fv3_file_tstr': "000:"`$DATE_UTIL -d "${DATE_FIRST_CYCL} +${DT_ATMOS} seconds" +%M:%S` " # End of "settings" variable. -print_info_msg $VERBOSE " + print_info_msg "$VERBOSE" " The variable \"settings\" specifying values of the rococo XML variables has been set as follows: #----------------------------------------------------------------------- @@ -423,12 +441,9 @@ settings = $settings" # -# Set the full path to the template rocoto XML file. Then call a python -# script to generate the experiment's actual XML file from this template -# file. +# Call the python script to generate the experiment's actual XML file +# from the jinja template file. # -if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then - template_xml_fp="${TEMPLATE_DIR}/${WFLOW_XML_FN}" $USHDIR/fill_jinja_template.py -q \ -u "${settings}" \ -t ${template_xml_fp} \ @@ -444,6 +459,7 @@ are: Namelist settings specified on command line: settings = $settings" + fi # #----------------------------------------------------------------------- @@ -453,7 +469,7 @@ fi # #----------------------------------------------------------------------- # -print_info_msg " +print_info_msg "$VERBOSE" " Creating symlink in the experiment directory (EXPTDIR) that points to the workflow launch script (WFLOW_LAUNCH_SCRIPT_FP): EXPTDIR = \"${EXPTDIR}\" @@ -476,7 +492,7 @@ if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then # time_stamp=$( $DATE_UTIL "+%F_%T" ) crontab_backup_fp="$EXPTDIR/crontab.bak.${time_stamp}" - print_info_msg " + print_info_msg "$VERBOSE" " Copying contents of user cron table to backup file: crontab_backup_fp = \"${crontab_backup_fp}\"" if [ "$MACHINE" = "WCOSS_DELL_P3" ]; then @@ -522,9 +538,9 @@ added: else - print_info_msg " -Adding the following line to the cron table in order to automatically -resubmit FV3-LAM workflow: + print_info_msg "$VERBOSE" " +Adding the following line to the user's cron table in order to automatically +resubmit SRW workflow: CRONTAB_LINE = 
\"${CRONTAB_LINE}\"" if [ "$MACHINE" = "WCOSS_DELL_P3" ];then @@ -637,8 +653,8 @@ cp_vrfy "${FIELD_DICT_IN_UWM_FP}" "${FIELD_DICT_FP}" # #----------------------------------------------------------------------- # -print_info_msg "$VERBOSE" " -Setting parameters in FV3 namelist file (FV3_NML_FP): +print_info_msg " +Setting parameters in weather model's namelist file (FV3_NML_FP): FV3_NML_FP = \"${FV3_NML_FP}\"" # # Set npx and npy, which are just NX plus 1 and NY plus 1, respectively. @@ -800,8 +816,8 @@ settings="$settings }" print_info_msg $VERBOSE " -The variable \"settings\" specifying values of the namelist variables -has been set as follows: +The variable \"settings\" specifying values of the weather model's +namelist variables has been set as follows: settings = $settings" @@ -886,15 +902,12 @@ print_info_msg " ======================================================================== ======================================================================== -Workflow generation completed. +Experiment generation completed. The experiment directory is: + + EXPTDIR=\"$EXPTDIR\" ======================================================================== ======================================================================== - -The experiment directory is: - - > EXPTDIR=\"$EXPTDIR\" - " # #----------------------------------------------------------------------- @@ -904,12 +917,16 @@ The experiment directory is: #----------------------------------------------------------------------- # if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + print_info_msg "\ To launch the workflow, first ensure that you have a compatible version of rocoto available. For most pre-configured platforms, rocoto can be loaded via a module: + > module load rocoto + For more details on rocoto, see the User's Guide. + To launch the workflow, first ensure that you have a compatible version of rocoto loaded. 
For example, to load version 1.3.1 of rocoto, use @@ -938,17 +955,15 @@ Note that: 2) In order for the output of the rocotostat command to be up-to-date, the rocotorun command must be issued immediately before the rocoto- stat command. -" -fi -print_info_msg " + For automatic resubmission of the workflow (say every 3 minutes), the following line can be added to the user's crontab (use \"crontab -e\" to edit the cron table): */3 * * * * cd $EXPTDIR && ./launch_FV3LAM_wflow.sh - -Done. " + +fi # # If necessary, run the NOMADS script to source external model data. # diff --git a/ush/link_fix.sh b/ush/link_fix.sh index 34bcd5a53..0bfd5c03d 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -16,7 +16,7 @@ function link_fix() { # #----------------------------------------------------------------------- # - { save_shell_opts; set -u -x; } > /dev/null 2>&1 + { save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/ush/make_grid_mosaic_file.sh b/ush/make_grid_mosaic_file.sh index 8a81feb14..5adf2ac30 100644 --- a/ush/make_grid_mosaic_file.sh +++ b/ush/make_grid_mosaic_file.sh @@ -17,7 +17,7 @@ function make_grid_mosaic_file() { # #----------------------------------------------------------------------- # - { save_shell_opts; set -u -x; } > /dev/null 2>&1 + { save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/ush/set_FV3nml_sfc_climo_filenames.sh b/ush/set_FV3nml_sfc_climo_filenames.sh index 13f2ecdf5..d6f9dc5d4 100644 --- a/ush/set_FV3nml_sfc_climo_filenames.sh +++ b/ush/set_FV3nml_sfc_climo_filenames.sh @@ -20,7 +20,7 @@ function set_FV3nml_sfc_climo_filenames() { # #----------------------------------------------------------------------- # - { save_shell_opts; set -u -x; } > /dev/null 2>&1 + { save_shell_opts; set -u +x; } > /dev/null 2>&1 # 
#----------------------------------------------------------------------- # diff --git a/ush/set_FV3nml_stoch_params.sh b/ush/set_FV3nml_stoch_params.sh index cfe115d52..8f08bb566 100644 --- a/ush/set_FV3nml_stoch_params.sh +++ b/ush/set_FV3nml_stoch_params.sh @@ -22,7 +22,7 @@ function set_FV3nml_stoch_params() { # #----------------------------------------------------------------------- # - { save_shell_opts; set -u -x; } > /dev/null 2>&1 + { save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/ush/set_ozone_param.sh b/ush/set_ozone_param.sh index c85749279..bd6f5fd37 100644 --- a/ush/set_ozone_param.sh +++ b/ush/set_ozone_param.sh @@ -34,7 +34,7 @@ function set_ozone_param() { # #----------------------------------------------------------------------- # - { save_shell_opts; set -u -x; } > /dev/null 2>&1 + { save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -209,7 +209,7 @@ files in the FIXam directory is: " msg="$msg"$( printf "\"%s\" \\\\\n" "${CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING[@]}" ) msg="$msg"$( printf "\n)" ) - print_info_msg "$msg" + print_info_msg "$VERBOSE" "$msg" else diff --git a/ush/set_thompson_mp_fix_files.sh b/ush/set_thompson_mp_fix_files.sh index 667ad522f..7bad26c2b 100644 --- a/ush/set_thompson_mp_fix_files.sh +++ b/ush/set_thompson_mp_fix_files.sh @@ -22,7 +22,7 @@ function set_thompson_mp_fix_files() { # #----------------------------------------------------------------------- # - { save_shell_opts; set -u -x; } > /dev/null 2>&1 + { save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/ush/setup.sh b/ush/setup.sh index f00e3bf8d..83843eea8 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -51,6 +51,11 @@ cd_vrfy ${scrfunc_dir} #----------------------------------------------------------------------- # . 
./source_util_funcs.sh + +print_info_msg " +======================================================================== +Starting function ${func_name}() in \"${scrfunc_fn}\"... +========================================================================" # #----------------------------------------------------------------------- # @@ -152,6 +157,40 @@ fi # #----------------------------------------------------------------------- # +# Make sure that DEBUG is set to a valid value. +# +#----------------------------------------------------------------------- +# +check_var_valid_value "DEBUG" "valid_vals_DEBUG" +# +# Set DEBUG to either "TRUE" or "FALSE" so we don't have to consider +# other valid values later on. +# +DEBUG=$(echo_uppercase $DEBUG) +if [ "$DEBUG" = "TRUE" ] || \ + [ "$DEBUG" = "YES" ]; then + DEBUG="TRUE" +elif [ "$DEBUG" = "FALSE" ] || \ + [ "$DEBUG" = "NO" ]; then + DEBUG="FALSE" +fi +# +#----------------------------------------------------------------------- +# +# If DEBUG is set to "TRUE" but VERBOSE is set to "FALSE", reset VERBOSE +# to "TRUE" to print out all of the VERBOSE output (in addition to any +# DEBUG output). +# +#----------------------------------------------------------------------- +# +if [ "$DEBUG" = "TRUE" ] && [ "$VERBOSE" = "FALSE" ]; then + print_info_msg " +Resetting VERBOSE to \"TRUE\" because DEBUG has been set to \"TRUE\"..." + VERBOSE="TRUE" +fi +# +#----------------------------------------------------------------------- +# # Make sure that USE_CRON_TO_RELAUNCH is set to a valid value. 
# #----------------------------------------------------------------------- @@ -792,9 +831,9 @@ fi # #----------------------------------------------------------------------- # -DATE_OR_NULL=$( printf "%s" "${DATE_FIRST_CYCL}" | \ +date_or_null=$( printf "%s" "${DATE_FIRST_CYCL}" | \ $SED -n -r -e "s/^([0-9]{8})$/\1/p" ) -if [ -z "${DATE_OR_NULL}" ]; then +if [ -z "${date_or_null}" ]; then print_err_msg_exit "\ DATE_FIRST_CYCL must be a string consisting of exactly 8 digits of the form \"YYYYMMDD\", where YYYY is the 4-digit year, MM is the 2-digit @@ -802,9 +841,9 @@ month, and DD is the 2-digit day-of-month. DATE_FIRST_CYCL = \"${DATE_FIRST_CYCL}\"" fi -DATE_OR_NULL=$( printf "%s" "${DATE_LAST_CYCL}" | \ +date_or_null=$( printf "%s" "${DATE_LAST_CYCL}" | \ $SED -n -r -e "s/^([0-9]{8})$/\1/p" ) -if [ -z "${DATE_OR_NULL}" ]; then +if [ -z "${date_or_null}" ]; then print_err_msg_exit "\ DATE_LAST_CYCL must be a string consisting of exactly 8 digits of the form \"YYYYMMDD\", where YYYY is the 4-digit year, MM is the 2-digit @@ -2549,8 +2588,9 @@ line_list=$( $SED -r \ -e "/^$/d" \ ${GLOBAL_VAR_DEFNS_FP} ) -print_info_msg "$VERBOSE" " -The variable \"line_list\" contains: +print_info_msg "$DEBUG" " +Before updating default values of experiment variables to user-specified +values, the variable \"line_list\" contains: ${line_list} " @@ -2593,6 +2633,17 @@ $SED -i -r -e "s|$regexp|\1\n\n${str_to_insert}\n|g" ${GLOBAL_VAR_DEFNS_FP} # # Loop through the lines in line_list. # +print_info_msg " +Generating the global experiment variable definitions file specified by +GLOBAL_VAR_DEFNS_FN: + GLOBAL_VAR_DEFNS_FN = \"${GLOBAL_VAR_DEFNS_FN}\" +Full path to this file is: + GLOBAL_VAR_DEFNS_FP = \"${GLOBAL_VAR_DEFNS_FP}\" +For more detailed information, set DEBUG to \"TRUE\" in the experiment +configuration file (\"${EXPT_CONFIG_FN}\")." + +template_var_names=() +template_var_values=() while read crnt_line; do # # Try to obtain the name of the variable being set on the current line. 
@@ -2604,16 +2655,13 @@ while read crnt_line; do # set to. # var_name=$( printf "%s" "${crnt_line}" | $SED -n -r -e "s/^([^ ]*)=.*/\1/p" ) -#echo -#echo "============================" -#printf "%s\n" "var_name = \"${var_name}\"" # # If var_name is not empty, then a variable name was found in the cur- # rent line in line_list. # if [ ! -z $var_name ]; then - print_info_msg "$VERBOSE" " + print_info_msg "$DEBUG" " var_name = \"${var_name}\"" # # If the variable specified in var_name is set in the current environ- @@ -3087,7 +3135,7 @@ definitions file returned with a nonzero status." # print_info_msg " ======================================================================== -Setup script completed successfully!!! +Function ${func_name}() in \"${scrfunc_fn}\" completed successfully!!! ========================================================================" # #----------------------------------------------------------------------- diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index 1b3acf00a..8104038da 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -3,6 +3,7 @@ # valid_vals_RUN_ENVIR=("nco" "community") valid_vals_VERBOSE=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") +valid_vals_DEBUG=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_MACHINE=("WCOSS_CRAY" "WCOSS_DELL_P3" "HERA" "ORION" "JET" "ODIN" "CHEYENNE" "STAMPEDE" "LINUX" "MACOS") valid_vals_SCHED=("slurm" "pbspro" "lsf" "lsfcray" "none") valid_vals_FCST_MODEL=("ufs-weather-model" "fv3gfs_aqm") From 23dec6e4e12975428c9b237aeba16ac031c93211 Mon Sep 17 00:00:00 2001 From: gsketefian <31046882+gsketefian@users.noreply.github.com> Date: Mon, 6 Dec 2021 14:57:59 -0700 Subject: [PATCH 03/15] Toggle on/off workflow tasks (#645) ## DESCRIPTION OF CHANGES: 1. Add capability to turn on/off those workflow tasks that currently do not have this capability. These tasks are: a. Getting the external model files for creating initial conditions (`GET_EXTRN_ICS_TN`). 
b. Getting the external model files for creating boundary conditions (`GET_EXTRN_LBCS_TN`). c. Regridding/interpolating data from the external model IC files to obtain IC fields on the native grid (`MAKE_ICS_TN`). d. Regridding/interpolating data from the external model LBC files to obtain LBC fields on the native grid (`MAKE_LBCS_TN`). e. Running the forecast (`RUN_FCST`). Note that any remaining tasks in the workflow already have the capability to be toggled on/off. 2. Add a WE2E test (named `deactivate_tasks`) to test the ability to turn off tasks. In this test, all the tasks except `MAKE_GRID_TN`, `MAKE_OROG_TN`, and `MAKE_SFC_CLIMO_TN` are turned off (the latter 3 are not turned off because there is already a separate WE2E for turning those three off). ## TESTS CONDUCTED: Ran the new WE2E test (`deactivate_tasks`) successfully on Hera. ## DOCUMENTATION: Necessary documentation has been included in `config_defaults.sh`. Created Issue #646 to also update rst documentation. --- .../wflow_features/config.deactivate_tasks.sh | 26 ++++++ ush/config_defaults.sh | 26 ++++-- ush/generate_FV3LAM_wflow.sh | 5 ++ ush/setup.sh | 63 +++++++------ ush/templates/FV3LAM_wflow.xml | 88 +++++++++++-------- 5 files changed, 131 insertions(+), 77 deletions(-) create mode 100644 tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.sh diff --git a/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.sh b/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.sh new file mode 100644 index 000000000..52aad5cfa --- /dev/null +++ b/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.sh @@ -0,0 +1,26 @@ +# +# TEST PURPOSE/DESCRIPTION: +# ------------------------ +# +# This test ensures that the various workflow tasks can be deactivated, +# i.e. removed from the Rocoto XML. Note that we leave the MAKE_GRID_TN, +# MAKE_OROG_TN, and MAKE_SFC_CLIMO_TN activated because there is a +# separate test for turning those off. 
+# + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +PREDEF_GRID_NAME="RRFS_CONUS_25km" +CCPP_PHYS_SUITE="FV3_GFS_v15p2" + +DATE_FIRST_CYCL="20190615" +DATE_LAST_CYCL="20190615" +CYCL_HRS=( "00" ) + +RUN_TASK_GET_EXTRN_ICS="FALSE" +RUN_TASK_GET_EXTRN_LBCS="FALSE" +RUN_TASK_MAKE_ICS="FALSE" +RUN_TASK_MAKE_LBCS="FALSE" +RUN_TASK_RUN_FCST="FALSE" +RUN_TASK_RUN_POST="FALSE" diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 4bdfdfc6e..8c2ba74aa 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -1265,6 +1265,21 @@ VX_ENSPOINT_PROB_TN="run_enspointvx_prob" # SFC_CLIMO_DIR: # Same as GRID_DIR but for the MAKE_SFC_CLIMO_TN task. # +# RUN_TASK_GET_EXTRN_ICS: +# Flag that determines whether the GET_EXTRN_ICS_TN task is to be run. +# +# RUN_TASK_GET_EXTRN_LBCS: +# Flag that determines whether the GET_EXTRN_LBCS_TN task is to be run. +# +# RUN_TASK_MAKE_ICS: +# Flag that determines whether the MAKE_ICS_TN task is to be run. +# +# RUN_TASK_MAKE_LBCS: +# Flag that determines whether the MAKE_LBCS_TN task is to be run. +# +# RUN_TASK_RUN_FCST: +# Flag that determines whether the RUN_FCST_TN task is to be run. +# # RUN_TASK_RUN_POST: # Flag that determines whether the RUN_POST_TN task is to be run. 
# @@ -1296,20 +1311,19 @@ OROG_DIR="/path/to/pregenerated/orog/files" RUN_TASK_MAKE_SFC_CLIMO="TRUE" SFC_CLIMO_DIR="/path/to/pregenerated/surface/climo/files" +RUN_TASK_GET_EXTRN_ICS="TRUE" +RUN_TASK_GET_EXTRN_LBCS="TRUE" +RUN_TASK_MAKE_ICS="TRUE" +RUN_TASK_MAKE_LBCS="TRUE" +RUN_TASK_RUN_FCST="TRUE" RUN_TASK_RUN_POST="TRUE" RUN_TASK_GET_OBS_CCPA="FALSE" - RUN_TASK_GET_OBS_MRMS="FALSE" - RUN_TASK_GET_OBS_NDAS="FALSE" - RUN_TASK_VX_GRIDSTAT="FALSE" - RUN_TASK_VX_POINTSTAT="FALSE" - RUN_TASK_VX_ENSGRID="FALSE" - RUN_TASK_VX_ENSPOINT="FALSE" # #----------------------------------------------------------------------- diff --git a/ush/generate_FV3LAM_wflow.sh b/ush/generate_FV3LAM_wflow.sh index a9fc3a817..1370a7bc4 100755 --- a/ush/generate_FV3LAM_wflow.sh +++ b/ush/generate_FV3LAM_wflow.sh @@ -361,6 +361,11 @@ file (template_xml_fp): 'run_task_make_grid': ${RUN_TASK_MAKE_GRID} 'run_task_make_orog': ${RUN_TASK_MAKE_OROG} 'run_task_make_sfc_climo': ${RUN_TASK_MAKE_SFC_CLIMO} + 'run_task_get_extrn_ics': ${RUN_TASK_GET_EXTRN_ICS} + 'run_task_get_extrn_lbcs': ${RUN_TASK_GET_EXTRN_LBCS} + 'run_task_make_ics': ${RUN_TASK_MAKE_ICS} + 'run_task_make_lbcs': ${RUN_TASK_MAKE_LBCS} + 'run_task_run_fcst': ${RUN_TASK_RUN_FCST} 'run_task_run_post': ${RUN_TASK_RUN_POST} 'run_task_get_obs_ccpa': ${RUN_TASK_GET_OBS_CCPA} 'run_task_get_obs_mrms': ${RUN_TASK_GET_OBS_MRMS} diff --git a/ush/setup.sh b/ush/setup.sh index 83843eea8..9cc997132 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -2608,10 +2608,10 @@ read -r -d '' str_to_insert << EOM #----------------------------------------------------------------------- #----------------------------------------------------------------------- # Section 1: -# This section is a copy of the default workflow/experiment configura- -# tion file config_defaults.sh in the shell scripts directory USHDIR ex- -# cept that variable values have been updated to those set by the setup -# script (setup.sh). 
+# This section is a copy of the default experiment configuration file +# (${EXPT_DEFAULT_CONFIG_FN}) in the shell scripts directory specified by USHDIR +# except that variable values have been updated to those for the experiment +# (as opposed to the default values). #----------------------------------------------------------------------- #----------------------------------------------------------------------- # @@ -2620,7 +2620,7 @@ EOM # Replace all occurrences of actual newlines in the variable str_to_insert # with escaped backslash-n. This is needed for the sed command below to # work properly (i.e. to avoid it failing with an "unterminated `s' command" -# message). +# error message). # str_to_insert=${str_to_insert//$'\n'/\\n} # @@ -2649,25 +2649,24 @@ while read crnt_line; do # Try to obtain the name of the variable being set on the current line. # This will be successful only if the line consists of one or more char- # acters representing the name of a variable (recall that in generating -# the variable line_list, all leading spaces in the lines in the file -# have been stripped out), followed by an equal sign, followed by zero -# or more characters representing the value that the variable is being -# set to. +# the variable line_list, leading spaces on each line were stripped out), +# followed by an equal sign, followed by zero or more characters +# representing the value that the variable is being set to. # var_name=$( printf "%s" "${crnt_line}" | $SED -n -r -e "s/^([^ ]*)=.*/\1/p" ) # -# If var_name is not empty, then a variable name was found in the cur- -# rent line in line_list. +# If var_name is not empty, then a variable name was found on the current +# line in line_list. # if [ ! 
-z $var_name ]; then print_info_msg "$DEBUG" " var_name = \"${var_name}\"" # -# If the variable specified in var_name is set in the current environ- -# ment (to either an empty or non-empty string), get its value and in- -# sert it in the variable definitions file on the line where that varia- -# ble is defined. Note that +# If the variable specified in var_name is set in the current environment +# (to either an empty or non-empty string), get its value and insert it +# in the variable definitions file on the line where that variable is +# defined. Note that # # ${!var_name+x} # @@ -2687,13 +2686,13 @@ var_name = \"${var_name}\"" # # We will now set the variable var_value to the string that needs to be # placed on the right-hand side of the assignment operator (=) on the -# appropriate line in variable definitions file. How this is done de- -# pends on whether the variable is a scalar or an array. +# appropriate line in the variable definitions file. How this is done +# depends on whether the variable is a scalar or an array. # # If the variable contains only one element, then it is a scalar. (It -# could be a 1-element array, but it is simpler to treat it as a sca- -# lar.) In this case, we enclose its value in double quotes and save -# the result in var_value. +# could be a 1-element array, but for simplicity, we treat that case as +# a scalar.) In this case, we enclose its value in double quotes and +# save the result in var_value. # if [ "$num_elems" -eq 1 ]; then var_value="${!var_name}" @@ -2707,8 +2706,8 @@ var_name = \"${var_name}\"" # # 2) Place parentheses around the double-quoted list of array elements # generated in the first step. Note that there is no need to put a -# space before the closing parenthesis because in step 1, we have al- -# ready placed a space after the last element. +# space before the closing parenthesis because in step 1, we have +# already placed a space after the last element. 
# else @@ -2731,16 +2730,16 @@ var_name = \"${var_name}\"" fi # -# If the variable specified in var_name is not set in the current envi- -# ronment (to either an empty or non-empty string), get its value and -# insert it in the variable definitions file on the line where that va- -# riable is defined. +# If for some reason the variable specified in var_name is not set in +# the current environment (to either an empty or non-empty string), below +# we will still include it in the variable definitions file and simply +# set it to a null string. Thus, here, we set its value (var_value) to +# an empty string). In this case, we also issue an informational message. # else print_info_msg " -The variable specified by \"var_name\" is not set in the current envi- -ronment: +The variable specified by \"var_name\" is not set in the current environment: var_name = \"${var_name}\" Setting its value in the variable definitions file to an empty string." @@ -2749,13 +2748,13 @@ Setting its value in the variable definitions file to an empty string." fi # # Now place var_value on the right-hand side of the assignment statement -# on the appropriate line in variable definitions file. +# on the appropriate line in the variable definitions file. # set_file_param "${GLOBAL_VAR_DEFNS_FP}" "${var_name}" "${var_value}" # -# If var_name is empty, then a variable name was not found in the cur- -# rent line in line_list. In this case, print out a warning and move on -# to the next line. +# If var_name is empty, then a variable name was not found on the current +# line in line_list. In this case, print out a warning and move on to +# the next line. # else diff --git a/ush/templates/FV3LAM_wflow.xml b/ush/templates/FV3LAM_wflow.xml index cc77b5ad4..e136ff8d8 100644 --- a/ush/templates/FV3LAM_wflow.xml +++ b/ush/templates/FV3LAM_wflow.xml @@ -22,45 +22,45 @@ Parameters needed by the job scheduler. 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + &RSRV_HPSS; {%- if machine in ["WCOSS_CRAY"] %} @@ -291,7 +291,7 @@ MODULES_RUN_TASK_FP script. ************************************************************************ ************************************************************************ --> - + &RSRV_HPSS; {%- if machine in ["WCOSS_CRAY"] %} @@ -331,7 +331,7 @@ MODULES_RUN_TASK_FP script. {%- endif %} {%- if run_task_make_ics %} - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&MAKE_ICS_TN;" "&JOBSDIR;/JREGIONAL_MAKE_ICS" @@ -375,7 +375,7 @@ MODULES_RUN_TASK_FP script. ************************************************************************ ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&MAKE_LBCS_TN;" "&JOBSDIR;/JREGIONAL_MAKE_LBCS" @@ -419,7 +419,7 @@ MODULES_RUN_TASK_FP script. ************************************************************************ ************************************************************************ --> - + &RSRV_FCST; &LOAD_MODULES_RUN_TASK_FP; "&RUN_FCST_TN;" "&JOBSDIR;/JREGIONAL_RUN_FCST" @@ -474,7 +474,7 @@ later below for other output times. 000 00 - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&RUN_POST_TN;" "&JOBSDIR;/JREGIONAL_RUN_POST" @@ -528,7 +528,7 @@ for other output times. {% for min in range(delta_min, 60, delta_min) %}{{ " %02d" % min }}{% endfor %} - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&RUN_POST_TN;" "&JOBSDIR;/JREGIONAL_RUN_POST" @@ -573,7 +573,7 @@ output hours (and all output minutes for each such hour). 
{% for h in range(1, fcst_len_hrs) %}{{ " %03d" % h }}{% endfor %} {% for min in range(0, 60, delta_min) %}{{ " %02d" % min }}{% endfor %} - + {%- else %} {% for h in range(0, fcst_len_hrs+1) %}{{ " %03d" % h }}{% endfor %} - + {%- endif %} &RSRV_DEFAULT; @@ -655,7 +655,7 @@ the tag to be identical to the ones above for other output times. {{ "%03d" % fcst_len_hrs }} 00 - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&RUN_POST_TN;" "&JOBSDIR;/JREGIONAL_RUN_POST" @@ -695,7 +695,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ ************************************************************************ --> - + &RSRV_HPSS; {%- if machine in ["WCOSS_CRAY"] %} @@ -728,7 +728,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ ************************************************************************ --> - + &RSRV_HPSS; {%- if machine in ["WCOSS_CRAY"] %} @@ -762,7 +762,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ ************************************************************************ --> - + &RSRV_HPSS; {%- if machine in ["WCOSS_CRAY"] %} @@ -794,7 +794,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_GRIDSTAT" @@ -844,7 +844,7 @@ the tag to be identical to the ones above for other output times. 
************************************************************************ ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_GRIDSTAT" @@ -893,7 +893,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_GRIDSTAT" @@ -942,7 +942,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_GRIDSTAT" @@ -977,7 +977,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_GRIDSTAT" @@ -1012,7 +1012,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_GRIDSTAT" @@ -1047,7 +1047,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_POINTSTAT" @@ -1099,7 +1099,7 @@ the tag to be identical to the ones above for other output times. 
************************************************************************ ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID" @@ -1131,7 +1131,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID" @@ -1163,7 +1163,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID" @@ -1195,7 +1195,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID" @@ -1226,7 +1226,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID" @@ -1256,7 +1256,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID" @@ -1285,7 +1285,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID_MEAN" @@ -1316,7 +1316,7 @@ the tag to be identical to the ones above for other output times. 
************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID_PROB" @@ -1347,7 +1347,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID_MEAN" @@ -1378,7 +1378,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID_PROB" @@ -1410,7 +1410,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID_MEAN" @@ -1441,7 +1441,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID_PROB" @@ -1473,7 +1473,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID_MEAN" @@ -1504,7 +1504,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID_PROB" @@ -1535,7 +1535,7 @@ the tag to be identical to the ones above for other output times. 
************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID_PROB" @@ -1565,7 +1565,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSGRID_PROB" @@ -1596,7 +1596,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSPOINT" @@ -1624,7 +1624,7 @@ the tag to be identical to the ones above for other output times. ************************************************************************ ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSPOINT_MEAN" @@ -1652,7 +1652,7 @@ the tag to be identical to the ones above for other output times. 
************************************************************************ ************************************************************************ --> - + &RSRV_DEFAULT; &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_ENSPOINT_PROB" From bb54e59fc7168cb47067df7386d6e32fac794ee0 Mon Sep 17 00:00:00 2001 From: "Chan-Hoo.Jeon-NOAA" <60152248+chan-hoo@users.noreply.github.com> Date: Mon, 13 Dec 2021 14:36:40 -0500 Subject: [PATCH 06/15] Make thompson_mynn_lam3km ccpp suite available (#644) * Add FV3_GFS_v15_thompson_mynn_lam3km * Add a we2e test for thompson_mynn_lam3km * Update namelist files * Add MERRA2 data * Fix typos * Move the SDF line in scripts * remove change of nstf_name --- scripts/exregional_make_ics.sh | 1 + scripts/exregional_make_lbcs.sh | 1 + scripts/exregional_run_fcst.sh | 22 ++ ...3GFS_suite_GFS_v15_thompson_mynn_lam3km.sh | 25 ++ ush/config_defaults.sh | 8 + ush/generate_FV3LAM_wflow.sh | 22 +- ush/setup.sh | 28 +- ush/templates/FV3.input.yml | 48 ++- ...iag_table.FV3_GFS_v15_thompson_mynn_lam3km | 340 ++++++++++++++++++ ...eld_table.FV3_GFS_v15_thompson_mynn_lam3km | 51 +++ ush/templates/input.nml.FV3 | 2 + ush/valid_param_vals.sh | 1 + 12 files changed, 544 insertions(+), 5 deletions(-) create mode 100644 tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.sh create mode 100644 ush/templates/diag_table.FV3_GFS_v15_thompson_mynn_lam3km create mode 100644 ush/templates/field_table.FV3_GFS_v15_thompson_mynn_lam3km diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index c82a987ad..67915c609 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -187,6 +187,7 @@ case "${CCPP_PHYS_SUITE}" in "FV3_GSD_SAR" | \ "FV3_RRFS_v1alpha" | \ "FV3_RRFS_v1beta" | \ + "FV3_GFS_v15_thompson_mynn_lam3km" | \ "FV3_HRRR" ) if [ "${EXTRN_MDL_NAME_ICS}" = "RAP" ] || \ [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" ]; then 
diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index 01aeaed96..c28fe24eb 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -186,6 +186,7 @@ case "${CCPP_PHYS_SUITE}" in "FV3_GSD_SAR" | \ "FV3_RRFS_v1alpha" | \ "FV3_RRFS_v1beta" | \ + "FV3_GFS_v15_thompson_mynn_lam3km" | \ "FV3_HRRR" ) if [ "${EXTRN_MDL_NAME_LBCS}" = "RAP" ] || \ [ "${EXTRN_MDL_NAME_LBCS}" = "HRRR" ]; then diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 5a91b26c3..8aa05ce34 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -405,6 +405,28 @@ done # #----------------------------------------------------------------------- # +# Create links in the current run directory to the MERRA2 aerosol +# climatology data files and lookup table for optics properties. +# +#----------------------------------------------------------------------- +# +for f_nm_path in ${FIXclim}/*; do + f_nm=$( basename "${f_nm_path}" ) + pre_f="${f_nm%%.*}" + + if [ "${pre_f}" = "merra2" ]; then + mnth=$( printf "%s\n" "${f_nm}" | grep -o -P '(?<=2014.m).*(?=.nc)' ) + symlink="${run_dir}/aeroclim.m${mnth}.nc" + else + symlink="${run_dir}/${pre_f}.dat" + fi + target="${f_nm_path}" + create_symlink_to_file target="$target" symlink="$symlink" \ + relative="${relative_link_flag}" +done +# +#----------------------------------------------------------------------- +# # If running this cycle/ensemble member combination more than once (e.g. # using rocotoboot), remove any time stamp file that may exist from the # previous attempt. 
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.sh b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.sh new file mode 100644 index 000000000..f391b2688 --- /dev/null +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.sh @@ -0,0 +1,25 @@ +# +# TEST PURPOSE/DESCRIPTION: +# ------------------------ +# +# This test is to ensure that the workflow running in community mode +# completes successfully on the RRFS_CONUS_3km grid using the GFS_v15_ +# thompson_mynn_lam3km physics suite with ICs and LBCs derived from FV3GFS. +# + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +PREDEF_GRID_NAME="RRFS_CONUS_3km" +CCPP_PHYS_SUITE="FV3_GFS_v15_thompson_mynn_lam3km" + +EXTRN_MDL_NAME_ICS="FV3GFS" +EXTRN_MDL_NAME_LBCS="FV3GFS" +USE_USER_STAGED_EXTRN_FILES="TRUE" + +DATE_FIRST_CYCL="20190701" +DATE_LAST_CYCL="20190701" +CYCL_HRS=( "00" ) + +FCST_LEN_HRS="6" +LBC_SPEC_INTVL_HRS="3" diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 56b54ad68..702cf4db8 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -1357,6 +1357,12 @@ SFC_CLIMO_FIELDS=( \ # System directory in which the majority of fixed (i.e. time-independent) # files that are needed to run the FV3-LAM model are located # +# FIXaer: +# System directory where MERRA2 aerosol climatology files are located +# +# FIXlut: +# System directory where the lookup tables for optics properties are located +# # TOPO_DIR: # The location on disk of the static input files used by the make_orog # task (orog.x and shave.x). Can be the same as FIXgsm. 
@@ -1416,6 +1422,8 @@ SFC_CLIMO_FIELDS=( \ # to a null string which will then be overwritten in setup.sh unless the # user has specified a different value in config.sh FIXgsm="" +FIXaer="" +FIXlut="" TOPO_DIR="" SFC_CLIMO_INPUT_DIR="" diff --git a/ush/generate_FV3LAM_wflow.sh b/ush/generate_FV3LAM_wflow.sh index 4d9054bbc..d19a58b52 100755 --- a/ush/generate_FV3LAM_wflow.sh +++ b/ush/generate_FV3LAM_wflow.sh @@ -614,6 +614,25 @@ fi # #----------------------------------------------------------------------- # +# Copy MERRA2 aerosol climatology data. +# +#----------------------------------------------------------------------- +# +print_info_msg "$VERBOSE" " +Copying MERRA2 aerosol climatology data files from system directory +(FIXaer/FIXlut) to a subdirectory (FIXclim) in the experiment directory: + FIXaer = \"${FIXaer}\" + FIXlut = \"${FIXlut}\" + FIXclim = \"${FIXclim}\"" + +check_for_preexist_dir_file "${FIXclim}" "delete" +mkdir_vrfy -p "${FIXclim}" + +cp_vrfy "${FIXaer}/merra2.aerclim"*".nc" "${FIXclim}/" +cp_vrfy "${FIXlut}/optics"*".dat" "${FIXclim}/" +# +#----------------------------------------------------------------------- +# # Copy templates of various input files to the experiment directory. # #----------------------------------------------------------------------- @@ -630,8 +649,7 @@ print_info_msg "$VERBOSE" " cp_vrfy "${FIELD_TABLE_TMPL_FP}" "${FIELD_TABLE_FP}" print_info_msg "$VERBOSE" " - Copying the template NEMS configuration file to the experiment direct- - ory..." + Copying the template NEMS configuration file to the experiment directory..." 
cp_vrfy "${NEMS_CONFIG_TMPL_FP}" "${NEMS_CONFIG_FP}" # # Copy the CCPP physics suite definition file from its location in the diff --git a/ush/setup.sh b/ush/setup.sh index c68e2cbed..ee49750ae 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1013,6 +1013,8 @@ case "$MACHINE" in "WCOSS_CRAY") FIXgsm=${FIXgsm:-"/gpfs/hps3/emc/global/noscrub/emc.glopara/git/fv3gfs/fix/fix_am"} + FIXaer=${FIXaer:-"/gpfs/hps3/emc/global/noscrub/emc.glopara/git/fv3gfs/fix/fix_aer"} + FIXlut=${FIXlut:-"/gpfs/hps3/emc/global/noscrub/emc.glopara/git/fv3gfs/fix/fix_lut"} TOPO_DIR=${TOPO_DIR:-"/gpfs/hps3/emc/global/noscrub/emc.glopara/git/fv3gfs/fix/fix_orog"} SFC_CLIMO_INPUT_DIR=${SFC_CLIMO_INPUT_DIR:-"/gpfs/hps3/emc/global/noscrub/emc.glopara/git/fv3gfs/fix/fix_sfc_climo"} FIXLAM_NCO_BASEDIR=${FIXLAM_NCO_BASEDIR:-"/needs/to/be/specified"} @@ -1020,6 +1022,8 @@ case "$MACHINE" in "WCOSS_DELL_P3") FIXgsm=${FIXgsm:-"/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix/fix_am"} + FIXaer=${FIXaer:-"/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix/fix_aer"} + FIXlut=${FIXlut:-"/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix/fix_lut"} TOPO_DIR=${TOPO_DIR:-"/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix/fix_orog"} SFC_CLIMO_INPUT_DIR=${SFC_CLIMO_INPUT_DIR:-"/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix/fix_sfc_climo"} FIXLAM_NCO_BASEDIR=${FIXLAM_NCO_BASEDIR:-"/needs/to/be/specified"} @@ -1027,6 +1031,8 @@ case "$MACHINE" in "HERA") FIXgsm=${FIXgsm:-"/scratch1/NCEPDEV/global/glopara/fix/fix_am"} + FIXaer=${FIXaer:-"/scratch1/NCEPDEV/global/glopara/fix/fix_aer"} + FIXlut=${FIXlut:-"/scratch1/NCEPDEV/global/glopara/fix/fix_lut"} TOPO_DIR=${TOPO_DIR:-"/scratch1/NCEPDEV/global/glopara/fix/fix_orog"} SFC_CLIMO_INPUT_DIR=${SFC_CLIMO_INPUT_DIR:-"/scratch1/NCEPDEV/global/glopara/fix/fix_sfc_climo"} FIXLAM_NCO_BASEDIR=${FIXLAM_NCO_BASEDIR:-"/scratch2/BMC/det/FV3LAM_pregen"} @@ -1034,6 +1040,8 @@ case "$MACHINE" in "ORION") 
FIXgsm=${FIXgsm:-"/work/noaa/global/glopara/fix/fix_am"} + FIXaer=${FIXaer:-"/work/noaa/global/glopara/fix/fix_aer"} + FIXlut=${FIXlut:-"/work/noaa/global/glopara/fix/fix_lut"} TOPO_DIR=${TOPO_DIR:-"/work/noaa/global/glopara/fix/fix_orog"} SFC_CLIMO_INPUT_DIR=${SFC_CLIMO_INPUT_DIR:-"/work/noaa/global/glopara/fix/fix_sfc_climo"} FIXLAM_NCO_BASEDIR=${FIXLAM_NCO_BASEDIR:-"/needs/to/be/specified"} @@ -1041,6 +1049,8 @@ case "$MACHINE" in "JET") FIXgsm=${FIXgsm:-"/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix/fix_am"} + FIXaer=${FIXaer:-"/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix/fix_aer"} + FIXlut=${FIXlut:-"/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix/fix_lut"} TOPO_DIR=${TOPO_DIR:-"/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix/fix_orog"} SFC_CLIMO_INPUT_DIR=${SFC_CLIMO_INPUT_DIR:-"/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix/fix_sfc_climo"} FIXLAM_NCO_BASEDIR=${FIXLAM_NCO_BASEDIR:-"/needs/to/be/specified"} @@ -1048,6 +1058,8 @@ case "$MACHINE" in "ODIN") FIXgsm=${FIXgsm:-"/scratch/ywang/fix/theia_fix/fix_am"} + FIXaer=${FIXaer:-"/scratch/ywang/fix/theia_fix/fix_aer"} + FIXlut=${FIXlut:-"/scratch/ywang/fix/theia_fix/fix_lut"} TOPO_DIR=${TOPO_DIR:-"/scratch/ywang/fix/theia_fix/fix_orog"} SFC_CLIMO_INPUT_DIR=${SFC_CLIMO_INPUT_DIR:-"/scratch/ywang/fix/climo_fields_netcdf"} FIXLAM_NCO_BASEDIR=${FIXLAM_NCO_BASEDIR:-"/needs/to/be/specified"} @@ -1055,6 +1067,8 @@ case "$MACHINE" in "CHEYENNE") FIXgsm=${FIXgsm:-"/glade/p/ral/jntp/UFS_CAM/fix/fix_am"} + FIXaer=${FIXaer:-"/glade/p/ral/jntp/UFS_CAM/fix/fix_aer"} + FIXlut=${FIXlut:-"/glade/p/ral/jntp/UFS_CAM/fix/fix_lut"} TOPO_DIR=${TOPO_DIR:-"/glade/p/ral/jntp/UFS_CAM/fix/fix_orog"} SFC_CLIMO_INPUT_DIR=${SFC_CLIMO_INPUT_DIR:-"/glade/p/ral/jntp/UFS_CAM/fix/climo_fields_netcdf"} FIXLAM_NCO_BASEDIR=${FIXLAM_NCO_BASEDIR:-"/needs/to/be/specified"} @@ -1062,17 +1076,21 @@ case "$MACHINE" in "STAMPEDE") FIXgsm=${FIXgsm:-"/work/00315/tg455890/stampede2/regional_fv3/fix_am"} + FIXaer=${FIXaer:-"/work/00315/tg455890/stampede2/regional_fv3/fix_aer"} + 
FIXlut=${FIXlut:-"/work/00315/tg455890/stampede2/regional_fv3/fix_lut"} TOPO_DIR=${TOPO_DIR:-"/work/00315/tg455890/stampede2/regional_fv3/fix_orog"} SFC_CLIMO_INPUT_DIR=${SFC_CLIMO_INPUT_DIR:-"/work/00315/tg455890/stampede2/regional_fv3/climo_fields_netcdf"} FIXLAM_NCO_BASEDIR=${FIXLAM_NCO_BASEDIR:-"/needs/to/be/specified"} ;; *) - if [ -z "$FIXgsm" -o -z "$TOPO_DIR" -o -z "$SFC_CLIMO_INPUT_DIR" ]; then + if [ -z "$FIXgsm" -o -z "$FIXaer" -o -z "$FIXlut" -o -z "$TOPO_DIR" -o -z "$SFC_CLIMO_INPUT_DIR" ]; then print_err_msg_exit "\ One or more fix file directories have not been specified for this machine: MACHINE = \"$MACHINE\" FIXgsm = \"${FIXgsm:-\"\"} + FIXaer = \"${FIXaer:-\"\"} + FIXlut = \"${FIXlut:-\"\"} TOPO_DIR = \"${TOPO_DIR:-\"\"} SFC_CLIMO_INPUT_DIR = \"${SFC_CLIMO_INPUT_DIR:-\"\"} FIXLAM_NCO_BASEDIR = \"${FIXLAM_NCO_BASEDIR:-\"\"} @@ -1473,6 +1491,10 @@ check_for_preexist_dir_file "$EXPTDIR" "${PREEXISTING_DIR_METHOD}" # the fixed files containing various fields on global grids (which are # usually much coarser than the native FV3-LAM grid). 
# +# FIXclim: +# This is the directory that will contain the MERRA2 aerosol climatology +# data file and lookup tables for optics properties +# # FIXLAM: # This is the directory that will contain the fixed files or symlinks to # the fixed files containing the grid, orography, and surface climatology @@ -1507,6 +1529,7 @@ check_for_preexist_dir_file "$EXPTDIR" "${PREEXISTING_DIR_METHOD}" LOGDIR="${EXPTDIR}/log" FIXam="${EXPTDIR}/fix_am" +FIXclim="${EXPTDIR}/fix_clim" FIXLAM="${EXPTDIR}/fix_lam" if [ "${RUN_ENVIR}" = "nco" ]; then @@ -2857,8 +2880,11 @@ PARMDIR="$PARMDIR" MODULES_DIR="${MODULES_DIR}" EXECDIR="$EXECDIR" FIXam="$FIXam" +FIXclim="$FIXclim" FIXLAM="$FIXLAM" FIXgsm="$FIXgsm" +FIXaer="$FIXaer" +FIXlut="$FIXlut" COMROOT="$COMROOT" COMOUT_BASEDIR="${COMOUT_BASEDIR}" TEMPLATE_DIR="${TEMPLATE_DIR}" diff --git a/ush/templates/FV3.input.yml b/ush/templates/FV3.input.yml index ce8f833f5..b971671dd 100644 --- a/ush/templates/FV3.input.yml +++ b/ush/templates/FV3.input.yml @@ -62,8 +62,6 @@ FV3_HRRR: kord_wz: 9 nord_tr: 2 nrows_blend: 10 - regional_bcs_from_gsi: False - write_restart_with_bcs: False gfs_physics_nml: <<: *gsd_sar_phys cdmbgwd: [3.5, 1.0] @@ -287,6 +285,52 @@ FV3_GFS_v15p2: ldebug: False surf_map_nml: +FV3_GFS_v15_thompson_mynn_lam3km: + atmos_model_nml: + avg_max_length: 3600.0 + fv_core_nml: + agrid_vel_rst: true + full_zs_filter: !!python/none + n_sponge: 9 + npz_type: '' + rf_fast: false + sg_cutoff: 10000.0 + vtdm4: 0.02 + gfs_physics_nml: + avg_max_length: 3600.0 + cdmbgwd: [0.88, 0.04] + do_deep: false + do_mynnsfclay: true + do_ugwp: false + fhswr: 900.0 + fhlwr: 900.0 + iaer: 1011 + iccn: 2 + icliq_sw: 2 + imfdeepcnv: 2 + imfshalcnv: 2 + iopt_alb: 2 + iopt_btr: 1 + iopt_crs: 1 + iopt_dveg: 2 + iopt_frz: 1 + iopt_inf: 1 + iopt_rad: 1 + iopt_run: 1 + iopt_sfc: 1 + iopt_snf: 4 + iopt_stc: 1 + iopt_tbot: 2 + iovr: 3 + ldiag_ugwp: false + lgfdlmprad: false + lsm: 1 + lsoil_lsm: !!python/none + ltaerosol: false + xkzminv: 0.3 + xkzm_m: 1.0 + 
xkzm_h: 1.0 + FV3_GFS_v16: cires_ugwp_nml: launch_level: 27 diff --git a/ush/templates/diag_table.FV3_GFS_v15_thompson_mynn_lam3km b/ush/templates/diag_table.FV3_GFS_v15_thompson_mynn_lam3km new file mode 100644 index 000000000..84acd3caf --- /dev/null +++ b/ush/templates/diag_table.FV3_GFS_v15_thompson_mynn_lam3km @@ -0,0 +1,340 @@ +{{ starttime.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional +{{ starttime.strftime("%Y %m %d %H %M %S") }} + +#output files +"grid_spec", -1, "months", 1, "days", "time" +#"atmos_4xdaily", 6, "hours", 1, "days", "time" +"atmos_static", -1, "hours", 1, "hours", "time" +"fv3_history", 3, "hours", 1, "hours", "time" +"fv3_history2d", 3, "hours", 1, "hours", "time" + +# +#======================= +# ATMOSPHERE DIAGNOSTICS +#======================= +### +# grid_spec +### + "dynamics", "grid_lon", "grid_lon", "grid_spec", "all", .false., "none", 2, + "dynamics", "grid_lat", "grid_lat", "grid_spec", "all", .false., "none", 2, + "dynamics", "grid_lont", "grid_lont", "grid_spec", "all", .false., "none", 2, + "dynamics", "grid_latt", "grid_latt", "grid_spec", "all", .false., "none", 2, + "dynamics", "area", "area", "grid_spec", "all", .false., "none", 2, +### +# 4x daily output +### +# "dynamics", "slp", "slp", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "vort850", "vort850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "vort200", "vort200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "us", "us", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u1000", "u1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u850", "u850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u700", "u700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u500", "u500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u200", "u200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u100", "u100", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", 
"u50", "u50", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u10", "u10", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "vs", "vs", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v1000", "v1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v850", "v850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v700", "v700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v500", "v500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v200", "v200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v100", "v100", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v50", "v50", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v10", "v10", "atmos_4xdaily", "all", .false., "none", 2 +#### +# "dynamics", "tm", "tm", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t1000", "t1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t850", "t850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t700", "t700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t500", "t500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t200", "t200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t100", "t100", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t50", "t50", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t10", "t10", "atmos_4xdaily", "all", .false., "none", 2 +#### +# "dynamics", "z1000", "z1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "z850", "z850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "z700", "z700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "z500", "z500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "z200", "z200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "z100", "z100", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "z50", "z50", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", 
"z10", "z10", "atmos_4xdaily", "all", .false., "none", 2 +#### +#"dynamics", "w1000", "w1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "w850", "w850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "w700", "w700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "w500", "w500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "w200", "w200", "atmos_4xdaily", "all", .false., "none", 2 +#### +# "dynamics", "q1000", "q1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "q850", "q850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "q700", "q700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "q500", "q500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "q200", "q200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "q100", "q100", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "q50", "q50", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "q10", "q10", "atmos_4xdaily", "all", .false., "none", 2 +#### +# "dynamics", "rh1000", "rh1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "rh850", "rh850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "rh700", "rh700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "rh500", "rh500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "rh200", "rh200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg1000", "omg1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg850", "omg850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg700", "omg700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg500", "omg500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg200", "omg200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg100", "omg100", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg50", "omg50", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg10", "omg10", 
"atmos_4xdaily", "all", .false., "none", 2 +### +# gfs static data +### + "dynamics", "pk", "pk", "atmos_static", "all", .false., "none", 2 + "dynamics", "bk", "bk", "atmos_static", "all", .false., "none", 2 + "dynamics", "hyam", "hyam", "atmos_static", "all", .false., "none", 2 + "dynamics", "hybm", "hybm", "atmos_static", "all", .false., "none", 2 + "dynamics", "zsurf", "zsurf", "atmos_static", "all", .false., "none", 2 +### +# FV3 variables needed for NGGPS evaluation +### +"gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "sphum", "spfh", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "temp", "tmp", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "liq_wat", "clwmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "o3mr", "o3mr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "delp", "dpres", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "delz", "delz", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "ice_wat", "icmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "rainwat", "rwmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "snowwat", "snmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "graupel", "grle", "fv3_history", "all", .false., "none", 2 +#"gfs_dyn", "q_rimef", "q_rimef", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "pfnh", "pfnh", "fv3_history", "all", .false., "none", 2 +#"gfs_dyn", "ice_nc", "nicp", "fv3_history", "all", .false., "none", 2 +#"gfs_dyn", "rain_nc", "ntrnc", "fv3_history", "all", .false., "none", 2 +#"gfs_dyn", "cld_amt", "cld_amt", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "wmaxup", "upvvelmax", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "wmaxdn", 
"dnvvelmax", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "uhmax03", "uhmax03", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "uhmax25", "uhmax25", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "uhmin03", "uhmin03", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "uhmin25", "uhmin25", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "maxvort01", "maxvort01", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "maxvort02", "maxvort02", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "maxvorthy1", "maxvorthy1", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "ustm", "ustm", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "vstm", "vstm", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "srh01", "srh01", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "srh03", "srh03", "fv3_history", "all", .false., "none", 2 +"gfs_phys", "u10max", "u10max", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "v10max", "v10max", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "spd10max", "spd10max", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "refdmax", "refdmax", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "refdmax263k","refdmax263k","fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "t02max", "t02max", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "t02min", "t02min", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "rh02max", "rh02max", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "rh02min", "rh02min", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "refl_10cm", "refl_10cm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cldfra", "cldfra", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "pratemax", "pratemax", "fv3_history2d", "all", .false., "none", 2 + +"gfs_phys", "ALBDO_ave", "albdo_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cnvprcp_ave", "cprat_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", 
"cnvprcpb_ave", "cpratb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "totprcp_ave", "prate_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "totprcpb_ave", "prateb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DLWRF", "dlwrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DLWRFI", "dlwrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ULWRF", "ulwrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ULWRFI", "ulwrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DSWRF", "dswrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DSWRFI", "dswrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "USWRF", "uswrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "USWRFI", "uswrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DSWRFtoa", "dswrf_avetoa","fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "USWRFtoa", "uswrf_avetoa","fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ULWRFtoa", "ulwrf_avetoa","fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "gflux_ave", "gflux_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "hpbl", "hpbl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "lhtfl_ave", "lhtfl_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "shtfl_ave", "shtfl_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "pwat", "pwatclm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "soilm", "soilm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_aveclm", "tcdc_aveclm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avebndcl", "tcdc_avebndcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avehcl", "tcdc_avehcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avelcl", "tcdc_avelcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avemcl", "tcdc_avemcl", "fv3_history2d", "all", .false., 
"none", 2 +"gfs_phys", "TCDCcnvcl", "tcdccnvcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PREScnvclt", "prescnvclt", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PREScnvclb", "prescnvclb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avehct", "pres_avehct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avehcb", "pres_avehcb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TEMP_avehct", "tmp_avehct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avemct", "pres_avemct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avemcb", "pres_avemcb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TEMP_avemct", "tmp_avemct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avelct", "pres_avelct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avelcb", "pres_avelcb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TEMP_avelct", "tmp_avelct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "u-gwd_ave", "u-gwd_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "v-gwd_ave", "v-gwd_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "dusfc", "uflx_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "dvsfc", "vflx_ave", "fv3_history2d", "all", .false., "none", 2 +#"gfs_phys", "cnvw", "cnvcldwat", "fv3_history2d", "all", .false., "none", 2 + +"gfs_phys", "psurf", "pressfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "u10m", "ugrd10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "v10m", "vgrd10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "crain", "crain", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tprcp", "tprcp", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "hgtsfc", "orog", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "weasd", "weasd", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "f10m", "f10m", "fv3_history2d", "all", 
.false., "none", 2 +"gfs_sfc", "q2m", "spfh2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "t2m", "tmp2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tsfc", "tmpsfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "vtype", "vtype", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "stype", "sotyp", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slmsksfc", "land", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "vfracsfc", "veg", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "zorlsfc", "sfcr", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "uustar", "fricv", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt1", "soilt1", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt2", "soilt2", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt3", "soilt3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt4", "soilt4", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw1", "soilw1", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw2", "soilw2", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw3", "soilw3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw4", "soilw4", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_1", "soill1", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_2", "soill2", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_3", "soill3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_4", "soill4", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slope", "sltyp", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alnsf", "alnsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alnwf", "alnwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alvsf", "alvsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alvwf", "alvwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "canopy", "cnwat", "fv3_history2d", "all", .false., 
"none", 2 +"gfs_sfc", "facsf", "facsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "facwf", "facwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "ffhh", "ffhh", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "ffmm", "ffmm", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "fice", "icec", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "hice", "icetk", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "snoalb", "snoalb", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "shdmax", "shdmax", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "shdmin", "shdmin", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "snowd", "snod", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tg3", "tg3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tisfc", "tisfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tref", "tref", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "z_c", "zc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "c_0", "c0", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "c_d", "cd", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "w_0", "w0", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "w_d", "wd", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xt", "xt", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xz", "xz", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "dt_cool", "dtcool", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xs", "xs", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xu", "xu", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xv", "xv", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xtts", "xtts", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xzts", "xzts", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "d_conv", "dconv", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "qrain", "qrain", "fv3_history2d", "all", .false., "none", 2 + 
+"gfs_phys", "acond", "acond", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cduvb_ave", "cduvb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cpofp", "cpofp", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "duvb_ave", "duvb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csdlf_ave", "csdlf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csusf_ave", "csusf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csusf_avetoa", "csusftoa", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csdsf_ave", "csdsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csulf_ave", "csulf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csulf_avetoa", "csulftoa", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cwork_ave", "cwork_aveclm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "evbs_ave", "evbs_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "evcw_ave", "evcw_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "fldcp", "fldcp", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "hgt_hyblev1", "hgt_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "spfh_hyblev1", "spfh_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ugrd_hyblev1", "ugrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "vgrd_hyblev1", "vgrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tmp_hyblev1", "tmp_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "gfluxi", "gflux", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "lhtfl", "lhtfl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "shtfl", "shtfl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "pevpr", "pevpr", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "pevpr_ave", "pevpr_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "sbsno_ave", "sbsno_ave", "fv3_history2d", "all", .false., 
"none", 2 +"gfs_phys", "sfexc", "sfexc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "snohf", "snohf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "snowc_ave", "snowc_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "spfhmax2m", "spfhmax_max2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "spfhmin2m", "spfhmin_min2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tmpmax2m", "tmax_max2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tmpmin2m", "tmin_min2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ssrun_acc", "ssrun_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "sunsd_acc", "sunsd_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "watr_acc", "watr_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "wilt", "wilt", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "vbdsf_ave", "vbdsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "vddsf_ave", "vddsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "nbdsf_ave", "nbdsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "nddsf_ave", "nddsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "trans_ave", "trans_ave", "fv3_history2d", "all", .false., "none", 2 +# Aerosols (CCN, IN) from Thompson microphysics +#"gfs_phys", "nwfa", "nwfa", "fv3_history", "all", .false., "none", 2 +#"gfs_phys", "nifa", "nifa", "fv3_history", "all", .false., "none", 2 +"gfs_sfc", "nwfa2d", "nwfa2d", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "nifa2d", "nifa2d", "fv3_history2d", "all", .false., "none", 2 + +#============================================================================================= +# +#====> This file can be used with diag_manager/v2.0a (or higher) <==== +# +# +# FORMATS FOR FILE ENTRIES (not all input values are used) +# ------------------------ +# +#"file_name", output_freq, "output_units", format, "time_units", "long_name", +# +# 
+#output_freq: > 0 output frequency in "output_units" +# = 0 output frequency every time step +# =-1 output frequency at end of run +# +#output_units = units used for output frequency +# (years, months, days, minutes, hours, seconds) +# +#time_units = units used to label the time axis +# (days, minutes, hours, seconds) +# +# +# FORMAT FOR FIELD ENTRIES (not all input values are used) +# ------------------------ +# +#"module_name", "field_name", "output_name", "file_name" "time_sampling", time_avg, "other_opts", packing +# +#time_avg = .true. or .false. +# +#packing = 1 double precision +# = 2 float +# = 4 packed 16-bit integers +# = 8 packed 1-byte (not tested?) diff --git a/ush/templates/field_table.FV3_GFS_v15_thompson_mynn_lam3km b/ush/templates/field_table.FV3_GFS_v15_thompson_mynn_lam3km new file mode 100644 index 000000000..6fea17437 --- /dev/null +++ b/ush/templates/field_table.FV3_GFS_v15_thompson_mynn_lam3km @@ -0,0 +1,51 @@ +# added by FRE: sphum must be present in atmos +# specific humidity for moist runs + "TRACER", "atmos_mod", "sphum" + "longname", "specific humidity" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=3.e-6" / +# prognostic cloud water mixing ratio + "TRACER", "atmos_mod", "liq_wat" + "longname", "cloud water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic ice water mixing ratio + "TRACER", "atmos_mod", "ice_wat" + "longname", "cloud ice mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic rain water mixing ratio + "TRACER", "atmos_mod", "rainwat" + "longname", "rain water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic snow water mixing ratio + "TRACER", "atmos_mod", "snowwat" + "longname", "snow water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic Grau water mixing ratio + "TRACER", "atmos_mod", "graupel" + "longname", "graupel 
mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic cloud ice number concentration + "TRACER", "atmos_mod", "ice_nc" + "longname", "cloud ice water number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=0.0" / +# prognostic rain number concentration + "TRACER", "atmos_mod", "rain_nc" + "longname", "rain number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=0.0" / +# prognostic ozone mixing ratio tracer + "TRACER", "atmos_mod", "o3mr" + "longname", "ozone mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic subgrid scale turbulent kinetic energy + "TRACER", "atmos_mod", "sgs_tke" + "longname", "subgrid scale turbulent kinetic energy" + "units", "m2/s2" + "profile_type", "fixed", "surface_value=0.0" / diff --git a/ush/templates/input.nml.FV3 b/ush/templates/input.nml.FV3 index 2d35ca477..645e21601 100644 --- a/ush/templates/input.nml.FV3 +++ b/ush/templates/input.nml.FV3 @@ -113,6 +113,7 @@ range_warn = .true. read_increment = .false. regional = .true. + regional_bcs_from_gsi = .false. res_latlon_dynamics = 'fv3_increment.nc' reset_eta = .false. rf_cutoff = 20.e2 @@ -120,6 +121,7 @@ use_hydro_pressure = .false. vtdm4 = 0.075 warm_start = .false. + write_restart_with_bcs = .false. z_tracer = .true. 
/ diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index 8104038da..a8bd0ae72 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -35,6 +35,7 @@ valid_vals_CCPP_PHYS_SUITE=( \ "FV3_GSD_SAR" \ "FV3_GSD_v0" \ "FV3_GFS_v15p2" \ +"FV3_GFS_v15_thompson_mynn_lam3km" \ "FV3_GFS_v16" \ "FV3_RRFS_v1beta" \ "FV3_RRFS_v1alpha" \ From 8a97a760a6a82ca00a5d0769f5266e138bf07366 Mon Sep 17 00:00:00 2001 From: michelleharrold Date: Tue, 21 Dec 2021 08:48:58 -0700 Subject: [PATCH 07/15] Adding variables, levels, and thresholds to ensemble vx (feature/add_ensvx_vars) (#649) * Modifications to grid-to-point ensemble vx for additional variables, levels, and thresholds. * Update ensemble thresholds for APCP. Turning on OBS ERR for APCP. * Turn off OBS ERR for point * Turn OBS ERR to false in EnsembleStatConfig_point * Turned on obs error for ensemble point vx; fixed TCDC fcst options in conf files. * Broke out BOTH_ to FCST_ and OBS_ in certain METplus conf files. * Fixed typo in GridStat_APCP06h_prob.conf. * Add NBR ens probability section to APCP and REFC ensemble-stat configs. * Addressed typo in EnsembleStatConfig_APCP. * Removed double TCDC entry in PointStat_conus_sfc_prob.conf. 
Co-authored-by: Jamie Wolff --- .../parm/met/EnsembleStatConfig_APCP | 35 ++- .../parm/met/EnsembleStatConfig_REFC | 33 +- .../parm/metplus/EnsembleStat_APCP01h.conf | 4 +- .../parm/metplus/EnsembleStat_APCP03h.conf | 2 +- .../parm/metplus/EnsembleStat_APCP06h.conf | 2 +- .../parm/metplus/EnsembleStat_APCP24h.conf | 2 +- .../parm/metplus/EnsembleStat_conus_sfc.conf | 80 ++++- .../parm/metplus/EnsembleStat_upper_air.conf | 162 ++++++++-- .../parm/metplus/GridStat_APCP01h_mean.conf | 2 +- .../parm/metplus/GridStat_APCP01h_prob.conf | 12 +- .../parm/metplus/GridStat_APCP03h_mean.conf | 2 +- .../parm/metplus/GridStat_APCP03h_prob.conf | 37 +-- .../parm/metplus/GridStat_APCP06h_mean.conf | 2 +- .../parm/metplus/GridStat_APCP06h_prob.conf | 39 +-- .../parm/metplus/GridStat_APCP24h_mean.conf | 2 +- .../parm/metplus/GridStat_APCP24h_prob.conf | 39 +-- .../parm/metplus/PointStat_conus_sfc.conf | 9 +- .../metplus/PointStat_conus_sfc_mean.conf | 12 +- .../metplus/PointStat_conus_sfc_prob.conf | 182 +++++++++-- .../metplus/PointStat_upper_air_mean.conf | 105 ++++++- .../metplus/PointStat_upper_air_prob.conf | 295 ++++++++++++++++-- 21 files changed, 833 insertions(+), 225 deletions(-) diff --git a/ush/templates/parm/met/EnsembleStatConfig_APCP b/ush/templates/parm/met/EnsembleStatConfig_APCP index 34d560cb6..f9569105a 100755 --- a/ush/templates/parm/met/EnsembleStatConfig_APCP +++ b/ush/templates/parm/met/EnsembleStatConfig_APCP @@ -59,6 +59,33 @@ ens = { //////////////////////////////////////////////////////////////////////////////// +// +// Neighborhood ensemble probabilities +// +nbrhd_prob = { + width = [ 5 ]; + shape = CIRCLE; + vld_thresh = 0.0; +} + +// +// NMEP smoothing methods +// +nmep_smooth = { + vld_thresh = 0.0; + shape = CIRCLE; + gaussian_dx = 81.27; + gaussian_radius = 120; + type = [ + { + method = GAUSSIAN; + width = 1; + } + ]; +} + +//////////////////////////////////////////////////////////////////////////////// + // // Forecast and observation fields to 
be verified // @@ -91,7 +118,7 @@ skip_const = TRUE; // May be set separately in each "obs.field" entry // obs_error = { - flag = FALSE; // TRUE or FALSE + flag = TRUE; // TRUE or FALSE dist_type = NONE; // Distribution type dist_parm = []; // Distribution parameters inst_bias_scale = 1.0; // Instrument bias scale adjustment @@ -207,11 +234,11 @@ interp = { // Statistical output types // output_flag = { - ecnt = NONE; + ecnt = STAT; rps = NONE; rhist = STAT; phist = STAT; - orank = NONE; + orank = STAT; ssvar = STAT; relp = STAT; } @@ -234,7 +261,7 @@ ensemble_flag = { frequency = TRUE; nep = FALSE; nmep = FALSE; - rank = FALSE; + rank = TRUE; weight = FALSE; } diff --git a/ush/templates/parm/met/EnsembleStatConfig_REFC b/ush/templates/parm/met/EnsembleStatConfig_REFC index 5d44356b6..385e01a98 100755 --- a/ush/templates/parm/met/EnsembleStatConfig_REFC +++ b/ush/templates/parm/met/EnsembleStatConfig_REFC @@ -63,6 +63,33 @@ ens = { //////////////////////////////////////////////////////////////////////////////// +// +// Neighborhood ensemble probabilities +// +nbrhd_prob = { + width = [ 5 ]; + shape = CIRCLE; + vld_thresh = 0.0; +} + +// +// NMEP smoothing methods +// +nmep_smooth = { + vld_thresh = 0.0; + shape = CIRCLE; + gaussian_dx = 81.27; + gaussian_radius = 120; + type = [ + { + method = GAUSSIAN; + width = 1; + } + ]; +} + +//////////////////////////////////////////////////////////////////////////////// + // // Forecast and observation fields to be verified // @@ -215,11 +242,11 @@ interp = { // Statistical output types // output_flag = { - ecnt = NONE; + ecnt = STAT; rps = NONE; rhist = STAT; phist = STAT; - orank = NONE; + orank = STAT; ssvar = STAT; relp = STAT; } @@ -242,7 +269,7 @@ ensemble_flag = { frequency = TRUE; nep = FALSE; nmep = FALSE; - rank = FALSE; + rank = TRUE; weight = FALSE; } diff --git a/ush/templates/parm/metplus/EnsembleStat_APCP01h.conf b/ush/templates/parm/metplus/EnsembleStat_APCP01h.conf index 46dd14a82..9172b3c17 100644 --- 
a/ush/templates/parm/metplus/EnsembleStat_APCP01h.conf +++ b/ush/templates/parm/metplus/EnsembleStat_APCP01h.conf @@ -101,14 +101,14 @@ ENSEMBLE_STAT_MASK_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly # ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. # If the variable is not defined, or the value is not set # than the MET default is used. -ENSEMBLE_STAT_MET_OBS_ERR_TABLE = +ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt # Ensemble Variables and levels as specified in the ens field dictionary # of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, # (optional) ENS_VARn_OPTION ENS_VAR1_NAME = APCP ENS_VAR1_LEVELS = A01 -ENS_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge1.27,ge2.54 +ENS_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge2.54 # Forecast Variables and levels as specified in the fcst field dictionary # of the MET configuration file. Specify as FCST_VARn_NAME, FCST_VARn_LEVELS, diff --git a/ush/templates/parm/metplus/EnsembleStat_APCP03h.conf b/ush/templates/parm/metplus/EnsembleStat_APCP03h.conf index cf7c95a23..fbe1f518a 100644 --- a/ush/templates/parm/metplus/EnsembleStat_APCP03h.conf +++ b/ush/templates/parm/metplus/EnsembleStat_APCP03h.conf @@ -150,7 +150,7 @@ ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt # (optional) ENS_VARn_OPTION ENS_VAR1_NAME = APCP ENS_VAR1_LEVELS = A03 -ENS_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge1.27,ge2.54,ge3.810,ge6.350 +ENS_VAR1_THRESH = gt0.0,ge0.508,ge2.54,ge6.350 # Forecast Variables and levels as specified in the fcst field dictionary # of the MET configuration file. 
Specify as FCST_VARn_NAME, FCST_VARn_LEVELS, diff --git a/ush/templates/parm/metplus/EnsembleStat_APCP06h.conf b/ush/templates/parm/metplus/EnsembleStat_APCP06h.conf index 9f8ddaafd..168cb9b98 100644 --- a/ush/templates/parm/metplus/EnsembleStat_APCP06h.conf +++ b/ush/templates/parm/metplus/EnsembleStat_APCP06h.conf @@ -150,7 +150,7 @@ ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt # (optional) ENS_VARn_OPTION ENS_VAR1_NAME = APCP ENS_VAR1_LEVELS = A06 -ENS_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge1.27,ge2.54,ge3.810,ge6.350 +ENS_VAR1_THRESH = gt0.0,ge2.54,ge6.350,ge12.700 # Forecast Variables and levels as specified in the fcst field dictionary # of the MET configuration file. Specify as FCST_VARn_NAME, FCST_VARn_LEVELS, diff --git a/ush/templates/parm/metplus/EnsembleStat_APCP24h.conf b/ush/templates/parm/metplus/EnsembleStat_APCP24h.conf index 4ce438fbb..1243e1f54 100644 --- a/ush/templates/parm/metplus/EnsembleStat_APCP24h.conf +++ b/ush/templates/parm/metplus/EnsembleStat_APCP24h.conf @@ -150,7 +150,7 @@ ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt # (optional) ENS_VARn_OPTION ENS_VAR1_NAME = APCP ENS_VAR1_LEVELS = A24 -ENS_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge1.27,ge2.54,ge3.810,ge6.350 +ENS_VAR1_THRESH = gt0.0,ge6.350,ge12.700,ge25.400 # Forecast Variables and levels as specified in the fcst field dictionary # of the MET configuration file. 
Specify as FCST_VARn_NAME, FCST_VARn_LEVELS, diff --git a/ush/templates/parm/metplus/EnsembleStat_conus_sfc.conf b/ush/templates/parm/metplus/EnsembleStat_conus_sfc.conf index 09d2572d7..2523fde2a 100644 --- a/ush/templates/parm/metplus/EnsembleStat_conus_sfc.conf +++ b/ush/templates/parm/metplus/EnsembleStat_conus_sfc.conf @@ -143,28 +143,78 @@ ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt ENS_VAR1_NAME = TMP ENS_VAR1_LEVELS = Z02 -ENS_VAR1_THRESH = >=293, >=298, >=303 +ENS_VAR1_THRESH = >=268, >=273, >=278, >=293, >=298, >=303 ENS_VAR2_NAME = DPT ENS_VAR2_LEVELS = Z2 -ENS_VAR2_THRESH = >=288, >=293, >=298 +ENS_VAR2_THRESH = >=263, >=268, >=273, >=288, >=293, >=298 ENS_VAR3_NAME = WIND ENS_VAR3_LEVELS = Z10 -ENS_VAR3_THRESH = >=5, >=10 +ENS_VAR3_THRESH = >=5, >=10, >=15 ENS_VAR3_OPTIONS = GRIB2_pdt = 0; ;; derive instantaneous 10-m wind from U/V components, overriding max 10-m wind -BOTH_VAR1_NAME = TMP -BOTH_VAR1_LEVELS = Z2 -BOTH_VAR1_THRESH = >=293, >=298, >=303 - -BOTH_VAR2_NAME = DPT -BOTH_VAR2_LEVELS = Z2 -BOTH_VAR2_THRESH = >=288, >=293, >=298 - -BOTH_VAR3_NAME = WIND -BOTH_VAR3_LEVELS = Z10 -BOTH_VAR3_THRESH = >=5, >=10 -BOTH_VAR3_OPTIONS = GRIB2_pdt = 0; ;; derive instantaneous 10-m wind from U/V components, overriding max 10-m wind +ENS_VAR4_NAME = TCDC +ENS_VAR4_LEVELS = L0 +ENS_VAR4_THRESH = <25, >75 +ENS_VAR4_OPTIONS = GRIB_lvl_typ = 200; GRIB2_ipdtmpl_index = [ 14 ]; GRIB2_ipdtmpl_val = [ 0 ]; interp = { type = [ { method = NEAREST; width = 1; } ]; } + +ENS_VAR5_NAME = VIS +ENS_VAR5_LEVELS = L0 +ENS_VAR5_THRESH = <1609, <8045, >=8045 +ENS_VAR5_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + +ENS_VAR6_NAME = HGT +ENS_VAR6_LEVELS = L0 +ENS_VAR6_THRESH = <152, <1520, >=914 +ENS_VAR6_OPTIONS = GRIB_lvl_typ = 215; desc = "CEILING"; + +FCST_VAR1_NAME = TMP +FCST_VAR1_LEVELS = Z2 +FCST_VAR1_THRESH = >=268, >=273, >=278, >=293, >=298, >=303 +OBS_VAR1_NAME = TMP +OBS_VAR1_LEVELS = Z2 +OBS_VAR1_THRESH = 
>=268, >=273, >=278, >=293, >=298, >=303 +OBS_VAR1_OPTIONS = obs_error = { flag = TRUE; } + +FCST_VAR2_NAME = DPT +FCST_VAR2_LEVELS = Z2 +FCST_VAR2_THRESH = >=263, >=268, >=273, >=288, >=293, >=298 +OBS_VAR2_NAME = DPT +OBS_VAR2_LEVELS = Z2 +OBS_VAR2_THRESH = >=263, >=268, >=273, >=288, >=293, >=298 +OBS_VAR2_OPTIONS = obs_error = { flag = TRUE; } + +FCST_VAR3_NAME = WIND +FCST_VAR3_LEVELS = Z10 +FCST_VAR3_THRESH = >=5, >=10, >=15 +FCST_VAR3_OPTIONS = GRIB2_pdt = 0; ;; derive instantaneous 10-m wind from U/V components, overriding max 10-m wind +OBS_VAR3_NAME = WIND +OBS_VAR3_LEVELS = Z10 +OBS_VAR3_THRESH = >=5, >=10, >=15 +OBS_VAR3_OPTIONS = obs_error = { flag = TRUE; } + +FCST_VAR4_NAME = TCDC +FCST_VAR4_LEVELS = L0 +FCST_VAR4_THRESH = <25, >75 +FCST_VAR4_OPTIONS = GRIB_lvl_typ = 200; GRIB2_ipdtmpl_index = 8; GRIB2_ipdtmpl_val = {lead?fmt=%H}; +OBS_VAR4_NAME = TCDC +OBS_VAR4_LEVELS = L0 +OBS_VAR4_THRESH = <25, >75 +OBS_VAR4_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + +BOTH_VAR5_NAME = VIS +BOTH_VAR5_LEVELS = L0 +BOTH_VAR5_THRESH = <1609, <8045, >=8045 +BOTH_VAR5_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + +FCST_VAR6_NAME = HGT +FCST_VAR6_LEVELS = L0 +FCST_VAR6_THRESH = <152, <1520, >=914 +FCST_VAR6_OPTIONS = GRIB_lvl_typ = 215; desc = "CEILING"; +OBS_VAR6_NAME = CEILING +OBS_VAR6_LEVELS = L0 +OBS_VAR6_OPTIONS = GRIB_lvl_typ = 215; interp = { type = [ { method = NEAREST; width = 1; } ]; } +OBS_VAR6_THRESH = <152, <305, >=914 ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_ADPSFC_{OBTYPE} diff --git a/ush/templates/parm/metplus/EnsembleStat_upper_air.conf b/ush/templates/parm/metplus/EnsembleStat_upper_air.conf index 19d982ecd..fb2070e87 100644 --- a/ush/templates/parm/metplus/EnsembleStat_upper_air.conf +++ b/ush/templates/parm/metplus/EnsembleStat_upper_air.conf @@ -108,7 +108,7 @@ OBS_ENSEMBLE_STAT_WINDOW_END = {OBS_WINDOW_END} # number of expected members for ensemble. 
Should correspond with the # number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = 10 +ENSEMBLE_STAT_N_MEMBERS = {ENV[NUM_ENS_MEMBERS]} # ens.ens_thresh value in the MET config file # threshold for ratio of valid files to expected files to allow app to run @@ -150,24 +150,146 @@ ENS_VAR1_NAME = TMP ENS_VAR1_LEVELS = P850 ENS_VAR1_THRESH = >=288, >=293, >=298 -ENS_VAR2_NAME = DPT -ENS_VAR2_LEVELS = P850 -ENS_VAR2_THRESH = >=283, >=288, >=293 - -ENS_VAR3_NAME = WIND -ENS_VAR3_LEVELS = P850 -ENS_VAR3_THRESH = >=5, >=10 - -BOTH_VAR1_NAME = TMP -BOTH_VAR1_LEVELS = P850 -BOTH_VAR1_THRESH = >=288, >=293, >=298 - -BOTH_VAR2_NAME = DPT -BOTH_VAR2_LEVELS = P850 -BOTH_VAR2_THRESH = >=283, >=288, >=293 - -BOTH_VAR3_NAME = WIND -BOTH_VAR3_LEVELS = P850 -BOTH_VAR3_THRESH = >=5, >=10 +ENS_VAR2_NAME = TMP +ENS_VAR2_LEVELS = P700 +ENS_VAR2_THRESH = >=273, >=278, >=283 + +ENS_VAR3_NAME = TMP +ENS_VAR3_LEVELS = P500 +ENS_VAR3_THRESH = >=258, >=263, >=268 + +ENS_VAR4_NAME = DPT +ENS_VAR4_LEVELS = P850 +ENS_VAR4_THRESH = >=273, >=278, >=283 + +ENS_VAR5_NAME = DPT +ENS_VAR5_LEVELS = P700 +ENS_VAR5_THRESH = >=263, >=268, >=273 + +ENS_VAR6_NAME = WIND +ENS_VAR6_LEVELS = P850 +ENS_VAR6_THRESH = >=5, >=10, >=15 + +ENS_VAR7_NAME = WIND +ENS_VAR7_LEVELS = P700 +ENS_VAR7_THRESH = >=10, >=15, >=20 + +ENS_VAR8_NAME = WIND +ENS_VAR8_LEVELS = P500 +ENS_VAR8_THRESH = >=15, >=21, >=26 + +ENS_VAR9_NAME = WIND +ENS_VAR9_LEVELS = P250 +ENS_VAR9_THRESH = >=26, >=31, >=36, >=46, >=62 + +ENS_VAR10_NAME = HGT +ENS_VAR10_LEVELS = P500 +ENS_VAR10_THRESH = >=5400, >=5600, >=5880 + +ENS_VAR11_NAME = CAPE +ENS_VAR11_LEVELS = L0 +ENS_VAR11_OPTIONS = cnt_thresh = [ >0 ]; +ENS_VAR11_THRESH = <=1000, >1000&&<2500, >2500&&<4000, >2500 + +ENS_VAR12_NAME = HPBL +ENS_VAR12_LEVELS = Z0 +ENS_VAR12_THRESH = <500, <1500, >1500 + +FCST_VAR1_NAME = TMP +FCST_VAR1_LEVELS = P850 +FCST_VAR1_THRESH = >=288, >=293, >=298 +OBS_VAR1_NAME = TMP +OBS_VAR1_LEVELS = P850 +OBS_VAR1_THRESH = >=288, 
>=293, >=298 +OBS_VAR1_OPTIONS = obs_error = { flag = TRUE; } + +FCST_VAR2_NAME = TMP +FCST_VAR2_LEVELS = P700 +FCST_VAR2_THRESH = >=273, >=278, >=283 +OBS_VAR2_NAME = TMP +OBS_VAR2_LEVELS = P700 +OBS_VAR2_THRESH = >=273, >=278, >=283 +OBS_VAR2_OPTIONS = obs_error = { flag = TRUE; } + +FCST_VAR3_NAME = TMP +FCST_VAR3_LEVELS = P500 +FCST_VAR3_THRESH = >=258, >=263, >=268 +OBS_VAR3_NAME = TMP +OBS_VAR3_LEVELS = P500 +OBS_VAR3_THRESH = >=258, >=263, >=268 +OBS_VAR3_OPTIONS = obs_error = { flag = TRUE; } + +FCST_VAR4_NAME = DPT +FCST_VAR4_LEVELS = P850 +FCST_VAR4_THRESH = >=273, >=278, >=283 +OBS_VAR4_NAME = DPT +OBS_VAR4_LEVELS = P850 +OBS_VAR4_THRESH = >=273, >=278, >=283 +OBS_VAR4_OPTIONS = obs_error = { flag = TRUE; } + +FCST_VAR5_NAME = DPT +FCST_VAR5_LEVELS = P700 +FCST_VAR5_THRESH = >=263, >=268, >=273 +OBS_VAR5_NAME = DPT +OBS_VAR5_LEVELS = P700 +OBS_VAR5_THRESH = >=263, >=268, >=273 +OBS_VAR5_OPTIONS = obs_error = { flag = TRUE; } + +FCST_VAR6_NAME = WIND +FCST_VAR6_LEVELS = P850 +FCST_VAR6_THRESH = >=5, >=10, >=15 +OBS_VAR6_NAME = WIND +OBS_VAR6_LEVELS = P850 +OBS_VAR6_THRESH = >=5, >=10, >=15 +OBS_VAR6_OPTIONS = obs_error = { flag = TRUE; } + +FCST_VAR7_NAME = WIND +FCST_VAR7_LEVELS = P700 +FCST_VAR7_THRESH = >=10, >=15, >=20 +OBS_VAR7_NAME = WIND +OBS_VAR7_LEVELS = P700 +OBS_VAR7_THRESH = >=10, >=15, >=20 +OBS_VAR7_OPTIONS = obs_error = { flag = TRUE; } + +FCST_VAR8_NAME = WIND +FCST_VAR8_LEVELS = P500 +FCST_VAR8_THRESH = >=15, >=21, >=26 +OBS_VAR8_NAME = WIND +OBS_VAR8_LEVELS = P500 +OBS_VAR8_THRESH = >=15, >=21, >=26 +OBS_VAR8_OPTIONS = obs_error = { flag = TRUE; } + +FCST_VAR9_NAME = WIND +FCST_VAR9_LEVELS = P250 +FCST_VAR9_THRESH = >=26, >=31, >=36, >=46, >=62 +OBS_VAR9_NAME = WIND +OBS_VAR9_LEVELS = P250 +OBS_VAR9_THRESH = >=26, >=31, >=36, >=46, >=62 +OBS_VAR9_OPTIONS = obs_error = { flag = TRUE; } + +FCST_VAR10_NAME = HGT +FCST_VAR10_LEVELS = P500 +FCST_VAR10_THRESH = >=5400, >=5600, >=5880 +OBS_VAR10_NAME = HGT +OBS_VAR10_LEVELS = P500
+OBS_VAR10_THRESH = >=5400, >=5600, >=5880 +OBS_VAR10_OPTIONS = obs_error = { flag = TRUE; } + +FCST_VAR11_NAME = CAPE +FCST_VAR11_LEVELS = L0 +FCST_VAR11_OPTIONS = cnt_thresh = [ >0 ]; +FCST_VAR11_THRESH = <=1000, >1000&&<2500, >=2500&&<4000, >=2500 +OBS_VAR11_NAME = CAPE +OBS_VAR11_LEVELS = L0-100000 +OBS_VAR11_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = UNION; +OBS_VAR11_THRESH = <=1000, >1000&&<2500, >=2500&&<4000, >=2500 + +FCST_VAR12_NAME = HPBL +FCST_VAR12_LEVELS = Z0 +FCST_VAR12_THRESH = <500, <1500, >1500 +OBS_VAR12_NAME = PBL +OBS_VAR12_LEVELS = L0 +OBS_VAR12_OPTIONS = desc = "TKE"; +OBS_VAR12_THRESH = <500, <1500, >1500 ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_ADPUPA_{OBTYPE} diff --git a/ush/templates/parm/metplus/GridStat_APCP01h_mean.conf b/ush/templates/parm/metplus/GridStat_APCP01h_mean.conf index 16cd58e7f..c43d63ead 100644 --- a/ush/templates/parm/metplus/GridStat_APCP01h_mean.conf +++ b/ush/templates/parm/metplus/GridStat_APCP01h_mean.conf @@ -81,7 +81,7 @@ GRID_STAT_REGRID_TO_GRID = FCST FCST_VAR1_NAME = APCP_01_A01_ENS_MEAN FCST_VAR1_LEVELS = A01 -BOTH_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge1.27,ge2.54 +BOTH_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge2.54 OBS_VAR1_NAME = APCP OBS_VAR1_LEVELS = A01 diff --git a/ush/templates/parm/metplus/GridStat_APCP01h_prob.conf b/ush/templates/parm/metplus/GridStat_APCP01h_prob.conf index 81b3d9a6d..52065ba7a 100644 --- a/ush/templates/parm/metplus/GridStat_APCP01h_prob.conf +++ b/ush/templates/parm/metplus/GridStat_APCP01h_prob.conf @@ -103,21 +103,13 @@ OBS_VAR3_NAME = APCP OBS_VAR3_LEVELS = A01 OBS_VAR3_THRESH = >=0.508 -FCST_VAR4_NAME = APCP_01_A01_ENS_FREQ_ge1.27 +FCST_VAR4_NAME = APCP_01_A01_ENS_FREQ_ge2.54 FCST_VAR4_LEVELS = A01 FCST_VAR4_THRESH = ==0.1 OBS_VAR4_NAME = APCP OBS_VAR4_LEVELS = A01 -OBS_VAR4_THRESH = >=1.27 - -FCST_VAR5_NAME = APCP_01_A01_ENS_FREQ_ge2.54 -FCST_VAR5_LEVELS = A01 -FCST_VAR5_THRESH = ==0.1 - -OBS_VAR5_NAME = APCP -OBS_VAR5_LEVELS = A01 -OBS_VAR5_THRESH = >=2.54 +OBS_VAR4_THRESH = 
>=2.54 # Neighborhood shape and widths diff --git a/ush/templates/parm/metplus/GridStat_APCP03h_mean.conf b/ush/templates/parm/metplus/GridStat_APCP03h_mean.conf index 536ce241f..67db9927d 100644 --- a/ush/templates/parm/metplus/GridStat_APCP03h_mean.conf +++ b/ush/templates/parm/metplus/GridStat_APCP03h_mean.conf @@ -75,7 +75,7 @@ GRID_STAT_REGRID_TO_GRID = FCST FCST_VAR1_NAME = APCP_A3_ENS_MEAN FCST_VAR1_LEVELS = A3 -BOTH_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge1.27,ge2.54,ge3.810,ge6.350 +BOTH_VAR1_THRESH = gt0.0,ge0.508,ge2.54,ge6.350 OBS_VAR1_NAME = APCP OBS_VAR1_LEVELS = A3 diff --git a/ush/templates/parm/metplus/GridStat_APCP03h_prob.conf b/ush/templates/parm/metplus/GridStat_APCP03h_prob.conf index 984eba7f0..90f16b522 100644 --- a/ush/templates/parm/metplus/GridStat_APCP03h_prob.conf +++ b/ush/templates/parm/metplus/GridStat_APCP03h_prob.conf @@ -80,54 +80,29 @@ OBS_VAR1_NAME = APCP OBS_VAR1_LEVELS = A03 OBS_VAR1_THRESH = >0.0 -FCST_VAR2_NAME = APCP_A3_ENS_FREQ_ge0.254 +FCST_VAR2_NAME = APCP_A3_ENS_FREQ_ge0.508 FCST_VAR2_LEVELS = A03 FCST_VAR2_THRESH = ==0.1 OBS_VAR2_NAME = APCP OBS_VAR2_LEVELS = A03 -OBS_VAR2_THRESH = >=0.254 +OBS_VAR2_THRESH = >=0.508 -FCST_VAR3_NAME = APCP_A3_ENS_FREQ_ge0.508 +FCST_VAR3_NAME = APCP_A3_ENS_FREQ_ge2.54 FCST_VAR3_LEVELS = A03 FCST_VAR3_THRESH = ==0.1 OBS_VAR3_NAME = APCP OBS_VAR3_LEVELS = A03 -OBS_VAR3_THRESH = >=0.508 +OBS_VAR3_THRESH = >=2.54 -FCST_VAR4_NAME = APCP_A3_ENS_FREQ_ge1.27 +FCST_VAR4_NAME = APCP_A3_ENS_FREQ_ge6.350 FCST_VAR4_LEVELS = A03 FCST_VAR4_THRESH = ==0.1 OBS_VAR4_NAME = APCP OBS_VAR4_LEVELS = A03 -OBS_VAR4_THRESH = >=1.27 - -FCST_VAR5_NAME = APCP_A3_ENS_FREQ_ge2.54 -FCST_VAR5_LEVELS = A03 -FCST_VAR5_THRESH = ==0.1 - -OBS_VAR5_NAME = APCP -OBS_VAR5_LEVELS = A03 -OBS_VAR5_THRESH = >=2.54 - -FCST_VAR6_NAME = APCP_A3_ENS_FREQ_ge3.810 -FCST_VAR6_LEVELS = A03 -FCST_VAR6_THRESH = ==0.1 - -OBS_VAR6_NAME = APCP -OBS_VAR6_LEVELS = A03 -OBS_VAR6_THRESH = >=3.810 - -FCST_VAR7_NAME = APCP_A3_ENS_FREQ_ge6.350 
-FCST_VAR7_LEVELS = A03 -FCST_VAR7_THRESH = ==0.1 - -OBS_VAR7_NAME = APCP -OBS_VAR7_LEVELS = A03 -OBS_VAR7_THRESH = >=6.350 - +OBS_VAR4_THRESH = >=6.350 # Neighborhood shape and widths GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE diff --git a/ush/templates/parm/metplus/GridStat_APCP06h_mean.conf b/ush/templates/parm/metplus/GridStat_APCP06h_mean.conf index b10707f0f..ef2ddb953 100644 --- a/ush/templates/parm/metplus/GridStat_APCP06h_mean.conf +++ b/ush/templates/parm/metplus/GridStat_APCP06h_mean.conf @@ -75,7 +75,7 @@ GRID_STAT_REGRID_TO_GRID = FCST FCST_VAR1_NAME = APCP_A6_ENS_MEAN FCST_VAR1_LEVELS = A06 -BOTH_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge1.27,ge2.54,ge3.810,ge6.350 +BOTH_VAR1_THRESH = gt0.0,ge2.54,ge6.350,ge12.700 OBS_VAR1_NAME = APCP OBS_VAR1_LEVELS = A06 diff --git a/ush/templates/parm/metplus/GridStat_APCP06h_prob.conf b/ush/templates/parm/metplus/GridStat_APCP06h_prob.conf index 7c5e08672..5f75e5097 100644 --- a/ush/templates/parm/metplus/GridStat_APCP06h_prob.conf +++ b/ush/templates/parm/metplus/GridStat_APCP06h_prob.conf @@ -80,54 +80,29 @@ OBS_VAR1_NAME = APCP OBS_VAR1_LEVELS = A06 OBS_VAR1_THRESH = >0.0 -FCST_VAR2_NAME = APCP_A6_ENS_FREQ_ge0.254 +FCST_VAR2_NAME = APCP_A6_ENS_FREQ_ge2.54 FCST_VAR2_LEVELS = A06 FCST_VAR2_THRESH = ==0.1 OBS_VAR2_NAME = APCP OBS_VAR2_LEVELS = A06 -OBS_VAR2_THRESH = >=0.254 +OBS_VAR2_THRESH = >=2.54 -FCST_VAR3_NAME = APCP_A6_ENS_FREQ_ge0.508 +FCST_VAR3_NAME = APCP_A6_ENS_FREQ_ge6.350 FCST_VAR3_LEVELS = A06 FCST_VAR3_THRESH = ==0.1 OBS_VAR3_NAME = APCP OBS_VAR3_LEVELS = A06 -OBS_VAR3_THRESH = >=0.508 +OBS_VAR3_THRESH = >=6.350 -FCST_VAR4_NAME = APCP_A6_ENS_FREQ_ge1.27 -FCST_VAR4_LEVELS = A06 +FCST_VAR4_NAME = APCP_A6_ENS_FREQ_ge12.700 +FCST_VAR4_LEVELS = A06 FCST_VAR4_THRESH = ==0.1 OBS_VAR4_NAME = APCP OBS_VAR4_LEVELS = A06 -OBS_VAR4_THRESH = >=1.27 - -FCST_VAR5_NAME = APCP_A6_ENS_FREQ_ge2.54 -FCST_VAR5_LEVELS = A06 -FCST_VAR5_THRESH = ==0.1 - -OBS_VAR5_NAME = APCP -OBS_VAR5_LEVELS = A06 -OBS_VAR5_THRESH = >=2.54 - 
-FCST_VAR6_NAME = APCP_A6_ENS_FREQ_ge3.810 -FCST_VAR6_LEVELS = A06 -FCST_VAR6_THRESH = ==0.1 - -OBS_VAR6_NAME = APCP -OBS_VAR6_LEVELS = A06 -OBS_VAR6_THRESH = >=3.810 - -FCST_VAR7_NAME = APCP_A6_ENS_FREQ_ge6.350 -FCST_VAR7_LEVELS = A06 -FCST_VAR7_THRESH = ==0.1 - -OBS_VAR7_NAME = APCP -OBS_VAR7_LEVELS = A06 -OBS_VAR7_THRESH = >=6.350 - +OBS_VAR4_THRESH = >=12.700 # Neighborhood shape and widths GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE diff --git a/ush/templates/parm/metplus/GridStat_APCP24h_mean.conf b/ush/templates/parm/metplus/GridStat_APCP24h_mean.conf index aa0c53e21..ad1308e20 100644 --- a/ush/templates/parm/metplus/GridStat_APCP24h_mean.conf +++ b/ush/templates/parm/metplus/GridStat_APCP24h_mean.conf @@ -75,7 +75,7 @@ GRID_STAT_REGRID_TO_GRID = FCST FCST_VAR1_NAME = APCP_A24_ENS_MEAN FCST_VAR1_LEVELS = A24 -BOTH_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge1.27,ge2.54,ge3.810,ge6.350 +BOTH_VAR1_THRESH = gt0.0,ge6.350,ge12.700,ge25.400 OBS_VAR1_NAME = APCP OBS_VAR1_LEVELS = A24 diff --git a/ush/templates/parm/metplus/GridStat_APCP24h_prob.conf b/ush/templates/parm/metplus/GridStat_APCP24h_prob.conf index 26d6e6368..a3659ea7b 100644 --- a/ush/templates/parm/metplus/GridStat_APCP24h_prob.conf +++ b/ush/templates/parm/metplus/GridStat_APCP24h_prob.conf @@ -80,54 +80,29 @@ OBS_VAR1_NAME = APCP OBS_VAR1_LEVELS = A24 OBS_VAR1_THRESH = >0.0 -FCST_VAR2_NAME = APCP_A24_ENS_FREQ_ge0.254 -FCST_VAR2_LEVELS = A24 +FCST_VAR2_NAME = APCP_A24_ENS_FREQ_ge6.350 +FCST_VAR2_LEVELS = A24 FCST_VAR2_THRESH = ==0.1 OBS_VAR2_NAME = APCP OBS_VAR2_LEVELS = A24 -OBS_VAR2_THRESH = >=0.254 +OBS_VAR2_THRESH = >=6.350 -FCST_VAR3_NAME = APCP_A24_ENS_FREQ_ge0.508 +FCST_VAR3_NAME = APCP_A24_ENS_FREQ_ge12.700 FCST_VAR3_LEVELS = A24 FCST_VAR3_THRESH = ==0.1 OBS_VAR3_NAME = APCP OBS_VAR3_LEVELS = A24 -OBS_VAR3_THRESH = >=0.508 +OBS_VAR3_THRESH = >=12.700 -FCST_VAR4_NAME = APCP_A24_ENS_FREQ_ge1.27 +FCST_VAR4_NAME = APCP_A24_ENS_FREQ_ge25.400 FCST_VAR4_LEVELS = A24 FCST_VAR4_THRESH = ==0.1 OBS_VAR4_NAME = 
APCP OBS_VAR4_LEVELS = A24 -OBS_VAR4_THRESH = >=1.27 - -FCST_VAR5_NAME = APCP_A24_ENS_FREQ_ge2.54 -FCST_VAR5_LEVELS = A24 -FCST_VAR5_THRESH = ==0.1 - -OBS_VAR5_NAME = APCP -OBS_VAR5_LEVELS = A24 -OBS_VAR5_THRESH = >=2.54 - -FCST_VAR6_NAME = APCP_A24_ENS_FREQ_ge3.810 -FCST_VAR6_LEVELS = A24 -FCST_VAR6_THRESH = ==0.1 - -OBS_VAR6_NAME = APCP -OBS_VAR6_LEVELS = A24 -OBS_VAR6_THRESH = >=3.810 - -FCST_VAR7_NAME = APCP_A24_ENS_FREQ_ge6.350 -FCST_VAR7_LEVELS = A24 -FCST_VAR7_THRESH = ==0.1 - -OBS_VAR7_NAME = APCP -OBS_VAR7_LEVELS = A24 -OBS_VAR7_THRESH = >=6.350 - +OBS_VAR4_THRESH = >=25.400 # Neighborhood shape and widths GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE diff --git a/ush/templates/parm/metplus/PointStat_conus_sfc.conf b/ush/templates/parm/metplus/PointStat_conus_sfc.conf index 5fd3f58c9..4eccff4c1 100644 --- a/ush/templates/parm/metplus/PointStat_conus_sfc.conf +++ b/ush/templates/parm/metplus/PointStat_conus_sfc.conf @@ -153,9 +153,12 @@ BOTH_VAR6_OPTIONS = GRIB2_pdt = 0; ;; derive instantaneous 10-m wind from U/V co BOTH_VAR7_NAME = PRMSL BOTH_VAR7_LEVELS = Z0 -BOTH_VAR8_NAME = TCDC -BOTH_VAR8_LEVELS = L0 -BOTH_VAR8_OPTIONS = GRIB_lvl_typ = 200; interp = { type = [ { method = NEAREST; width = 1; } ]; } +FCST_VAR8_NAME = TCDC +FCST_VAR8_LEVELS = L0 +FCST_VAR8_OPTIONS = GRIB_lvl_typ = 200; GRIB2_ipdtmpl_index = 8; GRIB2_ipdtmpl_val = {lead?fmt=%H}; +OBS_VAR8_NAME = TCDC +OBS_VAR8_LEVELS = L0 +OBS_VAR8_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } BOTH_VAR9_NAME = VIS BOTH_VAR9_LEVELS = L0 diff --git a/ush/templates/parm/metplus/PointStat_conus_sfc_mean.conf b/ush/templates/parm/metplus/PointStat_conus_sfc_mean.conf index c5adc81df..fcc45a4c9 100644 --- a/ush/templates/parm/metplus/PointStat_conus_sfc_mean.conf +++ b/ush/templates/parm/metplus/PointStat_conus_sfc_mean.conf @@ -128,24 +128,24 @@ POINT_STAT_MESSAGE_TYPE = ADPSFC # (optional) FCST_VARn_OPTION FCST_VAR1_NAME = TMP_Z2_ENS_MEAN FCST_VAR1_LEVELS = Z2 -FCST_VAR1_THRESH = >=293, >=298, 
>=303 +FCST_VAR1_THRESH = >=268, >=273, >=278, >=293, >=298, >=303 OBS_VAR1_NAME = TMP OBS_VAR1_LEVELS = Z2 -OBS_VAR1_THRESH = >=293, >=298, >=303 +OBS_VAR1_THRESH = >=268, >=273, >=278, >=293, >=298, >=303 FCST_VAR2_NAME = DPT_Z2_ENS_MEAN FCST_VAR2_LEVELS = Z2 -FCST_VAR2_THRESH = >=288, >=293, >=298 +FCST_VAR2_THRESH = >=263, >=268, >=273, >=288, >=293, >=298 OBS_VAR2_NAME = DPT OBS_VAR2_LEVELS = Z2 -OBS_VAR2_THRESH = >=288, >=293, >=298 +OBS_VAR2_THRESH = >=263, >=268, >=273, >=288, >=293, >=298 FCST_VAR3_NAME = WIND_Z10_ENS_MEAN FCST_VAR3_LEVELS = Z10 -FCST_VAR3_THRESH = >=5, >=10 +FCST_VAR3_THRESH = >=5, >=10, >=15 OBS_VAR3_NAME = WIND OBS_VAR3_LEVELS = Z10 -OBS_VAR3_THRESH = >=5, >=10 +OBS_VAR3_THRESH = >=5, >=10, >=15 diff --git a/ush/templates/parm/metplus/PointStat_conus_sfc_prob.conf b/ush/templates/parm/metplus/PointStat_conus_sfc_prob.conf index f7ef100f7..bffc327ad 100644 --- a/ush/templates/parm/metplus/PointStat_conus_sfc_prob.conf +++ b/ush/templates/parm/metplus/PointStat_conus_sfc_prob.conf @@ -126,69 +126,201 @@ POINT_STAT_MESSAGE_TYPE = ADPSFC # Variables and levels as specified in the field dictionary of the MET # point_stat configuration file. 
Specify as FCST_VARn_NAME, FCST_VARn_LEVELS, # (optional) FCST_VARn_OPTION -FCST_VAR1_NAME = TMP_Z2_ENS_FREQ_ge293 +FCST_VAR1_NAME = TMP_Z2_ENS_FREQ_ge268 FCST_VAR1_LEVELS = (*,*) FCST_VAR1_THRESH = ==0.1 OBS_VAR1_NAME = TMP OBS_VAR1_LEVELS = Z2 -OBS_VAR1_THRESH = >=293 +OBS_VAR1_THRESH = >=268 -FCST_VAR2_NAME = TMP_Z2_ENS_FREQ_ge298 +FCST_VAR2_NAME = TMP_Z2_ENS_FREQ_ge273 FCST_VAR2_LEVELS = (*,*) FCST_VAR2_THRESH = ==0.1 OBS_VAR2_NAME = TMP OBS_VAR2_LEVELS = Z2 -OBS_VAR2_THRESH = >=298 +OBS_VAR2_THRESH = >=273 -FCST_VAR3_NAME = TMP_Z2_ENS_FREQ_ge303 +FCST_VAR3_NAME = TMP_Z2_ENS_FREQ_ge278 FCST_VAR3_LEVELS = (*,*) FCST_VAR3_THRESH = ==0.1 OBS_VAR3_NAME = TMP OBS_VAR3_LEVELS = Z2 -OBS_VAR3_THRESH = >=303 +OBS_VAR3_THRESH = >=278 -FCST_VAR4_NAME = DPT_Z2_ENS_FREQ_ge288 +FCST_VAR4_NAME = TMP_Z2_ENS_FREQ_ge293 FCST_VAR4_LEVELS = (*,*) FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = DPT +OBS_VAR4_NAME = TMP OBS_VAR4_LEVELS = Z2 -OBS_VAR4_THRESH = >=288 +OBS_VAR4_THRESH = >=293 -FCST_VAR5_NAME = DPT_Z2_ENS_FREQ_ge293 +FCST_VAR5_NAME = TMP_Z2_ENS_FREQ_ge298 FCST_VAR5_LEVELS = (*,*) FCST_VAR5_THRESH = ==0.1 -OBS_VAR5_NAME = DPT +OBS_VAR5_NAME = TMP OBS_VAR5_LEVELS = Z2 -OBS_VAR5_THRESH = >=293 +OBS_VAR5_THRESH = >=298 -FCST_VAR6_NAME = DPT_Z2_ENS_FREQ_ge298 +FCST_VAR6_NAME = TMP_Z2_ENS_FREQ_ge303 FCST_VAR6_LEVELS = (*,*) FCST_VAR6_THRESH = ==0.1 -OBS_VAR6_NAME = DPT +OBS_VAR6_NAME = TMP OBS_VAR6_LEVELS = Z2 -OBS_VAR6_THRESH = >=298 +OBS_VAR6_THRESH = >=303 -FCST_VAR7_NAME = WIND_Z10_ENS_FREQ_ge5 -FCST_VAR7_LEVELS = Z10 +FCST_VAR7_NAME = DPT_Z2_ENS_FREQ_ge263 +FCST_VAR7_LEVELS = (*,*) FCST_VAR7_THRESH = ==0.1 -OBS_VAR7_NAME = WIND -OBS_VAR7_LEVELS = Z10 -OBS_VAR7_THRESH = >=5 +OBS_VAR7_NAME = DPT +OBS_VAR7_LEVELS = Z2 +OBS_VAR7_THRESH = >=263 -FCST_VAR8_NAME = WIND_Z10_ENS_FREQ_ge10 -FCST_VAR8_LEVELS = Z10 +FCST_VAR8_NAME = DPT_Z2_ENS_FREQ_ge268 +FCST_VAR8_LEVELS = (*,*) FCST_VAR8_THRESH = ==0.1 -OBS_VAR8_NAME = WIND -OBS_VAR8_LEVELS = Z10 -OBS_VAR8_THRESH = >=10 +OBS_VAR8_NAME = 
DPT +OBS_VAR8_LEVELS = Z2 +OBS_VAR8_THRESH = >=268 + +FCST_VAR9_NAME = DPT_Z2_ENS_FREQ_ge273 +FCST_VAR9_LEVELS = (*,*) +FCST_VAR9_THRESH = ==0.1 + +OBS_VAR9_NAME = DPT +OBS_VAR9_LEVELS = Z2 +OBS_VAR9_THRESH = >=273 + +FCST_VAR10_NAME = DPT_Z2_ENS_FREQ_ge288 +FCST_VAR10_LEVELS = (*,*) +FCST_VAR10_THRESH = ==0.1 + +OBS_VAR10_NAME = DPT +OBS_VAR10_LEVELS = Z2 +OBS_VAR10_THRESH = >=288 + +FCST_VAR11_NAME = DPT_Z2_ENS_FREQ_ge293 +FCST_VAR11_LEVELS = (*,*) +FCST_VAR11_THRESH = ==0.1 + +OBS_VAR11_NAME = DPT +OBS_VAR11_LEVELS = Z2 +OBS_VAR11_THRESH = >=293 + +FCST_VAR12_NAME = DPT_Z2_ENS_FREQ_ge298 +FCST_VAR12_LEVELS = (*,*) +FCST_VAR12_THRESH = ==0.1 + +OBS_VAR12_NAME = DPT +OBS_VAR12_LEVELS = Z2 +OBS_VAR12_THRESH = >=298 + +FCST_VAR13_NAME = WIND_Z10_ENS_FREQ_ge5 +FCST_VAR13_LEVELS = Z10 +FCST_VAR13_THRESH = ==0.1 + +OBS_VAR13_NAME = WIND +OBS_VAR13_LEVELS = Z10 +OBS_VAR13_THRESH = >=5 + +FCST_VAR14_NAME = WIND_Z10_ENS_FREQ_ge10 +FCST_VAR14_LEVELS = Z10 +FCST_VAR14_THRESH = ==0.1 + +OBS_VAR14_NAME = WIND +OBS_VAR14_LEVELS = Z10 +OBS_VAR14_THRESH = >=10 + +FCST_VAR15_NAME = WIND_Z10_ENS_FREQ_ge15 +FCST_VAR15_LEVELS = Z10 +FCST_VAR15_THRESH = ==0.1 + +OBS_VAR15_NAME = WIND +OBS_VAR15_LEVELS = Z10 +OBS_VAR15_THRESH = >=15 + +FCST_VAR16_NAME = TCDC_L0_ENS_FREQ_lt25 +FCST_VAR16_LEVELS = L0 +FCST_VAR16_THRESH = ==0.1 + +OBS_VAR16_NAME = TCDC +OBS_VAR16_LEVELS = L0 +OBS_VAR16_THRESH = <25 + +FCST_VAR17_NAME = TCDC_L0_ENS_FREQ_gt75 +FCST_VAR17_LEVELS = L0 +FCST_VAR17_THRESH = ==0.1 + +OBS_VAR17_NAME = TCDC +OBS_VAR17_LEVELS = L0 +OBS_VAR17_THRESH = >75 + +FCST_VAR18_NAME = VIS_L0_ENS_FREQ_lt1609 +FCST_VAR18_LEVELS = L0 +FCST_VAR18_THRESH = ==0.1 +FCST_VAR18_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + +OBS_VAR18_NAME = VIS +OBS_VAR18_LEVELS = L0 +OBS_VAR18_THRESH = <1609 +OBS_VAR18_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + +FCST_VAR19_NAME = VIS_L0_ENS_FREQ_lt8045 +FCST_VAR19_LEVELS = L0 +FCST_VAR19_THRESH = ==0.1 
+FCST_VAR19_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + +OBS_VAR19_NAME = VIS +OBS_VAR19_LEVELS = L0 +OBS_VAR19_THRESH = <8045 +OBS_VAR19_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + +FCST_VAR20_NAME = VIS_L0_ENS_FREQ_ge8045 +FCST_VAR20_LEVELS = L0 +FCST_VAR20_THRESH = ==0.1 +FCST_VAR20_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + +OBS_VAR20_NAME = VIS +OBS_VAR20_LEVELS = L0 +OBS_VAR20_THRESH = >=8045 +OBS_VAR20_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + +FCST_VAR21_NAME = HGT_L0_ENS_FREQ_lt152 +FCST_VAR21_LEVELS = L0 +FCST_VAR21_THRESH = ==0.1 +FCST_VAR21_OPTIONS = desc = "CEILING"; + +OBS_VAR21_NAME = CEILING +OBS_VAR21_LEVELS = L0 +OBS_VAR21_THRESH = <152 +OBS_VAR21_OPTIONS = GRIB_lvl_typ = 215; interp = { type = [ { method = NEAREST; width = 1; } ]; } + +FCST_VAR22_NAME = HGT_L0_ENS_FREQ_lt1520 +FCST_VAR22_LEVELS = L0 +FCST_VAR22_THRESH = ==0.1 +FCST_VAR22_OPTIONS = desc = "CEILING"; + +OBS_VAR22_NAME = CEILING +OBS_VAR22_LEVELS = L0 +OBS_VAR22_THRESH = <1520 +OBS_VAR22_OPTIONS = GRIB_lvl_typ = 215; interp = { type = [ { method = NEAREST; width = 1; } ]; } + +FCST_VAR23_NAME = HGT_L0_ENS_FREQ_ge914 +FCST_VAR23_LEVELS = L0 +FCST_VAR23_THRESH = ==0.1 +FCST_VAR23_OPTIONS = desc = "CEILING"; + +OBS_VAR23_NAME = CEILING +OBS_VAR23_LEVELS = L0 +OBS_VAR23_THRESH = >=914 +OBS_VAR23_OPTIONS = GRIB_lvl_typ = 215; interp = { type = [ { method = NEAREST; width = 1; } ]; } # Forecast data description variables FCST_IS_PROB = True diff --git a/ush/templates/parm/metplus/PointStat_upper_air_mean.conf b/ush/templates/parm/metplus/PointStat_upper_air_mean.conf index e6a1a062a..3f7dfc8f2 100644 --- a/ush/templates/parm/metplus/PointStat_upper_air_mean.conf +++ b/ush/templates/parm/metplus/PointStat_upper_air_mean.conf @@ -135,18 +135,93 @@ OBS_VAR1_NAME = TMP OBS_VAR1_LEVELS = P850 OBS_VAR1_THRESH = >=288, >=293, >=298 -FCST_VAR2_NAME = DPT_P850_ENS_MEAN -FCST_VAR2_LEVELS = 
P850
-FCST_VAR2_THRESH = >=283, >=288, >=293
-
-OBS_VAR2_NAME = DPT
-OBS_VAR2_LEVELS = P850
-OBS_VAR2_THRESH = >=283, >=288, >=293
-
-FCST_VAR3_NAME = WIND_P850_ENS_MEAN
-FCST_VAR3_LEVELS = P850
-FCST_VAR3_THRESH = >=5, >=10
-
-OBS_VAR3_NAME = WIND
-OBS_VAR3_LEVELS = P850
-OBS_VAR3_THRESH = >=5, >=10
+FCST_VAR2_NAME = TMP_P700_ENS_MEAN
+FCST_VAR2_LEVELS = P700
+FCST_VAR2_THRESH = >=273, >=278, >=283
+
+OBS_VAR2_NAME = TMP
+OBS_VAR2_LEVELS = P700
+OBS_VAR2_THRESH = >=273, >=278, >=283
+
+FCST_VAR3_NAME = TMP_P500_ENS_MEAN
+FCST_VAR3_LEVELS = P500
+FCST_VAR3_THRESH = >=258, >=263, >=268
+
+OBS_VAR3_NAME = TMP
+OBS_VAR3_LEVELS = P500
+OBS_VAR3_THRESH = >=258, >=263, >=268
+
+FCST_VAR4_NAME = DPT_P850_ENS_MEAN
+FCST_VAR4_LEVELS = P850
+FCST_VAR4_THRESH = >=273, >=278, >=283
+
+OBS_VAR4_NAME = DPT
+OBS_VAR4_LEVELS = P850
+OBS_VAR4_THRESH = >=273, >=278, >=283
+
+FCST_VAR5_NAME = DPT_P700_ENS_MEAN
+FCST_VAR5_LEVELS = P700
+FCST_VAR5_THRESH = >=263, >=268, >=273
+
+OBS_VAR5_NAME = DPT
+OBS_VAR5_LEVELS = P700
+OBS_VAR5_THRESH = >=263, >=268, >=273
+
+FCST_VAR6_NAME = WIND_P850_ENS_MEAN
+FCST_VAR6_LEVELS = P850
+FCST_VAR6_THRESH = >=5, >=10, >=15
+
+OBS_VAR6_NAME = WIND
+OBS_VAR6_LEVELS = P850
+OBS_VAR6_THRESH = >=5, >=10, >=15
+
+FCST_VAR7_NAME = WIND_P700_ENS_MEAN
+FCST_VAR7_LEVELS = P700
+FCST_VAR7_THRESH = >=10, >=15, >=20
+
+OBS_VAR7_NAME = WIND
+OBS_VAR7_LEVELS = P700
+OBS_VAR7_THRESH = >=10, >=15, >=20
+
+FCST_VAR8_NAME = WIND_P500_ENS_MEAN
+FCST_VAR8_LEVELS = P500
+FCST_VAR8_THRESH = >=15, >=21, >=26
+
+OBS_VAR8_NAME = WIND
+OBS_VAR8_LEVELS = P500
+OBS_VAR8_THRESH = >=15, >=21, >=26
+
+FCST_VAR9_NAME = WIND_P250_ENS_MEAN
+FCST_VAR9_LEVELS = P250
+FCST_VAR9_THRESH = >=26, >=31, >=46, >=62
+
+OBS_VAR9_NAME = WIND
+OBS_VAR9_LEVELS = P250
+OBS_VAR9_THRESH = >=26, >=31, >=46, >=62
+
+FCST_VAR10_NAME = HGT_P500_ENS_MEAN
+FCST_VAR10_LEVELS = P500
+FCST_VAR10_THRESH = >=5400, >=5600, >=5880
+
+OBS_VAR10_NAME = HGT
+OBS_VAR10_LEVELS = P500
+OBS_VAR10_THRESH = >=5400, >=5600, 
>=5880 + +FCST_VAR11_NAME = CAPE_L0_ENS_MEAN +FCST_VAR11_LEVELS = L0 +FCST_VAR11_OPTIONS = cnt_thresh = [ >0 ]; +FCST_VAR11_THRESH = <=1000, >1000&&<2500, >2500&&<4000, >2500 + +OBS_VAR11_NAME = CAPE +OBS_VAR11_LEVELS = L0-100000 +OBS_VAR11_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = UNION; +OBS_VAR11_THRESH = <=1000, >1000&&<2500, >2500&&<4000, >2500 + +FCST_VAR12_NAME = HPBL_Z0_ENS_MEAN +FCST_VAR12_LEVELS = Z0 +FCST_VAR12_THRESH = <500, <1500, >1500 + +OBS_VAR12_NAME = PBL +OBS_VAR12_LEVELS = L0 +OBS_VAR12_OPTIONS = desc = "TKE"; +OBS_VAR12_THRESH = <500, <1500, >1500 diff --git a/ush/templates/parm/metplus/PointStat_upper_air_prob.conf b/ush/templates/parm/metplus/PointStat_upper_air_prob.conf index bb3790689..cb653746b 100644 --- a/ush/templates/parm/metplus/PointStat_upper_air_prob.conf +++ b/ush/templates/parm/metplus/PointStat_upper_air_prob.conf @@ -151,45 +151,300 @@ OBS_VAR3_NAME = TMP OBS_VAR3_LEVELS = P850 OBS_VAR3_THRESH = >=298 -FCST_VAR4_NAME = DPT_P850_ENS_FREQ_ge283 +FCST_VAR4_NAME = TMP_P700_ENS_FREQ_ge273 FCST_VAR4_LEVELS = (*,*) FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = DPT -OBS_VAR4_LEVELS = P850 -OBS_VAR4_THRESH = >=283 +OBS_VAR4_NAME = TMP +OBS_VAR4_LEVELS = P700 +OBS_VAR4_THRESH = >=273 -FCST_VAR5_NAME = DPT_P850_ENS_FREQ_ge288 +FCST_VAR5_NAME = TMP_P700_ENS_FREQ_ge278 FCST_VAR5_LEVELS = (*,*) FCST_VAR5_THRESH = ==0.1 -OBS_VAR5_NAME = DPT -OBS_VAR5_LEVELS = P850 -OBS_VAR5_THRESH = >=288 +OBS_VAR5_NAME = TMP +OBS_VAR5_LEVELS = P700 +OBS_VAR5_THRESH = >=278 -FCST_VAR6_NAME = DPT_P850_ENS_FREQ_ge293 +FCST_VAR6_NAME = TMP_P700_ENS_FREQ_ge283 FCST_VAR6_LEVELS = (*,*) FCST_VAR6_THRESH = ==0.1 -OBS_VAR6_NAME = DPT -OBS_VAR6_LEVELS = P850 -OBS_VAR6_THRESH = >=293 +OBS_VAR6_NAME = TMP +OBS_VAR6_LEVELS = P700 +OBS_VAR6_THRESH = >=283 -FCST_VAR7_NAME = WIND_P850_ENS_FREQ_ge5 +FCST_VAR7_NAME = TMP_P500_ENS_FREQ_ge258 FCST_VAR7_LEVELS = (*,*) FCST_VAR7_THRESH = ==0.1 -OBS_VAR7_NAME = WIND -OBS_VAR7_LEVELS = P850 -OBS_VAR7_THRESH = >=5 +OBS_VAR7_NAME = TMP 
+OBS_VAR7_LEVELS = P500 +OBS_VAR7_THRESH = >=258 -FCST_VAR8_NAME = WIND_P850_ENS_FREQ_ge10 +FCST_VAR8_NAME = TMP_P500_ENS_FREQ_ge263 FCST_VAR8_LEVELS = (*,*) FCST_VAR8_THRESH = ==0.1 -OBS_VAR8_NAME = WIND -OBS_VAR8_LEVELS = P850 -OBS_VAR8_THRESH = >=10 +OBS_VAR8_NAME = TMP +OBS_VAR8_LEVELS = P500 +OBS_VAR8_THRESH = >=263 + +FCST_VAR9_NAME = TMP_P500_ENS_FREQ_ge268 +FCST_VAR9_LEVELS = (*,*) +FCST_VAR9_THRESH = ==0.1 + +OBS_VAR9_NAME = TMP +OBS_VAR9_LEVELS = P500 +OBS_VAR9_THRESH = >=268 + +FCST_VAR10_NAME = DPT_P850_ENS_FREQ_ge273 +FCST_VAR10_LEVELS = (*,*) +FCST_VAR10_THRESH = ==0.1 + +OBS_VAR10_NAME = DPT +OBS_VAR10_LEVELS = P850 +OBS_VAR10_THRESH = >=273 + +FCST_VAR11_NAME = DPT_P850_ENS_FREQ_ge278 +FCST_VAR11_LEVELS = (*,*) +FCST_VAR11_THRESH = ==0.1 + +OBS_VAR11_NAME = DPT +OBS_VAR11_LEVELS = P850 +OBS_VAR11_THRESH = >=278 + +FCST_VAR12_NAME = DPT_P850_ENS_FREQ_ge283 +FCST_VAR12_LEVELS = (*,*) +FCST_VAR12_THRESH = ==0.1 + +OBS_VAR12_NAME = DPT +OBS_VAR12_LEVELS = P850 +OBS_VAR12_THRESH = >=283 + +FCST_VAR13_NAME = DPT_P700_ENS_FREQ_ge263 +FCST_VAR13_LEVELS = (*,*) +FCST_VAR13_THRESH = ==0.1 + +OBS_VAR13_NAME = DPT +OBS_VAR13_LEVELS = P700 +OBS_VAR13_THRESH = >=263 + +FCST_VAR14_NAME = DPT_P700_ENS_FREQ_ge268 +FCST_VAR14_LEVELS = (*,*) +FCST_VAR14_THRESH = ==0.1 + +OBS_VAR14_NAME = DPT +OBS_VAR14_LEVELS = P700 +OBS_VAR14_THRESH = >=268 + +FCST_VAR15_NAME = DPT_P700_ENS_FREQ_ge273 +FCST_VAR15_LEVELS = (*,*) +FCST_VAR15_THRESH = ==0.1 + +OBS_VAR15_NAME = DPT +OBS_VAR15_LEVELS = P700 +OBS_VAR15_THRESH = >=273 + +FCST_VAR16_NAME = WIND_P850_ENS_FREQ_ge5 +FCST_VAR16_LEVELS = (*,*) +FCST_VAR16_THRESH = ==0.1 + +OBS_VAR16_NAME = WIND +OBS_VAR16_LEVELS = P850 +OBS_VAR16_THRESH = >=5 + +FCST_VAR17_NAME = WIND_P850_ENS_FREQ_ge10 +FCST_VAR17_LEVELS = (*,*) +FCST_VAR17_THRESH = ==0.1 + +OBS_VAR17_NAME = WIND +OBS_VAR17_LEVELS = P850 +OBS_VAR17_THRESH = >=10 + +FCST_VAR18_NAME = WIND_P850_ENS_FREQ_ge15 +FCST_VAR18_LEVELS = (*,*) +FCST_VAR18_THRESH = ==0.1 + +OBS_VAR18_NAME = 
WIND +OBS_VAR18_LEVELS = P850 +OBS_VAR18_THRESH = >=15 + +FCST_VAR19_NAME = WIND_P700_ENS_FREQ_ge10 +FCST_VAR19_LEVELS = (*,*) +FCST_VAR19_THRESH = ==0.1 + +OBS_VAR19_NAME = WIND +OBS_VAR19_LEVELS = P700 +OBS_VAR19_THRESH = >=10 + +FCST_VAR20_NAME = WIND_P700_ENS_FREQ_ge15 +FCST_VAR20_LEVELS = (*,*) +FCST_VAR20_THRESH = ==0.1 + +OBS_VAR20_NAME = WIND +OBS_VAR20_LEVELS = P700 +OBS_VAR20_THRESH = >=15 + +FCST_VAR21_NAME = WIND_P700_ENS_FREQ_ge20 +FCST_VAR21_LEVELS = (*,*) +FCST_VAR21_THRESH = ==0.1 + +OBS_VAR21_NAME = WIND +OBS_VAR21_LEVELS = P700 +OBS_VAR21_THRESH = >=20 + +FCST_VAR22_NAME = WIND_P500_ENS_FREQ_ge15 +FCST_VAR22_LEVELS = (*,*) +FCST_VAR22_THRESH = ==0.1 + +OBS_VAR22_NAME = WIND +OBS_VAR22_LEVELS = P500 +OBS_VAR22_THRESH = >=15 + +FCST_VAR23_NAME = WIND_P500_ENS_FREQ_ge21 +FCST_VAR23_LEVELS = (*,*) +FCST_VAR23_THRESH = ==0.1 + +OBS_VAR23_NAME = WIND +OBS_VAR23_LEVELS = P500 +OBS_VAR23_THRESH = >=21 + +FCST_VAR24_NAME = WIND_P500_ENS_FREQ_ge26 +FCST_VAR24_LEVELS = (*,*) +FCST_VAR24_THRESH = ==0.1 + +OBS_VAR24_NAME = WIND +OBS_VAR24_LEVELS = P500 +OBS_VAR24_THRESH = >=26 + +FCST_VAR25_NAME = WIND_P250_ENS_FREQ_ge26 +FCST_VAR25_LEVELS = (*,*) +FCST_VAR25_THRESH = ==0.1 + +OBS_VAR25_NAME = WIND +OBS_VAR25_LEVELS = P250 +OBS_VAR25_THRESH = >=26 + +FCST_VAR26_NAME = WIND_P250_ENS_FREQ_ge31 +FCST_VAR26_LEVELS = (*,*) +FCST_VAR26_THRESH = ==0.1 + +OBS_VAR26_NAME = WIND +OBS_VAR26_LEVELS = P250 +OBS_VAR26_THRESH = >=31 + +FCST_VAR27_NAME = WIND_P250_ENS_FREQ_ge36 +FCST_VAR27_LEVELS = (*,*) +FCST_VAR27_THRESH = ==0.1 + +OBS_VAR27_NAME = WIND +OBS_VAR27_LEVELS = P250 +OBS_VAR27_THRESH = >=36 + +FCST_VAR28_NAME = WIND_P250_ENS_FREQ_ge46 +FCST_VAR28_LEVELS = (*,*) +FCST_VAR28_THRESH = ==0.1 + +OBS_VAR28_NAME = WIND +OBS_VAR28_LEVELS = P250 +OBS_VAR28_THRESH = >=46 + +FCST_VAR29_NAME = WIND_P250_ENS_FREQ_ge62 +FCST_VAR29_LEVELS = (*,*) +FCST_VAR29_THRESH = ==0.1 + +OBS_VAR29_NAME = WIND +OBS_VAR29_LEVELS = P250 +OBS_VAR29_THRESH = >=62 + +FCST_VAR30_NAME = 
HGT_P500_ENS_FREQ_ge5400 +FCST_VAR30_LEVELS = (*,*) +FCST_VAR30_THRESH = ==0.1 + +OBS_VAR30_NAME = HGT +OBS_VAR30_LEVELS = P500 +OBS_VAR30_THRESH = >=5400 + +FCST_VAR31_NAME = HGT_P500_ENS_FREQ_ge5600 +FCST_VAR31_LEVELS = (*,*) +FCST_VAR31_THRESH = ==0.1 + +OBS_VAR31_NAME = HGT +OBS_VAR31_LEVELS = P500 +OBS_VAR31_THRESH = >=5600 + +FCST_VAR32_NAME = HGT_P500_ENS_FREQ_ge5880 +FCST_VAR32_LEVELS = (*,*) +FCST_VAR32_THRESH = ==0.1 + +OBS_VAR32_NAME = HGT +OBS_VAR32_LEVELS = P500 +OBS_VAR32_THRESH = >=5880 + +FCST_VAR33_NAME = CAPE_L0_ENS_FREQ_le1000 +FCST_VAR33_LEVELS = (*,*) +FCST_VAR33_THRESH = ==0.1 + +OBS_VAR33_NAME = CAPE +OBS_VAR33_LEVELS = L0-100000 +OBS_VAR33_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = UNION; +OBS_VAR33_THRESH = <=1000 + +FCST_VAR34_NAME = CAPE_L0_ENS_FREQ_gt1000.and.lt2500 +FCST_VAR34_LEVELS = (*,*) +FCST_VAR34_THRESH = ==0.1 + +OBS_VAR34_NAME = CAPE +OBS_VAR34_LEVELS = L0-100000 +OBS_VAR34_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = UNION; +OBS_VAR34_THRESH = >1000&&<2500 + +FCST_VAR35_NAME = CAPE_L0_ENS_FREQ_gt2500.and.lt4000 +FCST_VAR35_LEVELS = (*,*) +FCST_VAR35_THRESH = ==0.1 + +OBS_VAR35_NAME = CAPE +OBS_VAR35_LEVELS = L0-100000 +OBS_VAR35_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = UNION; +OBS_VAR35_THRESH = >2500&&<4000 + +FCST_VAR36_NAME = CAPE_L0_ENS_FREQ_gt2500 +FCST_VAR36_LEVELS = (*,*) +FCST_VAR36_THRESH = ==0.1 + +OBS_VAR36_NAME = CAPE +OBS_VAR36_LEVELS = L0-100000 +OBS_VAR36_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = UNION; +OBS_VAR36_THRESH =>2500 + +FCST_VAR37_NAME = HPBL_Z0_ENS_FREQ_lt500 +FCST_VAR37_LEVELS = (*,*) +FCST_VAR37_THRESH = ==0.1 + +OBS_VAR37_NAME = PBL +OBS_VAR37_LEVELS = L0 +OBS_VAR37_OPTIONS = desc = "TKE"; +OBS_VAR37_THRESH = <500 + +FCST_VAR38_NAME = HPBL_Z0_ENS_FREQ_lt1500 +FCST_VAR38_LEVELS = (*,*) +FCST_VAR38_THRESH = ==0.1 + +OBS_VAR38_NAME = PBL +OBS_VAR38_LEVELS = L0 +OBS_VAR38_OPTIONS = desc = "TKE"; +OBS_VAR38_THRESH = <1500 + +FCST_VAR39_NAME = HPBL_Z0_ENS_FREQ_gt1500 +FCST_VAR39_LEVELS = (*,*) 
+FCST_VAR39_THRESH = ==0.1 + +OBS_VAR39_NAME = PBL +OBS_VAR39_LEVELS = L0 +OBS_VAR39_OPTIONS = desc = "TKE"; +OBS_VAR39_THRESH = >1500 # Forecast data description variables FCST_IS_PROB = True From e4bedfe284f8bf84452450742ba7db031c256b7b Mon Sep 17 00:00:00 2001 From: "Chan-Hoo.Jeon-NOAA" <60152248+chan-hoo@users.noreply.github.com> Date: Tue, 4 Jan 2022 06:28:40 -0500 Subject: [PATCH 08/15] Modify -N setting on Cray (#655) --- scripts/exregional_run_fcst.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 8aa05ce34..5963fd6bc 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -108,7 +108,7 @@ case "$MACHINE" in if [ ${PE_MEMBER01} -gt 24 ];then APRUN="aprun -b -j1 -n${PE_MEMBER01} -N24 -d1 -cc depth" else - APRUN="aprun -b -j1 -n24 -N24 -d1 -cc depth" + APRUN="aprun -b -j1 -n${PE_MEMBER01} -N${PE_MEMBER01} -d1 -cc depth" fi ;; From 1aba292f15771cb3f2dcb1a9163f1cbdb635b91f Mon Sep 17 00:00:00 2001 From: "Chan-Hoo.Jeon-NOAA" <60152248+chan-hoo@users.noreply.github.com> Date: Wed, 5 Jan 2022 06:07:48 -0500 Subject: [PATCH 09/15] Add a flag for MERRA2 (#659) --- scripts/exregional_run_fcst.sh | 28 ++++++++++--------- ...3GFS_suite_GFS_v15_thompson_mynn_lam3km.sh | 2 ++ ush/config_defaults.sh | 9 ++++++ ush/generate_FV3LAM_wflow.sh | 12 ++++---- ush/setup.sh | 24 ++++++++++++++++ ush/valid_param_vals.sh | 1 + 6 files changed, 58 insertions(+), 18 deletions(-) diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 5963fd6bc..513aa6634 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -410,20 +410,22 @@ done # #----------------------------------------------------------------------- # -for f_nm_path in ${FIXclim}/*; do - f_nm=$( basename "${f_nm_path}" ) - pre_f="${f_nm%%.*}" - - if [ "${pre_f}" = "merra2" ]; then - mnth=$( printf "%s\n" "${f_nm}" | grep -o -P '(?<=2014.m).*(?=.nc)' ) - 
symlink="${run_dir}/aeroclim.m${mnth}.nc" - else - symlink="${run_dir}/${pre_f}.dat" - fi - target="${f_nm_path}" - create_symlink_to_file target="$target" symlink="$symlink" \ +if [ "${USE_MERRA_CLIMO}" = "TRUE" ]; then + for f_nm_path in ${FIXclim}/*; do + f_nm=$( basename "${f_nm_path}" ) + pre_f="${f_nm%%.*}" + + if [ "${pre_f}" = "merra2" ]; then + mnth=$( printf "%s\n" "${f_nm}" | grep -o -P '(?<=2014.m).*(?=.nc)' ) + symlink="${run_dir}/aeroclim.m${mnth}.nc" + else + symlink="${run_dir}/${pre_f}.dat" + fi + target="${f_nm_path}" + create_symlink_to_file target="$target" symlink="$symlink" \ relative="${relative_link_flag}" -done + done +fi # #----------------------------------------------------------------------- # diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.sh b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.sh index f391b2688..03dbd2c38 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.sh +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.sh @@ -13,6 +13,8 @@ PREEXISTING_DIR_METHOD="rename" PREDEF_GRID_NAME="RRFS_CONUS_3km" CCPP_PHYS_SUITE="FV3_GFS_v15_thompson_mynn_lam3km" +USE_MERRA_CLIMO="TRUE" + EXTRN_MDL_NAME_ICS="FV3GFS" EXTRN_MDL_NAME_LBCS="FV3GFS" USE_USER_STAGED_EXTRN_FILES="TRUE" diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 702cf4db8..c3090cc8b 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -1333,6 +1333,15 @@ RUN_TASK_VX_ENSPOINT="FALSE" # #----------------------------------------------------------------------- # +# Flag that determines whether MERRA2 aerosol climatology data and +# lookup tables 
for optics properties are obtained +# +#----------------------------------------------------------------------- +# +USE_MERRA_CLIMO="FALSE" +# +#----------------------------------------------------------------------- +# # Set the array parameter containing the names of all the fields that the # MAKE_SFC_CLIMO_TN task generates on the native FV3-LAM grid. # diff --git a/ush/generate_FV3LAM_wflow.sh b/ush/generate_FV3LAM_wflow.sh index d19a58b52..db65baec2 100755 --- a/ush/generate_FV3LAM_wflow.sh +++ b/ush/generate_FV3LAM_wflow.sh @@ -618,18 +618,20 @@ fi # #----------------------------------------------------------------------- # -print_info_msg "$VERBOSE" " +if [ "${USE_MERRA_CLIMO}" = "TRUE" ]; then + print_info_msg "$VERBOSE" " Copying MERRA2 aerosol climatology data files from system directory (FIXaer/FIXlut) to a subdirectory (FIXclim) in the experiment directory: FIXaer = \"${FIXaer}\" FIXlut = \"${FIXlut}\" FIXclim = \"${FIXclim}\"" -check_for_preexist_dir_file "${FIXclim}" "delete" -mkdir_vrfy -p "${FIXclim}" + check_for_preexist_dir_file "${FIXclim}" "delete" + mkdir_vrfy -p "${FIXclim}" -cp_vrfy "${FIXaer}/merra2.aerclim"*".nc" "${FIXclim}/" -cp_vrfy "${FIXlut}/optics"*".dat" "${FIXclim}/" + cp_vrfy "${FIXaer}/merra2.aerclim"*".nc" "${FIXclim}/" + cp_vrfy "${FIXlut}/optics"*".dat" "${FIXclim}/" +fi # #----------------------------------------------------------------------- # diff --git a/ush/setup.sh b/ush/setup.sh index ee49750ae..88dcab21e 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -790,6 +790,30 @@ check_var_valid_value \ # #----------------------------------------------------------------------- # +# Make sure that USE_MERRA_CLIMO is set to a valid value. +# +#----------------------------------------------------------------------- +# +check_var_valid_value "USE_MERRA_CLIMO" "valid_vals_USE_MERRA_CLIMO" +# +# Set USE_MERRA_CLIMO to either "TRUE" or "FALSE" so we don't +# have to consider other valid values later on. 
+# +USE_MERRA_CLIMO=$(echo_uppercase $USE_MERRA_CLIMO) +if [ "${USE_MERRA_CLIMO}" = "TRUE" ] || \ + [ "${USE_MERRA_CLIMO}" = "YES" ]; then + USE_MERRA_CLIMO="TRUE" +elif [ "${USE_MERRA_CLIMO}" = "FALSE" ] || \ + [ "${USE_MERRA_CLIMO}" = "NO" ]; then + USE_MERRA_CLIMO="FALSE" +fi +# Force to "TRUE" in case of FV3_GFS_v15_thompson_mynn_lam3km: +if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_v15_thompson_mynn_lam3km" ]; then + USE_MERRA_CLIMO="TRUE" +fi +# +#----------------------------------------------------------------------- +# # Make sure that FCST_MODEL is set to a valid value. # #----------------------------------------------------------------------- diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index a8bd0ae72..c42144130 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -78,3 +78,4 @@ valid_vals_FVCOM_WCSTART=("warm" "WARM" "cold" "COLD") valid_vals_COMPILER=("intel" "gnu") valid_vals_SUB_HOURLY_POST=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_DT_SUBHOURLY_POST_MNTS=("1" "01" "2" "02" "3" "03" "4" "04" "5" "05" "6" "06" "10" "12" "15" "20" "30") +valid_vals_USE_MERRA_CLIMO=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") From 34ab11fab73e962398ec1d21bfcd48d1df8afdea Mon Sep 17 00:00:00 2001 From: JeffBeck-NOAA <55201531+JeffBeck-NOAA@users.noreply.github.com> Date: Mon, 10 Jan 2022 15:02:11 -0700 Subject: [PATCH 10/15] Verification modifications to conf files. 
(#662) --- ush/templates/parm/metplus/PointStat_conus_sfc.conf | 8 ++++---- ush/templates/parm/metplus/PointStat_upper_air.conf | 4 ++-- ush/templates/parm/metplus/REFC.conf | 4 ++-- ush/templates/parm/metplus/RETOP.conf | 4 ++-- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/ush/templates/parm/metplus/PointStat_conus_sfc.conf b/ush/templates/parm/metplus/PointStat_conus_sfc.conf index 4eccff4c1..779fa73b4 100644 --- a/ush/templates/parm/metplus/PointStat_conus_sfc.conf +++ b/ush/templates/parm/metplus/PointStat_conus_sfc.conf @@ -83,7 +83,7 @@ PB2NC_MESSAGE_TYPE = ADPSFC, ADPUPA # Leave empty to process all # PB2NC_OBS_BUFR_VAR_LIST = POB, QOB, TOB, ZOB, UOB, VOB, PMO, TOCC, TDO, HOVI, D_DPT, D_WDIR, D_WIND, D_RH, D_MIXR, D_PRMSL -PB2NC_OBS_BUFR_VAR_LIST = PMO, ZOB, TOB, TDO, QOB, UOB, VOB, PWO, TOCC, D_RH, HOVI, CEILING, D_PBL, D_CAPE, MXGS, D_WIND +PB2NC_OBS_BUFR_VAR_LIST = PMO, ZOB, TOB, TDO, QOB, UOB, VOB, PWO, TOCC, D_WIND, D_RH, HOVI, CEILING, D_PBL, D_CAPE, MXGS # For defining the time periods for summarization # False for no time summary, True otherwise @@ -147,7 +147,7 @@ BOTH_VAR5_THRESH = >=2.572 ;; m/s or 5kts BOTH_VAR6_NAME = WIND BOTH_VAR6_LEVELS = Z10 -BOTH_VAR6_THRESH = >=2.572 ;; m/s or 5kts +BOTH_VAR6_THRESH = >=2.572, >=2.572 && <5.144, >=5.144, >=10.288, >=15.433 ;; m/s or 5, 10, 20, 30kts BOTH_VAR6_OPTIONS = GRIB2_pdt = 0; ;; derive instantaneous 10-m wind from U/V components, overriding max 10-m wind BOTH_VAR7_NAME = PRMSL @@ -155,7 +155,7 @@ BOTH_VAR7_LEVELS = Z0 FCST_VAR8_NAME = TCDC FCST_VAR8_LEVELS = L0 -FCST_VAR8_OPTIONS = GRIB_lvl_typ = 200; GRIB2_ipdtmpl_index = 8; GRIB2_ipdtmpl_val = {lead?fmt=%H}; +BOTH_VAR8_OPTIONS = GRIB_lvl_typ = 200; interp = { type = [ { method = NEAREST; width = 1; } ]; } OBS_VAR8_NAME = TCDC OBS_VAR8_LEVELS = L0 OBS_VAR8_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } @@ -163,7 +163,7 @@ OBS_VAR8_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } 
BOTH_VAR9_NAME = VIS BOTH_VAR9_LEVELS = L0 BOTH_VAR9_THRESH = <805, <1609, <4828, <8045 ,>=8045, <16090 -BOTH_VAR9_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } +BOTH_VAR9_OPTIONS = censor_thresh = [>16090]; censor_val = [16090]; interp = { type = [ { method = NEAREST; width = 1; } ]; } BOTH_VAR10_NAME = GUST BOTH_VAR10_LEVELS = Z0 diff --git a/ush/templates/parm/metplus/PointStat_upper_air.conf b/ush/templates/parm/metplus/PointStat_upper_air.conf index cd05d930b..ca60b7d5d 100644 --- a/ush/templates/parm/metplus/PointStat_upper_air.conf +++ b/ush/templates/parm/metplus/PointStat_upper_air.conf @@ -83,7 +83,7 @@ PB2NC_MESSAGE_TYPE = ADPSFC, ADPUPA # Leave empty to process all # PB2NC_OBS_BUFR_VAR_LIST = POB, QOB, TOB, ZOB, UOB, VOB, PMO, TOCC, TDO, HOVI, D_DPT, D_WDIR, D_WIND, D_RH, D_MIXR, D_PRMSL -PB2NC_OBS_BUFR_VAR_LIST = PMO, ZOB, TOB, TDO, QOB, UOB, VOB, PWO, TOCC, D_RH, HOVI, CEILING, D_PBL, D_CAPE, MXGS, D_WIND +PB2NC_OBS_BUFR_VAR_LIST = PMO, ZOB, TOB, TDO, QOB, UOB, VOB, PWO, TOCC, D_RH, D_WIND, HOVI, CEILING, D_PBL, D_CAPE, MXGS # For defining the time periods for summarization # False for no time summary, True otherwise @@ -147,7 +147,7 @@ BOTH_VAR5_THRESH = >=2.572 ;; m/s or 5kts BOTH_VAR6_NAME = WIND BOTH_VAR6_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -BOTH_VAR6_THRESH = >=2.572 ;; m/s or 5kts +BOTH_VAR6_THRESH = >=2.572, >=2.572 && <5.144, >=5.144, >=10.288, >=15.433, >=20.577, >=25.722 ;; m/s or 5, 10, 20, 30, 40, 50kts BOTH_VAR7_NAME = HGT BOTH_VAR7_LEVELS = P1000, P950, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 diff --git a/ush/templates/parm/metplus/REFC.conf b/ush/templates/parm/metplus/REFC.conf index 9a3fdb2d1..a661fabc8 100644 --- a/ush/templates/parm/metplus/REFC.conf +++ b/ush/templates/parm/metplus/REFC.conf @@ -82,13 +82,13 @@ GRID_STAT_REGRID_TO_GRID = FCST # Forecast/Observation variable Information FCST_VAR1_NAME = REFC FCST_VAR1_LEVELS 
= L0 -FCST_VAR1_OPTIONS = cnt_thresh = [ >15 ]; +FCST_VAR1_OPTIONS = cnt_thresh = [ >15 ]; cnt_logic = UNION; BOTH_VAR1_THRESH = ge20, ge30, ge40, ge50 OBS_VAR1_NAME = MergedReflectivityQCComposite OBS_VAR1_LEVELS = Z500 -OBS_VAR1_OPTIONS = censor_thresh = lt-20.0; censor_val = -20.0; cnt_thresh = [ >15 ]; cnt_logic = UNION; +OBS_VAR1_OPTIONS = censor_thresh = [eq-999, <-20]; censor_val = [-9999, -20]; cnt_thresh = [ >15 ]; cnt_logic = UNION; OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 OBS_GRID_STAT_FILE_WINDOW_END = 300 diff --git a/ush/templates/parm/metplus/RETOP.conf b/ush/templates/parm/metplus/RETOP.conf index 5508bd600..c09ea7a18 100644 --- a/ush/templates/parm/metplus/RETOP.conf +++ b/ush/templates/parm/metplus/RETOP.conf @@ -82,13 +82,13 @@ GRID_STAT_REGRID_TO_GRID = FCST # Forecast/Observation variable Information FCST_VAR1_NAME = RETOP FCST_VAR1_LEVELS = L0 -FCST_VAR1_OPTIONS = cnt_thresh = [ >15 ]; convert(x) = M_to_KFT(x); +FCST_VAR1_OPTIONS = convert(x) = M_to_KFT(x); cnt_thresh = [ >0 ]; cnt_logic = UNION; BOTH_VAR1_THRESH = ge20, ge30, ge40, ge50 OBS_VAR1_NAME = EchoTop18 OBS_VAR1_LEVELS = Z500 -OBS_VAR1_OPTIONS = censor_thresh = lt-20.0; censor_val = -20.0; cnt_thresh = [ >15 ]; cnt_logic = UNION; convert(x) = KM_to_KFT(x); +OBS_VAR1_OPTIONS = convert(x) = KM_to_KFT(x); censor_thresh = [<=-9.84252,eq-3.28084]; censor_val = [-9999,-16.4042]; cnt_thresh = [ >0 ]; cnt_logic = UNION; OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 OBS_GRID_STAT_FILE_WINDOW_END = 300 From f328f0bf345734c7266d9d4353d8bd4ae61d86fe Mon Sep 17 00:00:00 2001 From: Christina Holt <56881914+christinaholtNOAA@users.noreply.github.com> Date: Wed, 12 Jan 2022 18:56:02 -0600 Subject: [PATCH 11/15] Refactor supported input path handling. (#653) ## DESCRIPTION OF CHANGES: This PR is one of two in a series that addresses refactoring platform-dependence. The topic of this PR is to refactor the logic for supporting platforms with specific real-time data feeds. 
Here is a list of features and modifications that were made: - Apply appropriate style for functions, mainly related to white space for nested code and comments. - Make external model file offset hours configurable for analysis- and forecast-type files because sometimes we like to start a forecast from a forecast of another model. - Refactor to reduce duplication of information. - Set up filenames and paths to be specified with templates in a consistent way for every model instead of with bash logic that is different for every model's naming convention. - Remove paths that do not exist on platforms I have access to: Jet and Hera. No changes were made to the logic for input managed by USE_USER_STAGED_EXTRN_FILES or COMINGgfs for NCO mode, although it could make sense to re-assess the NCO mode handling at a later date. I plan to go through and "review" the code to lead reviewers through this one since it bit of change. It may be helpful to view it using GitHub's ignore whitespace feature. ## TESTS CONDUCTED: Test cases using the WE2E test on Hera; see PR for full list. A test case for the same forecast configuration using known paths on Hera to exercise the new code. I checked that arrays were consistent, that the script exits in a sane manner when files are not available on disk or HPSS, and that I haven't broken anything with the way files are handled through the "user specified" mechanism necessary for the test framework. 
## ISSUE (optional): This work is an incremental change in support of Issue #618 ## CONTRIBUTORS (optional): @christopherwharrop-noaa @venitahagerty @robgonzalezpita --- jobs/JREGIONAL_GET_EXTRN_MDL_FILES | 25 +- ush/config_defaults.sh | 20 + ush/get_extrn_mdl_file_dir_info.sh | 1023 ++++++++++------------------ ush/set_extrn_mdl_params.sh | 384 +++-------- 4 files changed, 477 insertions(+), 975 deletions(-) diff --git a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES index bfe2d7bce..a452062c5 100755 --- a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES @@ -167,28 +167,29 @@ check_var_valid_value "ICS_OR_LBCS" "valid_vals_ICS_OR_LBCS" # #----------------------------------------------------------------------- # -# Set the parameter anl_or_fcst that determines whether we want to get -# analysis or forecast files. This depends on whether we want these files -# to generate initial condition and surface field files or lateral boundary -# condition files. Also, set time_offset_hrs, which is the offset in -# hours between the current cycle's starting time and the starting time -# of the external model providing the LBCs. +# Set parameters for grabbing either the initial conditions from analysis or +# forecast files of external models, or the lateral boundary conditions +# from external models. The script has been called to do the work for +# one or the other. 
# #----------------------------------------------------------------------- # if [ "${ICS_OR_LBCS}" = "ICS" ]; then - anl_or_fcst="ANL" - time_offset_hrs="0" + if [ ${EXTRN_MDL_ICS_OFFSET_HRS} -eq 0 ] ; then + anl_or_fcst="ANL" + time_offset_hrs=0 + else + anl_or_fcst="FCST" + time_offset_hrs=${EXTRN_MDL_ICS_OFFSET_HRS:-0} + fi elif [ "${ICS_OR_LBCS}" = "LBCS" ]; then anl_or_fcst="FCST" - time_offset_hrs="${EXTRN_MDL_LBCS_OFFSET_HRS}" + time_offset_hrs=${EXTRN_MDL_LBCS_OFFSET_HRS:-0} fi # #----------------------------------------------------------------------- # -# Set the name of and then create the directory in which to stage the -# external model files for the current cycle (if it doesn't already exist). -# Then change location to that directory. +# Create the directory where the external model files should be stored # #----------------------------------------------------------------------- # diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index c3090cc8b..21d6096a1 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -604,6 +604,24 @@ NDAS_OBS_DIR="/path/to/observation-directory/ndas/proc" # data availble at least every 6 hours. It is up to the user to ensure # that this is the case. # +# EXTRN_MDL_ICS_OFFSET_HRS: +# Users may wish to start a forecast from a forecast of a previous cycle +# of an external model. This variable sets the number of hours earlier +# the external model started than when the FV3 forecast configured here +# should start. If, for example, the forecast should start from a 6 hour +# forecast of the GFS, then EXTRN_MDL_ICS_OFFSET_HRS=6. + +# EXTRN_MDL_LBCS_OFFSET_HRS: +# Users may wish to use lateral boundary conditions from a forecast that +# was started earlier than the initial time for the FV3 forecast +# configured here. This variable sets the number of hours earlier +# the external model started than when the FV3 forecast configured here +# should start. 
For example, the forecast should use lateral boundary +# conditions from the GFS started 6 hours earlier, then +# EXTRN_MDL_LBCS_OFFSET_HRS=6. +# Note: the default value is model-dependent and set in +# set_extrn_mdl_params.sh +# # FV3GFS_FILE_FMT_ICS: # If using the FV3GFS model as the source of the ICs (i.e. if EXTRN_MDL_NAME_ICS # is set to "FV3GFS"), this variable specifies the format of the model @@ -619,6 +637,8 @@ NDAS_OBS_DIR="/path/to/observation-directory/ndas/proc" EXTRN_MDL_NAME_ICS="FV3GFS" EXTRN_MDL_NAME_LBCS="FV3GFS" LBC_SPEC_INTVL_HRS="6" +EXTRN_MDL_ICS_OFFSET_HRS="0" +EXTRN_MDL_LBCS_OFFSET_HRS="" FV3GFS_FILE_FMT_ICS="nemsio" FV3GFS_FILE_FMT_LBCS="nemsio" # diff --git a/ush/get_extrn_mdl_file_dir_info.sh b/ush/get_extrn_mdl_file_dir_info.sh index ff7d1bfda..fdab7dad4 100755 --- a/ush/get_extrn_mdl_file_dir_info.sh +++ b/ush/get_extrn_mdl_file_dir_info.sh @@ -18,83 +18,10 @@ # #----------------------------------------------------------------------- # -function get_extrn_mdl_file_dir_info() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. 
-# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. Then -# process the arguments provided to this script/function (which should -# consist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( \ - "extrn_mdl_name" \ - "anl_or_fcst" \ - "cdate_FV3LAM" \ - "time_offset_hrs" \ - "varname_extrn_mdl_cdate" \ - "varname_extrn_mdl_lbc_spec_fhrs" \ - "varname_extrn_mdl_fns_on_disk" \ - "varname_extrn_mdl_fns_in_arcv" \ - "varname_extrn_mdl_sysdir" \ - "varname_extrn_mdl_arcv_fmt" \ - "varname_extrn_mdl_arcv_fns" \ - "varname_extrn_mdl_arcv_fps" \ - "varname_extrn_mdl_arcvrel_dir" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script/function. Note that these will be printed out only if VERBOSE -# is set to TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Check arguments. -# -#----------------------------------------------------------------------- -# -if [ 0 = 1 ]; then - if [ "$#" -ne "13" ]; then +usage () { - print_err_msg_exit " +echo " Incorrect number of arguments specified: Function name: \"${func_name}\" @@ -117,7 +44,7 @@ Usage: varname_extrn_mdl_arcvrel_dir where the arguments are defined as follows: - + extrn_mdl_name: Name of the external model, i.e. 
the name of the model providing the fields from which files containing initial conditions, surface fields, @@ -160,9 +87,13 @@ where the arguments are defined as follows: boundary condition (LBC) output files are obtained from the external model (and will be used to update the LBCs of the FV3-LAM). - varname_extrn_mdl_fns: - Name of the global variable that will contain the names of the exter- - nal model output files. + varname_extrn_mdl_fns_on_disk: + Name of the global variable that will contain the expected names of + the external model output files on disk. + + varname_extrn_mdl_fns_in_arcv: + Name of the global variable that will contain the expected names of + the external model output files on NOAA HPSS. varname_extrn_mdl_sysdir: Name of the global variable that will contain the system directory in @@ -187,32 +118,8 @@ where the arguments are defined as follows: rectory, i.e. the directory \"inside\" the archive file in which the ex- ternal model output files may be stored. " +} - fi - -fi - - -# -#----------------------------------------------------------------------- -# -# Declare additional local variables. 
-# -#----------------------------------------------------------------------- -# - local yyyy mm dd hh mn yyyymmdd \ - lbc_spec_fhrs i num_fhrs \ - yy ddd fcst_hhh fcst_hh fcst_mn \ - prefix suffix fns fns_on_disk fns_in_arcv \ - sysbasedir sysdir \ - arcv_dir arcv_fmt arcv_fns arcv_fps arcvrel_dir -# -#----------------------------------------------------------------------- -# -# Declare local function to avoid repetition -# -#----------------------------------------------------------------------- -# function quit_unless_user_spec_data() { if [ "${USE_USER_STAGED_EXTRN_FILES}" != "TRUE" ]; then print_err_msg_exit "\ @@ -222,236 +129,195 @@ has not been specified for this external model and machine combination: MACHINE = \"$MACHINE\"" fi } -# -#----------------------------------------------------------------------- -# -# Check input variables for valid values. -# -#----------------------------------------------------------------------- -# + +function get_extrn_mdl_file_dir_info() { + + { save_shell_opts; set -u +x; } > /dev/null 2>&1 + + local func_name="${FUNCNAME[0]}" + # + #----------------------------------------------------------------------- + # + # Specify the set of valid argument names for this script/function. Then + # process the arguments provided to this script/function (which should + # consist of a set of name-value pairs of the form arg1="value1", etc). 
+ # + #----------------------------------------------------------------------- + # + local valid_args=( \ + "extrn_mdl_name" \ + "anl_or_fcst" \ + "cdate_FV3LAM" \ + "time_offset_hrs" \ + "varname_extrn_mdl_cdate" \ + "varname_extrn_mdl_lbc_spec_fhrs" \ + "varname_extrn_mdl_fns_on_disk" \ + "varname_extrn_mdl_fns_in_arcv" \ + "varname_extrn_mdl_sysdir" \ + "varname_extrn_mdl_arcv_fmt" \ + "varname_extrn_mdl_arcv_fns" \ + "varname_extrn_mdl_arcv_fps" \ + "varname_extrn_mdl_arcvrel_dir" \ + ) + process_args valid_args "$@" + + if [ "$#" -ne "13" ]; then + print_err_msg_exit $(usage) + fi + + # + #----------------------------------------------------------------------- + # + # For debugging purposes, print out values of arguments passed to this + # script/function. Note that these will be printed out only if VERBOSE + # is set to TRUE. + # + #----------------------------------------------------------------------- + # + print_input_args valid_args + + # + #----------------------------------------------------------------------- + # + # Declare additional local variables. + # + #----------------------------------------------------------------------- + # + local yyyy yy mm dd hh mn yyyymmdd ddd \ + lbc_spec_fhrs i num_fhrs \ + fcst_hhh fcst_hh fcst_mn \ + prefix suffix fns fns_on_disk fns_in_arcv \ + sysbasedir sysdir \ + arcv_dir arcv_fmt arcv_fns arcv_fps arcvrel_dir + anl_or_fcst=$(echo_uppercase $anl_or_fcst) valid_vals_anl_or_fcst=( "ANL" "FCST" ) check_var_valid_value "anl_or_fcst" "valid_vals_anl_or_fcst" -# -#----------------------------------------------------------------------- -# -# Extract from cdate_FV3LAM the starting year, month, day, and hour of -# the FV3-LAM cycle. Then subtract the temporal offset specified in -# time_offset_hrs (assumed to be given in units of hours) from cdate_FV3LAM -# to obtain the starting date and time of the external model, express the -# result in YYYYMMDDHH format, and save it in cdate. 
This is the starting -# time of the external model forecast. -# -#----------------------------------------------------------------------- -# - yyyy=${cdate_FV3LAM:0:4} - mm=${cdate_FV3LAM:4:2} - dd=${cdate_FV3LAM:6:2} + # + #----------------------------------------------------------------------- + # + # Set cdate to the start time for the external model being used. + # + #----------------------------------------------------------------------- + # hh=${cdate_FV3LAM:8:2} yyyymmdd=${cdate_FV3LAM:0:8} + # Adjust time for offset cdate=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - ${time_offset_hrs} hours" "+%Y%m%d%H" ) -# -#----------------------------------------------------------------------- -# -# Extract from cdate the starting year, month, day, and hour of the external -# model forecast. Also, set the starting minute to "00" and get the date -# without the time-of-day. These are needed below in setting various -# directory and file names. -# -#----------------------------------------------------------------------- -# + yyyy=${cdate:0:4} + yy=${yyyy:2:4} mm=${cdate:4:2} dd=${cdate:6:2} hh=${cdate:8:2} mn="00" yyyymmdd=${cdate:0:8} -# -#----------------------------------------------------------------------- -# -# Initialize lbc_spec_fhrs to an empty array. Then, if considering a -# forecast, reset lbc_spec_fhrs to the array of forecast hours at which -# the lateral boundary conditions (LBCs) are to be updated, starting with -# the 2nd such time (i.e. the one having array index 1). We do not include -# the first hour (hour 0) because at this initial time, the LBCs are -# obtained from the analysis fields provided by the external model (as -# opposed to a forecast field). 
-# -#----------------------------------------------------------------------- -# + # Julian day -- not 3 digit day of month + ddd=$( $DATE_UTIL --utc --date "${yyyy}-${mm}-${dd} ${hh}:${mn} UTC" "+%j" ) + # + #----------------------------------------------------------------------- + # + # Initialize lbc_spec_fhrs array. Skip the initial time, since it is + # handled separately. + # + #----------------------------------------------------------------------- + # lbc_spec_fhrs=( "" ) if [ "${anl_or_fcst}" = "FCST" ]; then lbc_spec_fhrs=( "${LBC_SPEC_FCST_HRS[@]}" ) -# -# Add the temporal offset specified in time_offset_hrs (assumed to be in -# units of hours) to the the array of LBC update forecast hours to make -# up for shifting the starting hour back in time. After this addition, -# lbc_spec_fhrs will contain the LBC update forecast hours relative to -# the start time of the external model run. -# + num_fhrs=${#lbc_spec_fhrs[@]} for (( i=0; i<=$((num_fhrs-1)); i++ )); do + # Add in offset to account for shift in initial time lbc_spec_fhrs[$i]=$(( ${lbc_spec_fhrs[$i]} + time_offset_hrs )) done fi -# -#----------------------------------------------------------------------- -# -# Set additional parameters needed in forming the names of the external -# model files only under certain circumstances. -# -#----------------------------------------------------------------------- -# - if [ "${extrn_mdl_name}" = "RAP" ] || \ - [ "${extrn_mdl_name}" = "HRRR" ] || \ - [ "${extrn_mdl_name}" = "NAM" ] || \ - [ "${extrn_mdl_name}" = "FV3GFS" -a "${MACHINE}" = "JET" ]; then -# -# Get the Julian day-of-year of the starting date and time of the exter- -# nal model forecast. -# - ddd=$( $DATE_UTIL --utc --date "${yyyy}-${mm}-${dd} ${hh}:${mn} UTC" "+%j" ) -# -# Get the last two digits of the year of the starting date and time of -# the external model forecast. 
-# - yy=${yyyy:2:4} + # + #----------------------------------------------------------------------- + # + # The model may be started with a variety of file types from FV3GFS. + # Set that file type now + # + #----------------------------------------------------------------------- + # - fi -# -#----------------------------------------------------------------------- -# -# Set the external model output file names that must be obtained (from -# disk if available, otherwise from HPSS). -# -#----------------------------------------------------------------------- -# if [ "${anl_or_fcst}" = "ANL" ]; then fv3gfs_file_fmt="${FV3GFS_FILE_FMT_ICS}" elif [ "${anl_or_fcst}" = "FCST" ]; then fv3gfs_file_fmt="${FV3GFS_FILE_FMT_LBCS}" fi + # + #----------------------------------------------------------------------- + # + # Generate an array of file names expected from the external model + # Assume that filenames in archive and on disk are the same, unless + # otherwise specified (primarily on Jet). + # + #----------------------------------------------------------------------- + # + declare -a fns_on_disk + declare -a fns_in_arcv case "${anl_or_fcst}" in -# -#----------------------------------------------------------------------- -# -# Consider analysis files (possibly including surface files). -# -#----------------------------------------------------------------------- -# - "ANL") - - fcst_hh="00" - fcst_mn="00" - - case "${extrn_mdl_name}" in - - "GSMGFS") -# fns=( "atm" "sfc" "nst" ) - fns=( "atm" "sfc" ) - prefix="gfs.t${hh}z." - fns=( "${fns[@]/#/$prefix}" ) - suffix="anl.nemsio" - fns_on_disk=( "${fns[@]/%/$suffix}" ) - fns_in_arcv=( "${fns[@]/%/$suffix}" ) - ;; - "FV3GFS") - - if [ "${fv3gfs_file_fmt}" = "nemsio" ]; then - - fns=( "atm" "sfc" ) - suffix="anl.nemsio" - fns=( "${fns[@]/%/$suffix}" ) - -# Set names of external files if searching on disk. - if [ "${MACHINE}" = "JET" ]; then - prefix="${yy}${ddd}${hh}00.gfs.t${hh}z." - else - prefix="gfs.t${hh}z." 
+ "ANL") + + fcst_hh="00" + fcst_mn="00" + + case "${extrn_mdl_name}" in + + "GSMGFS") + fns_in_arcv=("gfs.t${hh}z.atmanl.nemsio" "gfs.t${hh}z.sfcanl.nemsio") + ;; + + "FV3GFS") + case "${fv3gfs_file_fmt}" in + "nemsio") + fns_in_arcv=("gfs.t${hh}z.atmanl.nemsio" "gfs.t${hh}z.sfcanl.nemsio") + + # File names are prefixed with a date time on Jet + if [ "${MACHINE}" = "JET" ]; then + prefix="${yy}${ddd}${hh}00" + fns_on_disk=( ${fns_in_arcv[@]/#/$prefix}) + fi + ;; + "grib2") + fns_in_arcv=( "gfs.t${hh}z.pgrb2.0p25.f000" ) + ;; + "netcdf") + fns_in_arcv=("gfs.t${hh}z.atmanl.nc" "gfs.t${hh}z.sfcanl.nc") + # File names are prefixed with a date time on Jet + if [ "${MACHINE}" = "JET" ]; then + prefix="${yy}${ddd}${hh}00" + fns_on_disk=( ${fns_in_arcv[@]/#/$prefix}) + fi + ;; + esac + ;; + + "RAP") + ;& # Fall through. RAP and HRRR follow same naming rules + + "HRRR") + fns_in_arcv=( "${yy}${ddd}${hh}${mn}${fcst_hh}${fcst_mn}" ) + if [ "${MACHINE}" = "JET" ]; then + fns_on_disk=( "${yy}${ddd}${hh}${mn}${fcst_mn}${fcst_hh}${fcst_mn}" ) fi - fns_on_disk=( "${fns[@]/#/$prefix}" ) - -# Set names of external files if searching in an archive file, e.g. from -# HPSS. - prefix="gfs.t${hh}z." - fns_in_arcv=( "${fns[@]/#/$prefix}" ) - - elif [ "${fv3gfs_file_fmt}" = "grib2" ]; then - -# GSK 12/16/2019: -# Turns out that the .f000 file contains certain necessary fields that -# are not in the .anl file, so switch to the former. -# fns=( "gfs.t${hh}z.pgrb2.0p25.anl" ) # Get only 0.25 degree files for now. -# fns=( "gfs.t${hh}z.pgrb2.0p25.f000" ) # Get only 0.25 degree files for now. - fns_on_disk=( "gfs.t${hh}z.pgrb2.0p25.f000" ) # Get only 0.25 degree files for now. - fns_in_arcv=( "gfs.t${hh}z.pgrb2.0p25.f000" ) # Get only 0.25 degree files for now. 
- - elif [ "${fv3gfs_file_fmt}" = "netcdf" ]; then + ;; - fns=( "atm" "sfc" ) - suffix="anl.nc" - fns=( "${fns[@]/%/$suffix}" ) + "NAM") + fns=( "" ) + fns_in_arcv=( "nam.t${hh}z.bgrdsf${fcst_hh}.tm00" ) + ;; -# Set names of external files if searching on disk. - if [ "${MACHINE}" = "JET" ]; then - prefix="${yy}${ddd}${hh}00.gfs.t${hh}z." - else - prefix="gfs.t${hh}z." - fi - fns_on_disk=( "${fns[@]/#/$prefix}" ) - -# Set names of external files if searching in an archive file, e.g. from -# HPSS. - prefix="gfs.t${hh}z." - fns_in_arcv=( "${fns[@]/#/$prefix}" ) - - fi - ;; - - "RAP") -# -# Note that this is GSL RAPX data, not operational NCEP RAP data. An option for the latter -# may be added in the future. -# - if [ "${MACHINE}" = "JET" ]; then - fns_on_disk=( "wrfnat_130_${fcst_hh}.grib2" ) - else - fns_on_disk=( "${yy}${ddd}${hh}${mn}${fcst_hh}${fcst_mn}" ) - fi - fns_in_arcv=( "${yy}${ddd}${hh}${mn}${fcst_hh}${fcst_mn}" ) - ;; - - "HRRR") -# -# Note that this is GSL HRRRX data, not operational NCEP HRRR data. An option for the latter -# may be added in the future. 
-# - if [ "${MACHINE}" = "JET" ]; then - fns_on_disk=( "wrfnat_hrconus_${fcst_hh}.grib2" ) - else - fns_on_disk=( "${yy}${ddd}${hh}${mn}${fcst_hh}${fcst_mn}" ) - fi - fns_in_arcv=( "${yy}${ddd}${hh}${mn}${fcst_hh}${fcst_mn}" ) - ;; - - "NAM") - fns=( "" ) - prefix="nam.t${hh}z.bgrdsfi${hh}" - fns=( "${fns[@]/#/$prefix}" ) - suffix=".tm${hh}" - fns_on_disk=( "${fns[@]/%/$suffix}" ) - fns_in_arcv=( "${fns[@]/%/$suffix}" ) - ;; - - *) - if [ "${USE_USER_STAGED_EXTRN_FILES}" != "TRUE" ]; then - print_err_msg_exit "\ + *) + if [ "${USE_USER_STAGED_EXTRN_FILES}" != "TRUE" ]; then + print_err_msg_exit "\ The external model file names (either on disk or in archive files) have not yet been specified for this combination of external model (extrn_mdl_name) and analysis or forecast (anl_or_fcst): @@ -460,343 +326,182 @@ and analysis or forecast (anl_or_fcst): fi ;; - esac + esac # End external model case for ANL files ;; -# -#----------------------------------------------------------------------- -# -# Consider forecast files. 
-# -#----------------------------------------------------------------------- -# - "FCST") - - fcst_mn="00" - - case "${extrn_mdl_name}" in - "GSMGFS") + "FCST") + fcst_mn="00" fcst_hhh=( $( printf "%03d " "${lbc_spec_fhrs[@]}" ) ) - prefix="gfs.t${hh}z.atmf" - fns=( "${fcst_hhh[@]/#/$prefix}" ) - suffix=".nemsio" - fns_on_disk=( "${fns[@]/%/$suffix}" ) - fns_in_arcv=( "${fns[@]/%/$suffix}" ) - ;; - - "FV3GFS") - - if [ "${fv3gfs_file_fmt}" = "nemsio" ]; then - - fcst_hhh=( $( printf "%03d " "${lbc_spec_fhrs[@]}" ) ) - suffix=".nemsio" - fns=( "${fcst_hhh[@]/%/$suffix}" ) + fcst_hh=( $( printf "%02d " "${lbc_spec_fhrs[@]}" ) ) - if [ "${MACHINE}" = "JET" ]; then - prefix="${yy}${ddd}${hh}00.gfs.t${hh}z.atmf" - else - prefix="gfs.t${hh}z.atmf" + case "${extrn_mdl_name}" in + + "GSMGFS") + fn_tmpl="gfs.t${hh}z.atmfFHR3.nemsio" + ;; + + "FV3GFS") + + if [ "${fv3gfs_file_fmt}" = "nemsio" ]; then + fn_tmpl="gfs.t${hh}z.atmfFHR3.nemsio" + if [ "${MACHINE}" = "JET" ]; then + disk_tmpl="${yy}${ddd}${hh}00.gfs.t${hh}z.atmfFHR3.nemsio" + for fhr in ${fcst_hhh[@]} ; do + fns_on_disk+=(${disk_tmpl/FHR3/$fhr}) + done + fi + elif [ "${fv3gfs_file_fmt}" = "grib2" ]; then + fn_tmpl="gfs.t${hh}z.pgrb2.0p25.fFHR3" + elif [ "${fv3gfs_file_fmt}" = "netcdf" ]; then + fn_tmpl="gfs.t${hh}z.atmfFHR3.nc" + if [ "${MACHINE}" = "JET" ]; then + disk_tmpl="${yy}${ddd}${hh}00.gfs.t${hh}z.atmfFHR3.nc" + for fhr in ${fcst_hhh[@]} ; do + fns_on_disk+=(${disk_tmpl/FHR3/$fhr}) + done + fi fi - fns_on_disk=( "${fns[@]/#/$prefix}" ) - - prefix="gfs.t${hh}z.atmf" - fns_in_arcv=( "${fns[@]/#/$prefix}" ) - - elif [ "${fv3gfs_file_fmt}" = "grib2" ]; then - - fcst_hhh=( $( printf "%03d " "${lbc_spec_fhrs[@]}" ) ) - prefix="gfs.t${hh}z.pgrb2.0p25.f" - fns_on_disk=( "${fcst_hhh[@]/#/$prefix}" ) - fns_in_arcv=( "${fcst_hhh[@]/#/$prefix}" ) - - elif [ "${fv3gfs_file_fmt}" = "netcdf" ]; then - - fcst_hhh=( $( printf "%03d " "${lbc_spec_fhrs[@]}" ) ) - suffix=".nc" - fns=( "${fcst_hhh[@]/%/$suffix}" ) - - if [ 
"${MACHINE}" = "JET" ]; then - prefix="${yy}${ddd}${hh}00.gfs.t${hh}z.atmf" - else - prefix="gfs.t${hh}z.atmf" + ;; + + "RAP") + ;& # Fall through since RAP and HRRR are named the same + + "HRRR") + fn_tmpl="${yy}${ddd}${hh}00FHR200" + if [ "${MACHINE}" = "JET" ]; then + disk_tmpl="${yy}${ddd}${hh}0000FHR2" + for fhr in ${fcst_hhh[@]} ; do + fns_on_disk+=(${disk_tmpl/FHR3/$fhr}) + done fi - fns_on_disk=( "${fns[@]/#/$prefix}" ) - - prefix="gfs.t${hh}z.atmf" - fns_in_arcv=( "${fns[@]/#/$prefix}" ) - - fi - ;; - - "RAP") -# -# Note that this is GSL RAPX data, not operational NCEP RAP data. An option for the latter -# may be added in the future. -# - fcst_hh=( $( printf "%02d " "${lbc_spec_fhrs[@]}" ) ) - - if [ "${MACHINE}" = "JET" ]; then - prefix="wrfnat_130_" - suffix=".grib2" - else - prefix="${yy}${ddd}${hh}${mn}" - suffix="${fcst_mn}" - fi - fns_on_disk=( "${fcst_hh[@]/#/$prefix}" ) - fns_on_disk=( "${fns_on_disk[@]/%/$suffix}" ) - - prefix="${yy}${ddd}${hh}${mn}" - fns_in_arcv=( "${fcst_hh[@]/#/$prefix}" ) - suffix="${fcst_mn}" - fns_in_arcv=( "${fns_in_arcv[@]/%/$suffix}" ) - ;; - - "HRRR") -# -# Note that this is GSL HRRRX data, not operational NCEP HRRR data. An option for the latter -# may be added in the future. 
-# - fcst_hh=( $( printf "%02d " "${lbc_spec_fhrs[@]}" ) ) - - if [ "${MACHINE}" = "JET" ]; then - prefix="wrfnat_hrconus_" - suffix=".grib2" - else - prefix="${yy}${ddd}${hh}${mn}" - suffix="${fcst_mn}" - fi - fns_on_disk=( "${fcst_hh[@]/#/$prefix}" ) - fns_on_disk=( "${fns_on_disk[@]/%/$suffix}" ) + ;; - prefix="${yy}${ddd}${hh}${mn}" - fns_in_arcv=( "${fcst_hh[@]/#/$prefix}" ) - suffix="${fcst_mn}" - fns_in_arcv=( "${fns_in_arcv[@]/%/$suffix}" ) - ;; + "NAM") + fn_tmpl="nam.t${hh}z.bgrdsfFHR3" + ;; - "NAM") - fcst_hhh=( $( printf "%03d " "${lbc_spec_fhrs[@]}" ) ) - prefix="nam.t${hh}z.bgrdsf" - fns=( "${fcst_hhh[@]/#/$prefix}" ) - suffix="" - fns_on_disk=( "${fns[@]/%/$suffix}" ) - fns_in_arcv=( "${fns[@]/%/$suffix}" ) - ;; - - *) - if [ "${USE_USER_STAGED_EXTRN_FILES}" != "TRUE" ]; then - print_err_msg_exit "\ + *) + if [ "${USE_USER_STAGED_EXTRN_FILES}" != "TRUE" ]; then + print_err_msg_exit "\ The external model file names have not yet been specified for this com- bination of external model (extrn_mdl_name) and analysis or forecast (anl_or_fcst): extrn_mdl_name = \"${extrn_mdl_name}\" anl_or_fcst = \"${anl_or_fcst}\"" - fi - ;; + fi + ;; - esac + esac # End external model case for FCST files ;; + esac # End ANL FCST case + + # + # Expand the archive file names for all forecast hours + # + if [ ${anl_or_fcst} = FCST ] ; then + if [[ $fn_tmpl =~ FHR3 ]] ; then + fhrs=( $( printf "%03d " "${lbc_spec_fhrs[@]}" ) ) + tmpl=FHR3 + elif [[ ${fn_tmpl} =~ FHR2 ]] ; then + fhrs=( $( printf "%02d " "${lbc_spec_fhrs[@]}" ) ) + tmpl=FHR2 + else + print_err_msg_exit "\ + Forecast file name templates are expected to contain a template + string, either FHR2 or FHR3" + fi + for fhr in ${fhrs[@]}; do + fns_in_arcv+=(${fn_tmpl/$tmpl/$fhr}) + done + fi - esac -# -#----------------------------------------------------------------------- -# -# Set the system directory (i.e. 
a directory on disk) in which the external -# model output files for the specified cycle date (cdate) may be located. -# Note that this will be used by the calling script only if the output -# files for the specified cdate actually exist at this location. Otherwise, -# the files will be searched for on the mass store (HPSS). -# -#----------------------------------------------------------------------- -# + # Make sure all filenames variables are set. + if [ -z $fns_in_arcv ] ; then + print_err_msg_exit "\ + The script has not set \$fns_in_arcv properly" + fi + + if [ -z ${fns_on_disk:-} ] ; then + fns_on_disk=(${fns_in_arcv[@]}) + fi + # + #----------------------------------------------------------------------- + # + # Set the system directory (i.e. a directory on disk) in which the external + # model output files for the specified cycle date (cdate) may be located. + # Note that this will be used by the calling script only if the output + # files for the specified cdate actually exist at this location. Otherwise, + # the files will be searched for on the mass store (HPSS). + # + #----------------------------------------------------------------------- + # if [ "${anl_or_fcst}" = "ANL" ]; then - sysbasedir="${EXTRN_MDL_SYSBASEDIR_ICS}" + sysbasedir=${EXTRN_MDL_SYSBASEDIR_ICS} elif [ "${anl_or_fcst}" = "FCST" ]; then - sysbasedir="${EXTRN_MDL_SYSBASEDIR_LBCS}" + sysbasedir=${EXTRN_MDL_SYSBASEDIR_LBCS} fi - sysdir="" - case "${extrn_mdl_name}" in - -# -# It is not clear which, if any, systems the (old) spectral GFS model is -# available on, so set sysdir for this external model to a null string. 
-# - "GSMGFS") - case "$MACHINE" in - "WCOSS_CRAY") - sysdir="" - ;; - "WCOSS_DELL_P3") - sysdir="" - ;; - "HERA") - sysdir="" - ;; - "ORION") - sysdir="$sysbasedir" - ;; - "JET") - sysdir="" - ;; - "ODIN") - sysdir="$sysbasedir" - ;; - "CHEYENNE") - sysdir="" - ;; - "STAMPEDE") - sysdir="$sysbasedir" - ;; - *) - quit_unless_user_spec_data - ;; - esac - ;; - - - "FV3GFS") - case "$MACHINE" in - "WCOSS_CRAY") - sysdir="$sysbasedir/gfs.${yyyymmdd}/${hh}/atmos" - ;; - "WCOSS_DELL_P3") - sysdir="$sysbasedir/gfs.${yyyymmdd}/${hh}/atmos" - ;; - "HERA") - sysdir="$sysbasedir/gfs.${yyyymmdd}/${hh}/atmos" - ;; - "ORION") - sysdir="$sysbasedir" - ;; - "JET") - sysdir="$sysbasedir" - ;; - "ODIN") - sysdir="$sysbasedir/${yyyymmdd}" - ;; - "CHEYENNE") - sysdir="$sysbasedir/gfs.${yyyymmdd}/${hh}" - ;; - "STAMPEDE") - sysdir="$sysbasedir" - ;; - *) - quit_unless_user_spec_data - ;; - esac - ;; - - - "RAP") - case "$MACHINE" in - "WCOSS_CRAY") - sysdir="$sysbasedir" - ;; - "WCOSS_DELL_P3") - sysdir="$sysbasedir" - ;; - "HERA") - sysdir="$sysbasedir" - ;; - "ORION") - sysdir="$sysbasedir" - ;; - "JET") - sysdir="$sysbasedir/${yyyymmdd}${hh}/postprd" - ;; - "ODIN") - sysdir="$sysbasedir" - ;; - "CHEYENNE") - sysdir="$sysbasedir" - ;; - *) - quit_unless_user_spec_data - ;; - esac - ;; + sysdir=$sysbasedir + # Use the basedir unless otherwise specified for special platform + # cases below. 
+ if [ -n "${sysbasedir}" ] ; then + case "${extrn_mdl_name}" in + "FV3GFS") + case "$MACHINE" in + "WCOSS_CRAY") + sysdir="$sysbasedir/gfs.${yyyymmdd}/${hh}/atmos" + ;; + "WCOSS_DELL_P3") + sysdir="$sysbasedir/gfs.${yyyymmdd}/${hh}/atmos" + ;; + "HERA") + sysdir="$sysbasedir/gfs.${yyyymmdd}/${hh}/atmos" + ;; + "ODIN") + sysdir="$sysbasedir/${yyyymmdd}" + ;; + "CHEYENNE") + sysdir="$sysbasedir/gfs.${yyyymmdd}/${hh}" + ;; + esac + ;; + + "RAP") + case "$MACHINE" in + "JET") + sysdir="$sysbasedir/${yyyymmdd}${hh}/postprd" + ;; + esac + ;; + + "HRRR") + case "$MACHINE" in + "JET") + sysdir="$sysbasedir/${yyyymmdd}${hh}/postprd" + ;; + esac + ;; - "HRRR") - case "$MACHINE" in - "WCOSS_CRAY") - sysdir="$sysbasedir" - ;; - "WCOSS_DELL_P3") - sysdir="$sysbasedir" - ;; - "HERA") - sysdir="$sysbasedir" - ;; - "ORION") - sysdir="$sysbasedir" - ;; - "JET") - sysdir="$sysbasedir/${yyyymmdd}${hh}/postprd" - ;; - "ODIN") - sysdir="$sysbasedir" - ;; - "CHEYENNE") - sysdir="$sysbasedir" - ;; - *) - quit_unless_user_spec_data - ;; esac - ;; - - "NAM") - case "$MACHINE" in - "WCOSS_CRAY") - sysdir="$sysbasedir" - ;; - "WCOSS_DELL_P3") - sysdir="$sysbasedir" - ;; - "HERA") - sysdir="$sysbasedir" - ;; - "ORION") - sysdir="$sysbasedir" - ;; - "JET") - sysdir="$sysbasedir" - ;; - "ODIN") - sysdir="$sysbasedir" - ;; - "CHEYENNE") - sysdir="$sysbasedir" - ;; - *) - quit_unless_user_spec_data - ;; - esac - ;; - - - *) - quit_unless_user_spec_data - esac -# -#----------------------------------------------------------------------- -# -# Set parameters associated with the mass store (HPSS) for the specified -# cycle date (cdate). These consist of: -# -# 1) The type of the archive file (e.g. tar, zip, etc). -# 2) The name of the archive file. -# 3) The full path in HPSS to the archive file. -# 4) The relative directory in the archive file in which the model output -# files are located. 
-# -# Note that these will be used by the calling script only if the archive -# file for the specified cdate actually exists on HPSS. -# -#----------------------------------------------------------------------- -# + fi + # + #----------------------------------------------------------------------- + # + # Set parameters associated with the mass store (HPSS) for the specified + # cycle date (cdate). These consist of: + # + # 1) The type of the archive file (e.g. tar, zip, etc). + # 2) The name of the archive file. + # 3) The full path in HPSS to the archive file. + # 4) The relative directory in the archive file in which the model output + # files are located. + # + # Note that these will be used by the calling script only if the archive + # file for the specified cdate actually exists on HPSS. + # + #----------------------------------------------------------------------- + # case "${extrn_mdl_name}" in "GSMGFS") @@ -890,30 +595,29 @@ bination of external model (extrn_mdl_name) and analysis or forecast "RAP") -# -# Note that this is GSL RAPX data, not operational NCEP RAP data. An option for the latter -# may be added in the future. -# -# The zip archive files for RAPX are named such that the forecast files -# for odd-numbered starting hours (e.g. 01, 03, ..., 23) are stored -# together with the forecast files for the corresponding preceding even- -# numbered starting hours (e.g. 00, 02, ..., 22, respectively), in an -# archive file whose name contains only the even-numbered hour. Thus, -# in forming the name of the archive file, if the starting hour (hh) is -# odd, we reduce it by one to get the corresponding even-numbered hour -# and use that to form the archive file name. -# + # + # Note that this is GSL RAPX data, not operational NCEP RAP data. + # An option for the latter may be added in the future. + # + # The zip archive files for RAPX are named such that the forecast + # files for odd-numbered starting hours (e.g. 
01, 03, ..., 23) are + stored together with the forecast files for the corresponding + preceding even numbered starting hours (e.g. 00, 02, ..., 22, + respectively), in an archive file whose name contains only the + even-numbered hour. Thus, in forming the name of the archive + file, if the starting hour (hh) is odd, we reduce it by one to get + the corresponding even-numbered hour and use that to form the + archive file name. + # + # Convert hh to a decimal (i.e. base-10) number to avoid octal + # interpretation in bash. + hh_orig=$hh -# Convert hh to a decimal (i.e. base-10) number. We need this because -# if it starts with a 0 (e.g. 00, 01, ..., 09), bash will treat it as an -# octal number, and 08 and 09 are illegal ocatal numbers for which the -# arithmetic operations below will fail. hh=$((10#$hh)) if [ $(($hh%2)) = 1 ]; then hh=$((hh-1)) fi -# Now that the arithmetic is done, recast hh as a two-digit string because -# that is needed in constructing the names below. + # Archive file names use a 2-digit starting hour hh=$( printf "%02d\n" $hh ) arcv_dir="/BMC/fdr/Permanent/${yyyy}/${mm}/${dd}/data/fsl/rap/full/wrfnat" @@ -921,17 +625,16 @@ bination of external model (extrn_mdl_name) and analysis or forecast arcv_fns="${yyyy}${mm}${dd}${hh}00.${arcv_fmt}" arcv_fps="${arcv_dir}/${arcv_fns}" arcvrel_dir="" -# -# Reset hh to its original value in case it is used again later below. -# + + # Reset hh to its original value hh=${hh_orig} ;; "HRRR") -# -# Note that this is GSL HRRRX data, not operational NCEP HRRR data. An option for the latter -# may be added in the future. -# + # + # Note that this is GSL HRRRX data, not operational NCEP HRRR data. + # An option for the latter may be added in the future. 
+ # arcv_dir="/BMC/fdr/Permanent/${yyyy}/${mm}/${dd}/data/fsl/hrrr/conus/wrfnat" arcv_fmt="zip" arcv_fns="${yyyy}${mm}${dd}${hh}00.${arcv_fmt}" @@ -954,23 +657,23 @@ Archive file information has not been specified for this external model: ;; esac -# -# Depending on the experiment configuration, the above code may set -# arcv_fns and arcv_fps to either scalars or arrays. If they are not -# arrays, recast them as arrays because that is what is expected in the -# code below. -# + # + # Depending on the experiment configuration, the above code may set + # arcv_fns and arcv_fps to either scalars or arrays. If they are not + # arrays, recast them as arrays because that is what is expected in + # the code below. + # is_array arcv_fns || arcv_fns=( "${arcv_fns}" ) is_array arcv_fps || arcv_fps=( "${arcv_fps}" ) -# -#----------------------------------------------------------------------- -# -# Use the eval function to set the output variables. Note that each of -# these is set only if the corresponding input variable specifying the -# name to use for the output variable is not empty. -# -#----------------------------------------------------------------------- -# + # + #----------------------------------------------------------------------- + # + # Use the eval function to set the output variables. Note that each + # of these is set only if the corresponding input variable specifying + # the name to use for the output variable is not empty. + # + #----------------------------------------------------------------------- + # if [ ! -z "${varname_extrn_mdl_cdate}" ]; then eval ${varname_extrn_mdl_cdate}="${cdate}" fi @@ -1011,12 +714,12 @@ Archive file information has not been specified for this external model: if [ ! -z "${varname_extrn_mdl_arcvrel_dir}" ]; then eval ${varname_extrn_mdl_arcvrel_dir}="${arcvrel_dir}" fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. 
-# -#----------------------------------------------------------------------- -# + # + #----------------------------------------------------------------------- + # + # Restore the shell options saved at the beginning of this script/function. + # + #----------------------------------------------------------------------- + # { restore_shell_opts; } > /dev/null 2>&1 } diff --git a/ush/set_extrn_mdl_params.sh b/ush/set_extrn_mdl_params.sh index c40e3a128..e315c240b 100644 --- a/ush/set_extrn_mdl_params.sh +++ b/ush/set_extrn_mdl_params.sh @@ -1,81 +1,52 @@ # #----------------------------------------------------------------------- # -# This file defines and then calls a function that sets parameters -# associated with the external model used for initial conditions (ICs) -# and the one used for lateral boundary conditions (LBCs). +# This file defines and then calls a function that sets known locations +# of files on supported platforms. # #----------------------------------------------------------------------- # -function set_extrn_mdl_params() { -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -local scrfunc_fn=$( basename "${scrfunc_fp}" ) -local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# -local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Set the system directory (i.e. 
location on disk, not on HPSS) in which -# the files generated by the external model specified by EXTRN_MDL_NAME_ICS -# that are necessary for generating initial condition (IC) and surface -# files for the FV3SAR are stored (usually for a limited time, e.g. for -# the GFS external model, 2 weeks on WCOSS and 2 days on hera). If for -# a given cycle these files are available in this system directory, they -# will be copied over to a subdirectory under the cycle directory. If -# these files are not available in the system directory, then we search -# for them elsewhere, e.g. in the mass store (HPSS). -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "nco" ]; then - - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-$COMINgfs}" - -else - - case "${EXTRN_MDL_NAME_ICS}" in +function set_known_sys_dir() { + + # Usage: + # set_known_sys_dir model + # + # model is the name of the external model + # + local known_sys_dir model_name + + model=$1 + # + #----------------------------------------------------------------------- + # + # Set the system directory (i.e. location on disk, not on HPSS) in + # which the files generated by the external model specified by + # EXTRN_MDL_NAME_ICS that are necessary for generating initial + # condition (IC) and surface files for the FV3SAR are stored (usually + # for a limited time, e.g. for the GFS external model, 2 weeks on + # WCOSS and 2 days on hera). If for a given cycle these files are + # available in this system directory, they will be copied over to a + # subdirectory under the cycle directory. If these files are not + # available in the system directory, then we search for them + # elsewhere, e.g. in the mass store (HPSS). + # + #----------------------------------------------------------------------- + # + + # Set some default known locations on supported platforms. 
Not all + # platforms have known input locations + case "${model}" in "GSMGFS") case "$MACHINE" in - "WCOSS_CRAY") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-}" - ;; - "WCOSS_DELL_P3") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-}" - ;; - "HERA") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-}" - ;; - "ORION") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-}" - ;; - "JET") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-}" - ;; "ODIN") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/scratch/ywang/EPIC/GDAS/2019053000_mem001}" + known_sys_dir=/scratch/ywang/EPIC/GDAS/2019053000_mem001 ;; "CHEYENNE") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/glade/p/ral/jntp/UFS_CAM/COMGFS}" + known_sys_dir=/glade/p/ral/jntp/UFS_CAM/COMGFS ;; "STAMPEDE") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/scratch/00315/tg455890/GDAS/20190530/2019053000_mem001}" + known_sys_dir=/scratch/00315/tg455890/GDAS/20190530/2019053000_mem001 ;; esac ;; @@ -83,28 +54,24 @@ else "FV3GFS") case "$MACHINE" in "WCOSS_CRAY") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/gpfs/dell1/nco/ops/com/gfs/prod}" - ;; + ;& # Fall through "WCOSS_DELL_P3") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/gpfs/dell1/nco/ops/com/gfs/prod}" + known_sys_dir=/gpfs/dell1/nco/ops/com/gfs/prod ;; "HERA") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/scratch1/NCEPDEV/rstprod/com/gfs/prod}" - ;; - "ORION") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-}" + known_sys_dir=/scratch1/NCEPDEV/rstprod/com/gfs/prod ;; "JET") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/public/data/grids/gfs/nemsio}" + known_sys_dir=/public/data/grids/gfs/nemsio ;; "ODIN") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/scratch/ywang/test_runs/FV3_regional/gfs}" + known_sys_dir=/scratch/ywang/test_runs/FV3_regional/gfs ;; "STAMPEDE") - 
EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/scratch/00315/tg455890/GDAS/20190530/2019053000_mem001}"
+        known_sys_dir=/scratch/00315/tg455890/GDAS/20190530/2019053000_mem001
         ;;
       "CHEYENNE")
-        EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/glade/p/ral/jntp/UFS_CAM/COMGFS}"
+        known_sys_dir=/glade/p/ral/jntp/UFS_CAM/COMGFS
         ;;
     esac
     ;;
@@ -112,22 +79,9 @@ else
   "RAP")
     case "$MACHINE" in
       "WCOSS_CRAY")
-        EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/gpfs/hps/nco/ops/com/rap/prod}"
-        ;;
+        ;& # Fall through
       "WCOSS_DELL_P3")
-        EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/gpfs/hps/nco/ops/com/rap/prod}"
-        ;;
-      "HERA")
-        EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/scratch2/BMC/public/data/gsd/rap/full/wrfnat}"
-        ;;
-      "ORION")
-        EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-}"
-        ;;
-      "JET")
-        EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/misc/whome/rtrr/rap}"
-        ;;
-      "CHEYENNE")
-        EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-dummy_value}"
+        known_sys_dir=/gpfs/hps/nco/ops/com/rap/prod
         ;;
     esac
     ;;
@@ -135,22 +89,9 @@ else
   "HRRR")
     case "$MACHINE" in
       "WCOSS_CRAY")
-        EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/gpfs/hps/nco/ops/com/hrrr/prod}"
-        ;;
+        ;& # Fall through
      "WCOSS_DELL_P3")
-        EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/gpfs/hps/nco/ops/com/hrrr/prod}"
-        ;;
-      "HERA")
-        EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/scratch2/BMC/public/data/gsd/hrrr/conus/wrfnat}"
-        ;;
-      "ORION")
-        EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-}"
-        ;;
-      "JET")
-        EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/misc/whome/rtrr/hrrr}"
-        ;;
-      "CHEYENNE")
-        EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-dummy_value}"
+        known_sys_dir=/gpfs/hps/nco/ops/com/hrrr/prod
        ;;
     esac
     ;;
@@ -158,216 +99,53 @@ else
   "NAM")
     case "$MACHINE" in
       "WCOSS_CRAY")
-        EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/gpfs/dell1/nco/ops/com/nam/prod}"
-        ;;
+        ;& # Fall through
       "WCOSS_DELL_P3")
-        
EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-/gpfs/dell1/nco/ops/com/nam/prod}" - ;; - "HERA") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-dummy_value}" - ;; - "CHEYENNE") - EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-dummy_value}" + known_sys_dir=/gpfs/dell1/nco/ops/com/nam/prod ;; esac ;; esac -fi -# -# If EXTRN_MDL_SYSBASEDIR_ICS has not been set (not even to a null string), -# print out an error message and exit. -# -if [ -z "${EXTRN_MDL_SYSBASEDIR_ICS+x}" ]; then - print_err_msg_exit "\ -The variable EXTRN_MDL_SYSBASEDIR_ICS specifying the system directory -in which to look for the files generated by the external model for ICs -has not been set for the current combination of machine (MACHINE) and -external model (EXTRN_MDL_NAME_ICS): - MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" -fi -# -#----------------------------------------------------------------------- -# -# Set EXTRN_MDL_LBCS_OFFSET_HRS, which is the number of hours to shift -# the starting time of the external model that provides lateral boundary -# conditions. -# -#----------------------------------------------------------------------- -# -case "${EXTRN_MDL_NAME_LBCS}" in - "GSMGFS") - EXTRN_MDL_LBCS_OFFSET_HRS="0" - ;; - "FV3GFS") - EXTRN_MDL_LBCS_OFFSET_HRS="0" - ;; - "RAP") - EXTRN_MDL_LBCS_OFFSET_HRS="3" - ;; - "HRRR") - EXTRN_MDL_LBCS_OFFSET_HRS="0" - ;; - "NAM") - EXTRN_MDL_LBCS_OFFSET_HRS="0" - ;; -esac -# -#----------------------------------------------------------------------- -# -# Set the system directory (i.e. location on disk, not on HPSS) in which -# the files generated by the external model specified by EXTRN_MDL_NAME_LBCS -# that are necessary for generating lateral boundary condition (LBC) files -# for the FV3SAR are stored (usually for a limited time, e.g. for the GFS -# external model, 2 weeks on WCOSS and 2 days on hera). 
If for a given -# cycle these files are available in this system directory, they will be -# copied over to a subdirectory under the cycle directory. If these files -# are not available in the system directory, then we search for them -# elsewhere, e.g. in the mass store (HPSS). -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "nco" ]; then - - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-$COMINgfs}" - -else + echo $known_sys_dir +} +function set_extrn_mdl_params() { + # + #----------------------------------------------------------------------- + # + # Use known locations or COMINgfs as default, depending on RUN_ENVIR + # + #----------------------------------------------------------------------- + # + if [ "${RUN_ENVIR}" = "nco" ]; then + EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-$COMINgfs}" + EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-$COMINgfs}" + else + EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS:-$(set_known_sys_dir \ + ${EXTRN_MDL_NAME_ICS})}" + EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-$(set_known_sys_dir \ + ${EXTRN_MDL_NAME_LBCS})}" + fi + + # + #----------------------------------------------------------------------- + # + # Set EXTRN_MDL_LBCS_OFFSET_HRS, which is the number of hours to shift + # the starting time of the external model that provides lateral boundary + # conditions. 
+ # + #----------------------------------------------------------------------- + # case "${EXTRN_MDL_NAME_LBCS}" in - - "GSMGFS") - case "$MACHINE" in - "WCOSS_CRAY") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-}" - ;; - "WCOSS_DELL_P3") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-}" - ;; - "HERA") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-}" - ;; - "ORION") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-}" - ;; - "JET") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-}" - ;; - "ODIN") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/scratch/ywang/EPIC/GDAS/2019053000_mem001}" - ;; - "CHEYENNE") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/glade/p/ral/jntp/UFS_CAM/COMGFS}" - ;; - "STAMPEDE") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/scratch/00315/tg455890/GDAS/20190530/2019053000_mem001}" - ;; - esac - ;; - - "FV3GFS") - case "$MACHINE" in - "WCOSS_CRAY") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/gpfs/dell1/nco/ops/com/gfs/prod}" - ;; - "WCOSS_DELL_P3") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/gpfs/dell1/nco/ops/com/gfs/prod}" - ;; - "HERA") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/scratch1/NCEPDEV/rstprod/com/gfs/prod}" - ;; - "ORION") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-}" - ;; - "JET") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/public/data/grids/gfs/nemsio}" - ;; - "ODIN") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/scratch/ywang/test_runs/FV3_regional/gfs}" - ;; - "CHEYENNE") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/glade/p/ral/jntp/UFS_CAM/COMGFS}" - ;; - "STAMPEDE") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/scratch/00315/tg455890/GDAS/20190530/2019053000_mem001}" - ;; - esac - ;; - - "RAP") - case "$MACHINE" in - "WCOSS_CRAY") - EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/gpfs/hps/nco/ops/com/rap/prod}" 
-        ;;
-      "WCOSS_DELL_P3")
-        EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/gpfs/hps/nco/ops/com/rap/prod}"
-        ;;
-      "HERA")
-        EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/scratch2/BMC/public/data/gsd/rap/full/wrfnat}"
-        ;;
-      "ORION")
-        EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-}"
-        ;;
-      "JET")
-        EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/misc/whome/rtrr/rap}"
-        ;;
-      "CHEYENNE")
-        EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-dummy_value}"
+    "RAP")
+      EXTRN_MDL_LBCS_OFFSET_HRS=${EXTRN_MDL_LBCS_OFFSET_HRS:-"3"}
       ;;
-    esac
-    ;;
-
-  "HRRR")
-    case "$MACHINE" in
-      "WCOSS_CRAY")
-        EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/gpfs/hps/nco/ops/com/hrrr/prod}"
-        ;;
-      "WCOSS_DELL_P3")
-        EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/gpfs/hps/nco/ops/com/hrrr/prod}"
-        ;;
-      "HERA")
-        EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/scratch2/BMC/public/data/gsd/hrrr/conus/wrfnat}"
+    *)
+      EXTRN_MDL_LBCS_OFFSET_HRS=${EXTRN_MDL_LBCS_OFFSET_HRS:-"0"}
      ;;
-      "ORION")
-        EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-}"
-        ;;
-      "JET")
-        EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-/misc/whome/rtrr/hrrr}"
-        ;;
-      "CHEYENNE")
-        EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-dummy_value}"
-        ;;
-    esac
-    ;;
-
-  "NAM")
-    case "$MACHINE" in
-      "HERA")
-        EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-dummy_value}"
-        ;;
-      "CHEYENNE")
-        EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS:-dummy_value}"
-        ;;
-    esac
-    ;;
-
-  esac
-
-fi
-#
-# If EXTRN_MDL_SYSBASEDIR_LBCS has not been set (not even to a null string),
-# print out an error message and exit. 
-# -if [ -z "${EXTRN_MDL_SYSBASEDIR_LBCS+x}" ]; then - print_err_msg_exit "\ -The variable EXTRN_MDL_SYSBASEDIR_LBCS specifying the system directory -in which to look for the files generated by the external model for LBCs -has not been set for the current combination of machine (MACHINE) and -external model (EXTRN_MDL_NAME_LBCS): - MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" -fi } # #----------------------------------------------------------------------- From 448a421399ca66c0c22c8dfb793d64ef391cd3ab Mon Sep 17 00:00:00 2001 From: gsketefian <31046882+gsketefian@users.noreply.github.com> Date: Thu, 13 Jan 2022 09:24:27 -0700 Subject: [PATCH 12/15] Enhance ability to use template variables (#650) ## DESCRIPTION OF CHANGES: 1. Enhance ability to use template variables in the experiment configuration file (either in the default configuration file `config_defaults.sh` or the user configuration file `config.sh`). 2. Modify WE2E test system to include test of template variable use. 3. Fix bugs. ### Notes on template variables: A template variable (or simply a template) is an experiment variable that contains in its definition a reference to another variable(s). The referenced variable can be another experiment variable (i.e. one that is defined in `var_defns.sh`), or it can be a local variable (i.e. one that is not defined in `var_defns.sh` but in the script or function that sources `var_defns.sh` and uses the template). For example, a template named `TEMPL_VAR` my be defined in `config_defaults.sh` or `config.sh` as `TEMPL_VAR='cd ${some_dir}'` where `some_dir` may be an experiment variable or a local variable. `TEMPL_VAR` can then be evaluated using bash's `eval` built-in command in a script or function that first sources `var_defns.sh` and, if necessary, defines `some_dir`. Note that single quotes must be used on the right-hand side to avoid expansion of `${some_dir}` before run time (i.e. when `eval` is called on `TEMPL_VAR`). 
For details, see the documentation added in PR #[198](https://github.com/ufs-community/ufs-srweather-app/pull/198). ### Changes to WE2E tests: * Modify the WE2E test configuration file `config.deactivate_tasks.sh` to include template variables. `deactivate_tasks` now serves as a test of both deactivating tasks and of using template variables. * Add `template_vars` as an alternate test name for `deactivate_tasks` (by creating a symlink named `config.template_vars.sh` that points to `config.deactivate_tasks.sh`). ### Bug fixes: * In `get_WE2Etest_names_subdirs_descs.sh`, change the variable `alt_test_subdirs` to `alt_test_names` at a single location. * In `setup.sh`, set `BUILD_ENV_FN` and `WFLOW_ENV_FN` (instead of in `load_modules_run_task.sh` and `launch_FV3LAM_wflow.sh`, respectively). This way, these variables will have the correct values in `var_defns.sh`. * In `get_expts_status.sh`, fix the way `homerrfs` is calculated. ## TESTS CONDUCTED: The WE2E tests `grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2` and `template_vars` were run on Hera. Both completed successfully. ## DOCUMENTATION: Documentation is added to the User's Guide via PR #[198](https://github.com/ufs-community/ufs-srweather-app/pull/198) into the ufs-srweather-app repo. ## Dependencies: PR #[198](https://github.com/ufs-community/ufs-srweather-app/pull/198) for the documentation. ## CONTRIBUTORS: @christinaholtNOAA and @mkavulich brought up the issue of templates as part of PR #[617](https://github.com/NOAA-EMC/regional_workflow/pull/617). 
--- .../WE2E/get_WE2Etest_names_subdirs_descs.sh | 4 +- tests/WE2E/get_expts_status.sh | 2 +- tests/WE2E/run_WE2E_tests.sh | 4 +- .../wflow_features/config.deactivate_tasks.sh | 46 +- .../wflow_features/config.template_vars.sh | 1 + ush/bash_utils/check_var_valid_value.sh | 28 +- ush/bash_utils/get_bash_file_contents.sh | 71 ++ ush/bash_utils/print_input_args.sh | 2 +- ush/check_expt_config_vars.sh | 110 +++ ush/compare_config_scripts.sh | 151 ---- ush/config_defaults.sh | 29 +- ush/generate_FV3LAM_wflow.sh | 4 +- ush/launch_FV3LAM_wflow.sh | 127 +-- ush/load_modules_run_task.sh | 3 +- ush/setup.sh | 742 ++++++++---------- ush/source_util_funcs.sh | 9 + 16 files changed, 685 insertions(+), 648 deletions(-) create mode 120000 tests/WE2E/test_configs/wflow_features/config.template_vars.sh create mode 100644 ush/bash_utils/get_bash_file_contents.sh create mode 100644 ush/check_expt_config_vars.sh delete mode 100644 ush/compare_config_scripts.sh diff --git a/tests/WE2E/get_WE2Etest_names_subdirs_descs.sh b/tests/WE2E/get_WE2Etest_names_subdirs_descs.sh index 257cd8912..2183ead89 100755 --- a/tests/WE2E/get_WE2Etest_names_subdirs_descs.sh +++ b/tests/WE2E/get_WE2Etest_names_subdirs_descs.sh @@ -759,7 +759,7 @@ This is probably because it is a directory. Please correct and rerun." test_names=("${prim_test_names[@]}") test_subdirs=("${prim_test_subdirs[@]}") if [ "${num_alt_tests}" -gt "0" ]; then - test_names+=("${alt_test_subdirs[@]:-}") + test_names+=("${alt_test_names[@]:-}") test_subdirs+=("${alt_test_subdirs[@]:-}") fi # @@ -1025,7 +1025,7 @@ Please correct and rerun." # listed first. # # Finally, we extract from test_ids_and_inds_sorted the second number -# in each element (the one afte the first number, which is the test ID, +# in each element (the one after the first number, which is the test ID, # and the test type, which we no longer need), which is the original # array index before sorting, and save the results in the array sort_inds. 
# This array will contain the original indices in sorted order that we diff --git a/tests/WE2E/get_expts_status.sh b/tests/WE2E/get_expts_status.sh index 5c05acafb..997bfb6b8 100755 --- a/tests/WE2E/get_expts_status.sh +++ b/tests/WE2E/get_expts_status.sh @@ -50,7 +50,7 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -homerrfs=${scrfunc_dir%/*} +homerrfs=${scrfunc_dir%/*/*} # #----------------------------------------------------------------------- # diff --git a/tests/WE2E/run_WE2E_tests.sh b/tests/WE2E/run_WE2E_tests.sh index b8eaeec74..dd1f50613 100755 --- a/tests/WE2E/run_WE2E_tests.sh +++ b/tests/WE2E/run_WE2E_tests.sh @@ -538,8 +538,8 @@ accordingly and rerun." if [ "${match_found}" = "FALSE" ]; then avail_WE2E_test_names_str=$( printf " \"%s\"\n" "${avail_WE2E_test_names[@]}" ) print_err_msg_exit "\ -The name current user-specified test to run (user_spec_test) does not -match any of the names (either primary or alternate) of the available +The name of the current user-specified test to run (user_spec_test) does +not match any of the names (either primary or alternate) of the available WE2E tests: user_spec_test = \"${user_spec_test}\" Valid values for user_spec_test consist of the names (primary or alternate) diff --git a/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.sh b/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.sh index 52aad5cfa..2375a648f 100644 --- a/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.sh +++ b/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.sh @@ -2,10 +2,26 @@ # TEST PURPOSE/DESCRIPTION: # ------------------------ # -# This test ensures that the various workflow tasks can be deactivated, -# i.e. removed from the Rocoto XML. Note that we leave the MAKE_GRID_TN, -# MAKE_OROG_TN, and MAKE_SFC_CLIMO_TN activated because there is a -# separate test for turning those off. 
+# This test has two purposes: +# +# 1) It checks that the various workflow tasks can be deactivated, i.e. +# removed from the Rocoto XML. +# 2) It checks the capability of the workflow to use "template" experiment +# variables, i.e. variables whose definitions include references to +# other variables, e.g. +# +# MY_VAR='\${ANOTHER_VAR}' +# +# Note that we do not deactivate all tasks in the workflow; we leave the +# MAKE_GRID_TN, MAKE_OROG_TN, and MAKE_SFC_CLIMO_TN activated because: +# +# 1) There is already a WE2E test that runs with these three tasks +# deactivated (that test is to ensure that pre-generated grid, +# orography, and surface climatology files can be used). +# 2) In checking the template variable capability, we want to make sure +# that the variable defintions file (GLOBAL_VAR_DEFNS_FN) generated +# does not have syntax or other errors in it by sourcing it in these +# three tasks. # RUN_ENVIR="community" @@ -14,13 +30,31 @@ PREEXISTING_DIR_METHOD="rename" PREDEF_GRID_NAME="RRFS_CONUS_25km" CCPP_PHYS_SUITE="FV3_GFS_v15p2" -DATE_FIRST_CYCL="20190615" -DATE_LAST_CYCL="20190615" +EXTRN_MDL_NAME_ICS="FV3GFS" +EXTRN_MDL_NAME_LBCS="FV3GFS" +USE_USER_STAGED_EXTRN_FILES="TRUE" + +DATE_FIRST_CYCL="20190701" +DATE_LAST_CYCL="20190701" CYCL_HRS=( "00" ) +FCST_LEN_HRS="6" +LBC_SPEC_INTVL_HRS="3" + RUN_TASK_GET_EXTRN_ICS="FALSE" RUN_TASK_GET_EXTRN_LBCS="FALSE" RUN_TASK_MAKE_ICS="FALSE" RUN_TASK_MAKE_LBCS="FALSE" RUN_TASK_RUN_FCST="FALSE" RUN_TASK_RUN_POST="FALSE" +# +# The following shows examples of how to define template variables. Here, +# we define RUN_CMD_UTILS, RUN_CMD_FCST, and RUN_CMD_POST as template +# variables. Note that during this test, these templates aren't actually +# expanded/used (something that would be done using bash's "eval" built-in +# command) anywhere in the scripts. They are included here only to verify +# that the test completes with some variables defined as templates. 
+# +RUN_CMD_UTILS='cd $yyyymmdd' +RUN_CMD_FCST='mpirun -np ${PE_MEMBER01}' +RUN_CMD_POST='echo hello $yyyymmdd' diff --git a/tests/WE2E/test_configs/wflow_features/config.template_vars.sh b/tests/WE2E/test_configs/wflow_features/config.template_vars.sh new file mode 120000 index 000000000..80ede5437 --- /dev/null +++ b/tests/WE2E/test_configs/wflow_features/config.template_vars.sh @@ -0,0 +1 @@ +config.deactivate_tasks.sh \ No newline at end of file diff --git a/ush/bash_utils/check_var_valid_value.sh b/ush/bash_utils/check_var_valid_value.sh index 7a0e20707..576ad6b1b 100644 --- a/ush/bash_utils/check_var_valid_value.sh +++ b/ush/bash_utils/check_var_valid_value.sh @@ -108,24 +108,34 @@ The value specified in ${var_name} is not supported: # #----------------------------------------------------------------------- # -# Check whether var_value is equal to one of the elements of the array -# valid_var_values. If not, print out an error message and exit the -# calling script. +# If var_value contains a dollar sign, we assume the corresponding variable +# (var_name) is a template variable, i.e. one whose value contains a +# reference to another variable, e.g. +# +# MY_VAR='\${ANOTHER_VAR}' +# +# In this case, we do nothing since it does not make sense to check +# whether var_value is a valid value (since its contents have not yet +# been expanded). If var_value doesn't contain a dollar sign, it must +# contain a literal string. In this case, we check whether it is equal +# to one of the elements of the array valid_var_values. If not, we +# print out an error message and exit the calling script. 
# #----------------------------------------------------------------------- # - is_element_of "valid_var_values" "${var_value}" || { \ - valid_var_values_str=$(printf "\"%s\" " "${valid_var_values[@]}"); - print_err_msg_exit "\ + if [[ "${var_value}" != *'$'* ]]; then + is_element_of "valid_var_values" "${var_value}" || { \ + valid_var_values_str=$(printf "\"%s\" " "${valid_var_values[@]}"); + print_err_msg_exit "\ ${err_msg} ${var_name} must be set to one of the following: ${valid_var_values_str}"; \ - } + } + fi # #----------------------------------------------------------------------- # -# Restore the shell options saved at the beginning of this script/func- -# tion. +# Restore the shell options saved at the beginning of this script/function. # #----------------------------------------------------------------------- # diff --git a/ush/bash_utils/get_bash_file_contents.sh b/ush/bash_utils/get_bash_file_contents.sh new file mode 100644 index 000000000..3b3ab7b30 --- /dev/null +++ b/ush/bash_utils/get_bash_file_contents.sh @@ -0,0 +1,71 @@ +# +#----------------------------------------------------------------------- +# +# This file defines a function that returns the contents of a bash script/ +# function with all empty lines, comment lines, and leading and trailing +# whitespace removed. Arguments are as follows: +# +# fp: +# The relative or full path to the file containing the bash script or +# function. +# +# output_varname_contents: +# Name of the output variable that will contain the (processed) contents +# of the file. This is the output of the function. +# +#----------------------------------------------------------------------- +# +function get_bash_file_contents() { + + { save_shell_opts; set -u +x; } > /dev/null 2>&1 + + local valid_args=( \ + "fp" \ + "output_varname_contents" \ + ) + process_args valid_args "$@" + print_input_args "valid_args" + # + # Verify that the required arguments to this function have been specified. 
+ # If not, print out an error message and exit. + # + if [ -z "$fp" ]; then + print_err_msg_exit "\ +The argument \"fp\" specifying the relative or full path to the file to +read was not specified in the call to this function: + fp = \"$fp\"" + fi + + local contents \ + crnt_line + # + # Read in all lines in the file. In doing so: + # + # 1) Concatenate any line ending with the bash line continuation character + # (a backslash) with the following line. + # 2) Remove any leading and trailing whitespace. + # + # Note that these two actions are automatically performed by the "read" + # utility in the while-loop below. + # + contents="" + while read crnt_line; do + contents="${contents}${crnt_line} +" + done < "$fp" + # + # Strip out any comment and empty lines from contents. + # + contents=$( printf "${contents}" | \ + $SED -r -e "/^#.*/d" `# Remove comment lines.` \ + -e "/^$/d" `# Remove empty lines.` \ + ) + # + # Set output variables. + # + printf -v ${output_varname_contents} "${contents}" + + { restore_shell_opts; } > /dev/null 2>&1 + +} + diff --git a/ush/bash_utils/print_input_args.sh b/ush/bash_utils/print_input_args.sh index 957cec524..d5ba5bd53 100644 --- a/ush/bash_utils/print_input_args.sh +++ b/ush/bash_utils/print_input_args.sh @@ -169,7 +169,7 @@ have been set as follows: #----------------------------------------------------------------------- # # If a global variable named DEBUG is not defined, print out the message. -# If it is defined, print out the message only if DEBUG is set to TRUE. +# If it is defined, print out the message only if DEBUG is set to "TRUE". 
# #----------------------------------------------------------------------- # diff --git a/ush/check_expt_config_vars.sh b/ush/check_expt_config_vars.sh new file mode 100644 index 000000000..53ce13a09 --- /dev/null +++ b/ush/check_expt_config_vars.sh @@ -0,0 +1,110 @@ +# +#----------------------------------------------------------------------- +# +# This file defines a function that checks that all experiment variables +# set in the user-specified experiment configuration file are defined (by +# being assigned default values) in the default experiment configuration +# file. If a variable is found in the former that is not defined in the +# latter, this function exits with an error message. +# +# This check is performed in order to prevent the user from defining +# arbitrary variables in the user-specified configuration file; the +# latter should be used to specify only varaibles that have already been +# defined in the default configuration file. +# +# Arguments are as follows: +# +# default_config_fp: +# The relative or full path to the default experiment configuration file. +# +# config_fp: +# The relative or full path to the user-specified experiment configuration +# file. +# +#----------------------------------------------------------------------- +# +function check_expt_config_vars() { + + . ${scrfunc_dir}/source_util_funcs.sh + + { save_shell_opts; set -u +x; } > /dev/null 2>&1 + + local valid_args=( \ + "default_config_fp" \ + "config_fp" \ + ) + process_args valid_args "$@" + print_input_args "valid_args" + + local var_list_default \ + var_list_user \ + crnt_line \ + var_name \ + regex_search + # + # Get the list of variable definitions, first from the default experiment + # configuration file and then from the user-specified experiment + # configuration file. 
+ #
+ get_bash_file_contents fp="${default_config_fp}" \
+ output_varname_contents="var_list_default"
+
+ get_bash_file_contents fp="${config_fp}" \
+ output_varname_contents="var_list_user"
+ #
+ # Loop through each line/variable in var_list_user. For each line,
+ # extract the name of the variable that is being set (say VAR) and
+ # check that this variable is set somewhere in the default configuration
+ # file by verifying that a line that starts with "VAR=" exists in
+ # var_list_default.
+ #
+ while read crnt_line; do
+ #
+ # Note that a variable name will be found only if the equal sign immediately
+ # follows the variable name.
+ #
+ var_name=$( printf "%s" "${crnt_line}" | $SED -n -r -e "s/^([^ =\"]*)=.*/\1/p")
+
+ if [ -z "${var_name}" ]; then
+
+ print_info_msg "
+The current line (crnt_line) of the user-specified experiment configuration
+file (config_fp) does not contain a variable name (i.e. var_name is empty):
+ config_fp = \"${config_fp}\"
+ crnt_line = \"${crnt_line}\"
+ var_name = \"${var_name}\"
+Skipping to next line."
+
+ else
+ #
+ # Use grep to search for the variable name (followed by an equal sign,
+ # all at the beginning of a line) in the list of variables in the default
+ # configuration file.
+ #
+ # Note that we use a herestring to input into grep the list of variables
+ # in the default configuration file. grep will return with a zero status
+ # if the specified string (regex_search) is found in the default
+ # variables list and a nonzero status otherwise. Note also that we
+ # redirect the output of grep to null because we are only interested in
+ # its exit status.
+ # + regex_search="^${var_name}=" + grep "${regex_search}" <<< "${var_list_default}" > /dev/null 2>&1 || \ + print_err_msg_exit "\ +The variable (var_name) defined on the current line (crnt_line) of the +user-specified experiment configuration file (config_fp) does not appear +in the default experiment configuration file (default_config_fp): + config_fp = \"${config_fp}\" + default_config_fp = \"${default_config_fp}\" + crnt_line = \"${crnt_line}\" + var_name = \"${var_name}\" +Please assign a default value to this variable in the default configuration +file and rerun." + + fi + + done <<< "${var_list_user}" + + { restore_shell_opts; } > /dev/null 2>&1 + +} diff --git a/ush/compare_config_scripts.sh b/ush/compare_config_scripts.sh deleted file mode 100644 index 791fa4e34..000000000 --- a/ush/compare_config_scripts.sh +++ /dev/null @@ -1,151 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines and then calls a function that checks that all vari- -# ables defined in the user-specified experiment/workflow configuration -# file (whose file name is stored in the variable EXPT_CONFIG_FN) are -# also assigned default values in the default configuration file (whose -# file name is stored in the variable EXPT_DEFAULT_CONFIG_FN). -# -#----------------------------------------------------------------------- -# -function compare_config_scripts() { -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# -local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -local scrfunc_fn=$( basename "${scrfunc_fp}" ) -local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# -local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Source bash utility functions. -# -#----------------------------------------------------------------------- -# -. ${scrfunc_dir}/source_util_funcs.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Create a list of variable settings in the default workflow/experiment -# file (script) by stripping out comments, blank lines, extraneous lead- -# ing whitespace, etc from that file and saving the result in the varia- -# ble var_list_default. Each line of var_list_default will have the -# form -# -# VAR=... -# -# where the VAR is a variable name and ... is the value (including any -# trailing comments). Then create an equivalent list for the local con- -# figuration file and save the result in var_list_local. 
-# -#----------------------------------------------------------------------- -# -var_list_default=$( \ -$SED -r \ - -e "s/^([ ]*)([^ ]+.*)/\2/g" \ - -e "/^#.*/d" \ - -e "/^$/d" \ - ${EXPT_DEFAULT_CONFIG_FN} \ -) - -var_list_local=$( \ -$SED -r \ - -e "s/^([ ]*)([^ ]+.*)/\2/g" \ - -e "/^#.*/d" \ - -e "/^$/d" \ - ${EXPT_CONFIG_FN} \ -) -# -#----------------------------------------------------------------------- -# -# Loop through each line of var_list_local. For each line, extract the -# the name of the variable that is being set (say VAR) and check that -# this variable is set somewhere in the default configuration file by -# verifying that a line that starts with "VAR=" exists in var_list_de- -# fault. -# -#----------------------------------------------------------------------- -# -while read crnt_line; do -# -# Note that a variable name will be found only if the equal sign immed- -# iately follows the variable name. -# - var_name=$( printf "%s" "${crnt_line}" | $SED -n -r -e "s/^([^ =\"]*)=.*/\1/p") - - if [ -z "${var_name}" ]; then - - print_info_msg " -Current line (crnt_line) of user-specified experiment/workflow configu- -ration file (EXPT_CONFIG_FN) does not contain a variable name (i.e. -var_name is empty): - EXPT_CONFIG_FN = \"${EXPT_CONFIG_FN}\" - crnt_line = \"${crnt_line}\" - var_name = \"${var_name}\" -Skipping to next line." - - else -# -# Use a herestring to input list of variables in the default configura- -# tion file to grep. Also, redirect the output to null because we are -# only interested in the exit status of grep (which will be nonzero if -# the specified regex was not found in the list).. 
-# - grep "^${var_name}=" <<< "${var_list_default}" > /dev/null 2>&1 || \ - print_err_msg_exit "\ -The variable specified by var_name in the user-specified experiment/ -workflow configuration file (EXPT_CONFIG_FN) does not appear in the de- -fault experiment/workflow configuration file (EXPT_DEFAULT_CONFIG_FN): - EXPT_CONFIG_FN = \"${EXPT_CONFIG_FN}\" - EXPT_DEFAULT_CONFIG_FN = \"${EXPT_DEFAULT_CONFIG_FN}\" - var_name = \"${var_name}\" -Please assign a default value to this variable in the default configura- -tion file and rerun." - - fi - -done <<< "${var_list_local}" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - -} -# -#----------------------------------------------------------------------- -# -# Call the function defined above. -# -#----------------------------------------------------------------------- -# -compare_config_scripts - diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 21d6096a1..afb942856 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -152,13 +152,14 @@ QUEUE_FCST="" # will be ignored unless WORKFLOW_MANAGER="none". Definitions: # # RUN_CMD_UTILS: -# The run command for pre-processing utilities (shave, orog, sfc_climo_gen, etc.) -# Can be left blank for smaller domains, in which case the executables will run -# without MPI. +# The run command for pre-processing utilities (shave, orog, sfc_climo_gen, +# etc.) Can be left blank for smaller domains, in which case the executables +# will run without MPI. # # RUN_CMD_FCST: -# The run command for the model forecast step. This will be appended to the end -# of the variable definitions file, so it can reference other variables. +# The run command for the model forecast step. 
This will be appended to +# the end of the variable definitions file, so it can reference other +# variables. # # RUN_CMD_POST: # The run command for post-processing (UPP). Can be left blank for smaller @@ -167,7 +168,7 @@ QUEUE_FCST="" #----------------------------------------------------------------------- # RUN_CMD_UTILS="mpirun -np 1" -RUN_CMD_FCST="mpirun -np \${PE_MEMBER01}" +RUN_CMD_FCST='mpirun -np \${PE_MEMBER01}' RUN_CMD_POST="mpirun -np 1" # #----------------------------------------------------------------------- @@ -295,7 +296,7 @@ PTMP="/base/path/of/directory/containing/postprocessed/output/files" # #----------------------------------------------------------------------- # -# Set the sparator character(s) to use in the names of the grid, mosaic, +# Set the separator character(s) to use in the names of the grid, mosaic, # and orography fixed files. # # Ideally, the same separator should be used in the names of these fixed @@ -732,6 +733,7 @@ NOMADS_file_type="nemsio" # directory or the cycle directories under it. 
# #----------------------------------------------------------------------- +# CCPP_PHYS_SUITE="FV3_GFS_v15p2" # #----------------------------------------------------------------------- @@ -1754,9 +1756,9 @@ NUM_ENS_MEMBERS="1" # #----------------------------------------------------------------------- # -DO_SHUM="false" -DO_SPPT="false" -DO_SKEB="false" +DO_SHUM="FALSE" +DO_SPPT="FALSE" +DO_SKEB="FALSE" SHUM_MAG="0.006" #Variable "shum" in input.nml SHUM_LSCALE="150000" SHUM_TSCALE="21600" #Variable "shum_tau" in input.nml @@ -1770,7 +1772,7 @@ SKEB_LSCALE="150000" SKEB_TSCALE="21600" #Variable "skeb_tau" in input.nml SKEB_INT="3600" #Variable "skebint" in input.nml SKEB_VDOF="10" -USE_ZMTNBLCK="false" +USE_ZMTNBLCK="FALSE" # #----------------------------------------------------------------------- # @@ -1787,7 +1789,7 @@ USE_ZMTNBLCK="false" # #----------------------------------------------------------------------- # -DO_SPP="false" +DO_SPP="FALSE" SPP_VAR_LIST=( "pbl" ) SPP_MAG_LIST=( "0.2" ) #Variable "spp_prt_list" in input.nml SPP_LSCALE=( "150000.0" ) @@ -1902,6 +1904,3 @@ OMP_STACKSIZE_RUN_FCST="1024m" KMP_AFFINITY_RUN_POST="scatter" OMP_NUM_THREADS_RUN_POST="1" OMP_STACKSIZE_RUN_POST="1024m" -# -#----------------------------------------------------------------------- -# diff --git a/ush/generate_FV3LAM_wflow.sh b/ush/generate_FV3LAM_wflow.sh index db65baec2..4776c3d45 100755 --- a/ush/generate_FV3LAM_wflow.sh +++ b/ush/generate_FV3LAM_wflow.sh @@ -1107,8 +1107,8 @@ if [[ $retval == 0 ]]; then # else printf " -Experiment/workflow generation failed. Check the log file from the ex- -periment/workflow generation script in the file specified by log_fp: +Experiment generation failed. Check the log file from the experiment +generation script in the file specified by log_fp: log_fp = \"${log_fp}\" Stopping. 
" diff --git a/ush/launch_FV3LAM_wflow.sh b/ush/launch_FV3LAM_wflow.sh index 76c964083..4c881be2d 100755 --- a/ush/launch_FV3LAM_wflow.sh +++ b/ush/launch_FV3LAM_wflow.sh @@ -18,7 +18,13 @@ set -u #----------------------------------------------------------------------- # if [[ $(uname -s) == Darwin ]]; then - command -v greadlink >/dev/null 2>&1 || { echo >&2 "For Darwin-based operating systems (MacOS), the 'greadlink' utility is required to run the UFS SRW Application. Reference the User's Guide for more information about platform requirements. Aborting."; exit 1; } + command -v greadlink >/dev/null 2>&1 || { \ + echo >&2 "\ +For Darwin-based operating systems (MacOS), the 'greadlink' utility is +required to run the UFS SRW Application. Reference the User's Guide for +more information about platform requirements. Aborting."; \ + exit 1; \ + } scrfunc_fp=$( greadlink -f "${BASH_SOURCE[0]}" ) else scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) @@ -28,27 +34,26 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" ) ushdir="${scrfunc_dir}" . $ushdir/source_util_funcs.sh - # #----------------------------------------------------------------------- # # Get the experiment directory. We assume that there is a symlink to # this script in the experiment directory, and this script is called via -# that symlink. Thus, finding the directory in which the symlink is lo- -# cated will give us the experiment directory. We find this by first +# that symlink. Thus, finding the directory in which the symlink is +# located will give us the experiment directory. We find this by first # obtaining the directory portion (i.e. the portion without the name of # this script) of the command that was used to called this script (i.e. -# "$0") and then use the "readlink -f" command to obtain the correspond- -# ing absolute path. 
This will work for all four of the following ways -# in which the symlink in the experiment directory pointing to this -# script may be called: +# "$0") and then use the "readlink -f" command to obtain the corresponding +# absolute path. This will work for all four of the following ways in +# which the symlink in the experiment directory pointing to this script +# may be called: # # 1) Call this script from the experiment directory: # > cd /path/to/experiment/directory # > launch_FV3LAM_wflow.sh # -# 2) Call this script from the experiment directory but using "./" be- -# fore the script name: +# 2) Call this script from the experiment directory but using "./" before +# the script name: # > cd /path/to/experiment/directory # > ./launch_FV3LAM_wflow.sh # @@ -56,9 +61,8 @@ ushdir="${scrfunc_dir}" # symlink in the experiment directory: # > /path/to/experiment/directory/launch_FV3LAM_wflow.sh # -# 4) Call this script from a directory that is several levels up from -# the experiment directory (but not necessarily at the root directo- -# ry): +# 4) Call this script from a directory that is several levels up from the +# experiment directory (but not necessarily at the root directory): # > cd /path/to # > experiment/directory/launch_FV3LAM_wflow.sh # @@ -75,7 +79,13 @@ ushdir="${scrfunc_dir}" # exptdir=$( dirname "$0" ) if [[ $(uname -s) == Darwin ]]; then - command -v greadlink >/dev/null 2>&1 || { echo >&2 "For Darwin-based operating systems (MacOS), the 'greadlink' utility is required to run the UFS SRW Application. Reference the User's Guide for more information about platform requirements. Aborting."; exit 1; } + command -v greadlink >/dev/null 2>&1 || { \ + echo >&2 "\ +For Darwin-based operating systems (MacOS), the 'greadlink' utility is +required to run the UFS SRW Application. Reference the User's Guide for +more information about platform requirements. 
Aborting."; + exit 1; + } exptdir=$( greadlink -f "$exptdir" ) else exptdir=$( readlink -f "$exptdir" ) @@ -106,14 +116,12 @@ expt_name="${EXPT_SUBDIR}" # #----------------------------------------------------------------------- # -machine=$(echo_lowercase $MACHINE) -env_fn=${WFLOW_ENV_FN:-"wflow_${machine}.env"} -env_fp="${SR_WX_APP_TOP_DIR}/env/${env_fn}" +env_fp="${SR_WX_APP_TOP_DIR}/env/${WFLOW_ENV_FN}" module purge source "${env_fp}" || print_err_msg_exit "\ - Sourcing platform-specific environment file (env_fp) for -the workflow task failed : -env_fp = \"${env_fp}\"" +Sourcing platform-specific environment file (env_fp) for the workflow +task failed: + env_fp = \"${env_fp}\"" # #----------------------------------------------------------------------- # @@ -141,20 +149,19 @@ wflow_status="IN PROGRESS" # #----------------------------------------------------------------------- # -cd "$exptdir" +cd_vrfy "$exptdir" # #----------------------------------------------------------------------- # -# Issue the rocotorun command to (re)launch the next task in the -# workflow. Then check for error messages in the output of rocotorun. -# If any are found, it means the end-to-end run of the workflow failed. -# In this case, we remove the crontab entry that launches the workflow, -# and we append an appropriate failure message at the end of the launch -# log file. +# Issue the rocotorun command to (re)launch the next task in the workflow. +# Then check for error messages in the output of rocotorun. If any are +# found, it means the end-to-end run of the workflow failed. In this +# case, we remove the crontab entry that launches the workflow, and we +# append an appropriate failure message at the end of the launch log +# file. 
# #----------------------------------------------------------------------- # - tmp_fn="rocotorun_output.txt" rocotorun_cmd="rocotorun -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10" eval ${rocotorun_cmd} > ${tmp_fn} 2>&1 @@ -239,15 +246,15 @@ ${rocotostat_output} # ... # # Thus, the first row is a header line containing the column titles, and -# the remaining rows each correspond to one cycle in the workflow. Be- -# low, we are interested in the first and second columns of each row. -# The first column is a string containing the start time of the cycle -# (in the format YYYYMMDDHHmm, where YYYY is the 4-digit year, MM is the -# 2-digit month, DD is the 2-digit day of the month, HH is the 2-digit -# hour of the day, and mm is the 2-digit minute of the hour). The se- -# cond column is a string containing the state of the cycle. This can -# be "Active" or "Done". Below, we read in and store these two columns -# in (1-D) arrays. +# the remaining rows each correspond to one cycle in the workflow. Below, +# we are interested in the first and second columns of each row. The +# first column is a string containing the start time of the cycle (in the +# format YYYYMMDDHHmm, where YYYY is the 4-digit year, MM is the 2-digit +# month, DD is the 2-digit day of the month, HH is the 2-digit hour of +# the day, and mm is the 2-digit minute of the hour). The second column +# is a string containing the state of the cycle. This can be "Active" +# or "Done". Below, we read in and store these two columns in (1-D) +# arrays. # #----------------------------------------------------------------------- # @@ -259,9 +266,9 @@ cycle_status=() i=0 while read -r line; do # -# Note that the first line in rocotostat_output is a header line con- -# taining the column titles. Thus, we ignore it and consider only the -# remaining lines (of which there is one per cycle). +# Note that the first line in rocotostat_output is a header line containing +# the column titles. 
Thus, we ignore it and consider only the remaining +# lines (of which there is one per cycle). # if [ $i -gt 0 ]; then im1=$((i-1)) @@ -289,9 +296,9 @@ done # #----------------------------------------------------------------------- # -# If the number of completed cycles is equal to the total number of cy- -# cles, it means the end-to-end run of the workflow was successful. In -# this case, we reset the wflow_status to "SUCCESS". +# If the number of completed cycles is equal to the total number of cycles, +# it means the end-to-end run of the workflow was successful. In this +# case, we reset the wflow_status to "SUCCESS". # #----------------------------------------------------------------------- # @@ -301,8 +308,8 @@ fi # #----------------------------------------------------------------------- # -# Print informational messages about the workflow to the launch log -# file, including the workflow status. +# Print informational messages about the workflow to the launch log file, +# including the workflow status. # #----------------------------------------------------------------------- # @@ -333,25 +340,24 @@ if [ "${wflow_status}" = "SUCCESS" ] || \ msg=" The end-to-end run of the workflow for the forecast experiment specified -by expt_name has completed with the following workflow status (wflow_- -status): +by expt_name has completed with the following workflow status (wflow_status): expt_name = \"${expt_name}\" wflow_status = \"${wflow_status}\" " # # If a cron job was being used to periodically relaunch the workflow, we -# now remove the entry in the crontab corresponding to the workflow be- -# cause the end-to-end run of the workflow has now either succeeded or +# now remove the entry in the crontab corresponding to the workflow +# because the end-to-end run of the workflow has now either succeeded or # failed and will remain in that state without manual user intervention. -# Thus, there is no need to try to relaunch it. 
We also append a mes- -# sage to the completion message above to indicate this. +# Thus, there is no need to try to relaunch it. We also append a message +# to the completion message above to indicate this. # if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then msg="${msg}\ -Thus, there is no need to relaunch the workflow via a cron job. Remo- -ving from the crontab the line (CRONTAB_LINE) that calls the workflow -launch script for this experiment: +Thus, there is no need to relaunch the workflow via a cron job. Removing +from the crontab the line (CRONTAB_LINE) that calls the workflow launch +script for this experiment: CRONTAB_LINE = \"${CRONTAB_LINE}\" " # @@ -364,12 +370,13 @@ launch script for this experiment: $SED -r -e "s%[*]%\\\\*%g" ) # # In the string passed to the grep command below, we use the line start -# and line end anchors ("^" and "$", respectively) to ensure that we on- -# ly find lines in the crontab that contain exactly the string in cron- -# tab_line_esc_astr without any leading or trailing characters. +# and line end anchors ("^" and "$", respectively) to ensure that we +# only find lines in the crontab that contain exactly the string in +# crontab_line_esc_astr without any leading or trailing characters. 
# if [ "$MACHINE" = "WCOSS_DELL_P3" ];then - grep -v "^${crontab_line_esc_astr}$" "/u/$USER/cron/mycrontab" > tmpfile && mv tmpfile "/u/$USER/cron/mycrontab" + grep -v "^${crontab_line_esc_astr}$" "/u/$USER/cron/mycrontab" \ + > tmpfile && mv_vrfy tmpfile "/u/$USER/cron/mycrontab" else ( crontab -l | grep -v "^${crontab_line_esc_astr}$" ) | crontab - fi @@ -388,7 +395,3 @@ launch script for this experiment: fi fi - - - - diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index 1f0d88126..17b6d5d3d 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -135,8 +135,7 @@ jjob_fp="$2" module purge machine=$(echo_lowercase $MACHINE) -env_fn=${BUILD_ENV_FN:-"build_${machine}_${COMPILER}.env"} -env_fp="${SR_WX_APP_TOP_DIR}/env/${env_fn}" +env_fp="${SR_WX_APP_TOP_DIR}/env/${BUILD_ENV_FN}" source "${env_fp}" || print_err_msg_exit "\ Sourcing platform- and compiler-specific environment file (env_fp) for the workflow task specified by task_name failed: diff --git a/ush/setup.sh b/ush/setup.sh index 88dcab21e..d8db064c6 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -63,6 +63,7 @@ Starting function ${func_name}() in \"${scrfunc_fn}\"... # #----------------------------------------------------------------------- # +. ./check_expt_config_vars.sh . ./set_cycle_dates.sh . ./set_gridparams_GFDLgrid.sh . ./set_gridparams_ESGgrid.sh @@ -111,7 +112,9 @@ if [ -f "${EXPT_CONFIG_FN}" ]; then # configuration file are also assigned default values in the default # configuration file. # - . ./compare_config_scripts.sh + check_expt_config_vars \ + default_config_fp="./${EXPT_DEFAULT_CONFIG_FN}" \ + config_fp="./${EXPT_CONFIG_FN}" # # Now source the user-specified configuration file. 
# @@ -698,12 +701,27 @@ fi # #----------------------------------------------------------------------- # -# Calculate PPN_RUN_FCST from NCORES_PER_NODE and OMP_NUM_THREADS_RUN_FCST +# Set the names of the build and workflow environment files (if not +# already specified by the user). These are the files that need to be +# sourced before building the component SRW App codes and running various +# workflow scripts, respectively. # #----------------------------------------------------------------------- # -PPN_RUN_FCST_OPT="$(( ${NCORES_PER_NODE} / ${OMP_NUM_THREADS_RUN_FCST} ))" -PPN_RUN_FCST=${PPN_RUN_FCST:-${PPN_RUN_FCST_OPT}} +machine=$(echo_lowercase ${MACHINE}) +WFLOW_ENV_FN=${WFLOW_ENV_FN:-"wflow_${machine}.env"} +BUILD_ENV_FN=${BUILD_ENV_FN:-"build_${machine}_${COMPILER}.env"} +# +#----------------------------------------------------------------------- +# +# Calculate a default value for the number of processes per node for the +# RUN_FCST_TN task. Then set PPN_RUN_FCST to this default value if +# PPN_RUN_FCST is not already specified by the user. 
+# +#----------------------------------------------------------------------- +# +ppn_run_fcst_default="$(( ${NCORES_PER_NODE} / ${OMP_NUM_THREADS_RUN_FCST} ))" +PPN_RUN_FCST=${PPN_RUN_FCST:-${ppn_run_fcst_default}} # #----------------------------------------------------------------------- # @@ -882,32 +900,32 @@ fi # #----------------------------------------------------------------------- # -CYCL_HRS_str=$(printf "\"%s\" " "${CYCL_HRS[@]}") -CYCL_HRS_str="( $CYCL_HRS_str)" +cycl_hrs_str=$(printf "\"%s\" " "${CYCL_HRS[@]}") +cycl_hrs_str="( ${cycl_hrs_str})" i=0 -for CYCL in "${CYCL_HRS[@]}"; do +for cycl_hr in "${CYCL_HRS[@]}"; do - CYCL_OR_NULL=$( printf "%s" "$CYCL" | $SED -n -r -e "s/^([0-9]{2})$/\1/p" ) + cycl_hr_or_null=$( printf "%s" "${cycl_hr}" | $SED -n -r -e "s/^([0-9]{2})$/\1/p" ) - if [ -z "${CYCL_OR_NULL}" ]; then + if [ -z "${cycl_hr_or_null}" ]; then print_err_msg_exit "\ Each element of CYCL_HRS must be a string consisting of exactly 2 digits -(including a leading \"0\", if necessary) specifying an hour-of-day. Ele- -ment #$i of CYCL_HRS (where the index of the first element is 0) does not -have this form: - CYCL_HRS = $CYCL_HRS_str +(including a leading \"0\", if necessary) specifying an hour-of-day. +Element #$i of CYCL_HRS (where the index of the first element is 0) does +not have this form: + CYCL_HRS = ${cycl_hrs_str} CYCL_HRS[$i] = \"${CYCL_HRS[$i]}\"" fi - if [ "${CYCL_OR_NULL}" -lt "0" ] || \ - [ "${CYCL_OR_NULL}" -gt "23" ]; then + if [ "${cycl_hr_or_null}" -lt "0" ] || \ + [ "${cycl_hr_or_null}" -gt "23" ]; then print_err_msg_exit "\ -Each element of CYCL_HRS must be an integer between \"00\" and \"23\", in- -clusive (including a leading \"0\", if necessary), specifying an hour-of- -day. 
Element #$i of CYCL_HRS (where the index of the first element is 0) -does not have this form: - CYCL_HRS = $CYCL_HRS_str +Each element of CYCL_HRS must be an integer between \"00\" and \"23\", +inclusive (including a leading \"0\", if necessary), specifying an hour- +of-day. Element #$i of CYCL_HRS (where the index of the first element +is 0) does not have this form: + CYCL_HRS = ${cycl_hrs_str} CYCL_HRS[$i] = \"${CYCL_HRS[$i]}\"" fi @@ -929,7 +947,7 @@ if [ "${INCR_CYCL_FREQ}" -lt "24" ] && [ "$i" -gt "1" ]; then The number of CYCL_HRS does not match with that expected by INCR_CYCL_FREQ: INCR_CYCL_FREQ = ${INCR_CYCL_FREQ} cycle interval by the number of CYCL_HRS = ${cycl_intv} - CYCL_HRS = $CYCL_HRS_str " + CYCL_HRS = ${cycl_hrs_str}" fi im1=$(( $i-1 )) @@ -941,7 +959,7 @@ The number of CYCL_HRS does not match with that expected by INCR_CYCL_FREQ: print_err_msg_exit "\ Element #${itmp} of CYCL_HRS does not match with the increment of cycle frequency INCR_CYCL_FREQ: - CYCL_HRS = $CYCL_HRS_str + CYCL_HRS = ${cycl_hrs_str} INCR_CYCL_FREQ = ${INCR_CYCL_FREQ} CYCL_HRS[$itmp] = \"${CYCL_HRS[$itmp]}\"" fi @@ -961,7 +979,7 @@ fi set_cycle_dates \ date_start="${DATE_FIRST_CYCL}" \ date_end="${DATE_LAST_CYCL}" \ - cycle_hrs="${CYCL_HRS_str}" \ + cycle_hrs="${cycl_hrs_str}" \ incr_cycl_freq="${INCR_CYCL_FREQ}" \ output_varname_all_cdates="ALL_CDATES" @@ -1000,8 +1018,7 @@ fi # Directory containing various executable files. # # TEMPLATE_DIR: -# Directory in which templates of various FV3-LAM input files are locat- -# ed. +# Directory in which templates of various FV3-LAM input files are located. # # UFS_WTHR_MDL_DIR: # Directory in which the (NEMS-enabled) FV3-LAM application is located. 
@@ -1118,7 +1135,7 @@ One or more fix file directories have not been specified for this machine: TOPO_DIR = \"${TOPO_DIR:-\"\"} SFC_CLIMO_INPUT_DIR = \"${SFC_CLIMO_INPUT_DIR:-\"\"} FIXLAM_NCO_BASEDIR = \"${FIXLAM_NCO_BASEDIR:-\"\"} -You can specify the missing location(s) in config.sh" +You can specify the missing location(s) in ${EXPT_CONFIG_FN}." fi ;; @@ -1208,7 +1225,7 @@ check_var_valid_value \ # Set USE_CUSTOM_POST_CONFIG_FILE to either "TRUE" or "FALSE" so we don't # have to consider other valid values later on. # -USE_CUSTOM_POST_CONFIG_FILE=$(echo_uppercase $USE_CUSTOM_POST_CONFIG_FILE) +USE_CUSTOM_POST_CONFIG_FILE=$(echo_uppercase ${USE_CUSTOM_POST_CONFIG_FILE}) if [ "$USE_CUSTOM_POST_CONFIG_FILE" = "TRUE" ] || \ [ "$USE_CUSTOM_POST_CONFIG_FILE" = "YES" ]; then USE_CUSTOM_POST_CONFIG_FILE="TRUE" @@ -1441,7 +1458,7 @@ must set DT_SUBHOURLY_POST_MNTS to something other than zero." # # For now, the sub-hourly capability is restricted to having values of # DT_SUBHOURLY_POST_MNTS that evenly divide into 60 minutes. This is -# because the jinja rocoto XML template (FV3LAM_wflow.xml) assumes that +# because the jinja rocoto XML template (${WFLOW_XML_FN}) assumes that # model output is generated at the top of every hour (i.e. at 00 minutes). 
# This restricts DT_SUBHOURLY_POST_MNTS to the following values (inluding # both cases with and without a leading 0): @@ -1574,8 +1591,8 @@ fi # #----------------------------------------------------------------------- # -# The FV3 forecast model needs the following input files in the run di- -# rectory to start a forecast: +# The FV3 forecast model needs the following input files in the run +# directory to start a forecast: # # (1) The data table file # (2) The diagnostics table file @@ -1583,27 +1600,22 @@ fi # (4) The FV3 namelist file # (5) The model configuration file # (6) The NEMS configuration file -# -# If using CCPP, it also needs: -# # (7) The CCPP physics suite definition file # # The workflow contains templates for the first six of these files. # Template files are versions of these files that contain placeholder -# (i.e. dummy) values for various parameters. The experiment/workflow -# generation scripts copy these templates to appropriate locations in -# the experiment directory (either the top of the experiment directory -# or one of the cycle subdirectories) and replace the placeholders in -# these copies by actual values specified in the experiment/workflow -# configuration file (or derived from such values). The scripts then -# use the resulting "actual" files as inputs to the forecast model. -# -# Note that the CCPP physics suite defintion file does not have a cor- -# responding template file because it does not contain any values that -# need to be replaced according to the experiment/workflow configura- -# tion. If using CCPP, this file simply needs to be copied over from -# its location in the forecast model's directory structure to the ex- -# periment directory. +# (i.e. dummy) values for various parameters. The experiment generation +# and/or the forecast task (i.e. J-job) scripts copy these templates to +# appropriate locations in the experiment directory (e.g. 
to the top of
+# the experiment directory, to one of the cycle subdirectories, etc.) and
+# replace the placeholders with actual values to obtain the files that
+# are used as inputs to the forecast model.
+#
+# Note that the CCPP physics suite definition file (SDF) does not have a
+# corresponding template file because it does not contain any values
+# that need to be replaced according to the experiment configuration.
+# This file simply needs to be copied over from its location in the
+# forecast model's directory structure to the experiment directory.
 #
 # Below, we first set the names of the templates for the first six files
 # listed above.  We then set the full paths to these template files.
@@ -2057,7 +2069,6 @@ Reset value is:"
   print_info_msg "$msg"
 fi
 
-
 #
 #-----------------------------------------------------------------------
 #
@@ -2181,7 +2192,6 @@ fi
 #-----------------------------------------------------------------------
 #
 . ./set_extrn_mdl_params.sh
-
 #
 #-----------------------------------------------------------------------
 #
@@ -2536,7 +2546,6 @@ fi
 #-----------------------------------------------------------------------
 #
 NNODES_RUN_FCST=$(( (PE_MEMBER01 + PPN_RUN_FCST - 1)/PPN_RUN_FCST ))
-
 #
 #-----------------------------------------------------------------------
 #
@@ -2584,179 +2593,106 @@ set_thompson_mp_fix_files \
 #
 #-----------------------------------------------------------------------
 #
-# Generate the shell script that will appear in the experiment directory
-# (EXPTDIR) and will contain definitions of variables needed by the va-
-# rious scripts in the workflow.  We refer to this as the experiment/
-# workflow global variable definitions file.  We will create this file
-# by:
-#
-# 1) Copying the default workflow/experiment configuration file (speci-
-#    fied by EXPT_DEFAULT_CONFIG_FN and located in the shell script di-
-#    rectory specified by USHDIR) to the experiment directory and rena-
-#    ming it to the name specified by GLOBAL_VAR_DEFNS_FN.
-# -# 2) Resetting the default variable values in this file to their current -# values. This is necessary because these variables may have been -# reset by the user-specified configuration file (if one exists in -# USHDIR) and/or by this setup script, e.g. because predef_domain is -# set to a valid non-empty value. -# -# 3) Appending to the variable definitions file any new variables intro- -# duced in this setup script that may be needed by the scripts that -# perform the various tasks in the workflow (and which source the va- -# riable defintions file). -# -# First, set the full path to the variable definitions file and copy the -# default configuration script into it. +# Set the full path to the experiment's variable definitions file. This +# file will contain definitions of variables (in bash syntax) needed by +# the various scripts in the workflow. # #----------------------------------------------------------------------- # -GLOBAL_VAR_DEFNS_FP="$EXPTDIR/$GLOBAL_VAR_DEFNS_FN" -cp_vrfy $USHDIR/${EXPT_DEFAULT_CONFIG_FN} ${GLOBAL_VAR_DEFNS_FP} +GLOBAL_VAR_DEFNS_FP="$EXPTDIR/${GLOBAL_VAR_DEFNS_FN}" # #----------------------------------------------------------------------- # -# +# Get the list of primary experiment variables and their default values +# from the default experiment configuration file (EXPT_DEFAULT_CONFIG_FN). +# By "primary", we mean those variables that are defined in the default +# configuration file and can be reset in the user-specified experiment +# configuration file (EXPT_CONFIG_FN). The default values will be updated +# below to user-specified ones and the result saved in the experiment's +# variable definitions file. # #----------------------------------------------------------------------- # +print_info_msg " +Creating list of default experiment variable definitions..." -# Read all lines of GLOBAL_VAR_DEFNS file into the variable line_list. 
-line_list=$( $SED -r -e "s/(.*)/\1/g" ${GLOBAL_VAR_DEFNS_FP} ) -# -# Loop through the lines in line_list and concatenate lines ending with -# the line bash continuation character "\". -# -rm_vrfy ${GLOBAL_VAR_DEFNS_FP} -while read crnt_line; do - printf "%s\n" "${crnt_line}" >> ${GLOBAL_VAR_DEFNS_FP} -done <<< "${line_list}" -# -#----------------------------------------------------------------------- -# -# The following comment block needs to be updated because now line_list -# may contain lines that are not assignment statements (e.g. it may con- -# tain if-statements). Such lines are ignored in the while-loop below. -# -# Reset each of the variables in the variable definitions file to its -# value in the current environment. To accomplish this, we: -# -# 1) Create a list of variable settings by stripping out comments, blank -# lines, extraneous leading whitespace, etc from the variable defini- -# tions file (which is currently identical to the default workflow/ -# experiment configuration script) and saving the result in the vari- -# able line_list. Each line of line_list will have the form -# -# VAR=... -# -# where the VAR is a variable name and ... is the value from the de- -# fault configuration script (which does not necessarily correspond -# to the current value of the variable). -# -# 2) Loop through each line of line_list. For each line, we extract the -# variable name (and save it in the variable var_name), get its value -# from the current environment (using bash indirection, i.e. -# ${!var_name}), and use the set_file_param() function to replace the -# value of the variable in the variable definitions script (denoted -# above by ...) with its current value. -# -#----------------------------------------------------------------------- -# -# Also should remove trailing whitespace... 
-line_list=$( $SED -r \ - -e "s/^([ ]*)([^ ]+.*)/\2/g" \ - -e "/^#.*/d" \ - -e "/^$/d" \ - ${GLOBAL_VAR_DEFNS_FP} ) +get_bash_file_contents fp="$USHDIR/${EXPT_DEFAULT_CONFIG_FN}" \ + output_varname_contents="default_var_defns" print_info_msg "$DEBUG" " -Before updating default values of experiment variables to user-specified -values, the variable \"line_list\" contains: +The variable \"default_var_defns\" containing default values of primary +experiment variables is set as follows: -${line_list} +${default_var_defns} " # #----------------------------------------------------------------------- # -# Add a comment at the beginning of the variable definitions file that -# indicates that the first section of that file is (mostly) the same as -# the configuration file. -# -#----------------------------------------------------------------------- -# -read -r -d '' str_to_insert << EOM +# Create a list of primary experiment variable definitions containing +# updated values. By "updated", we mean non-default values. Values +# may have been updated due to the presence of user-specified values in +# the experiment configuration file (EXPT_CONFIG_FN) or due to other +# considerations (e.g. resetting depending on the platform the App is +# running on). # #----------------------------------------------------------------------- -#----------------------------------------------------------------------- -# Section 1: -# This section is a copy of the default experiment configuration file -# (${EXPT_DEFAULT_CONFIG_FN}) in the shell scripts directory specified by USHDIR -# except that variable values have been updated to those for the experiment -# (as opposed to the default values). -#----------------------------------------------------------------------- -#----------------------------------------------------------------------- -# -EOM # -# Replace all occurrences of actual newlines in the variable str_to_insert -# with escaped backslash-n. 
This is needed for the sed command below to -# work properly (i.e. to avoid it failing with an "unterminated `s' command" -# error message). -# -str_to_insert=${str_to_insert//$'\n'/\\n} +print_info_msg " +Creating lists of (updated) experiment variable definitions..." # -# Insert str_to_insert into GLOBAL_VAR_DEFNS_FP right after the line -# containing the name of the interpreter (i.e. the line that starts with -# the string "#!", e.g. "#!/bin/bash"). +# Set the flag that specifies whether or not array variables will be +# recorded in the variable definitions file on one line or one element +# per line. Then, if writing arrays one element per line (i.e. multiline), +# set an escaped-newline character that needs to be included after every +# element of each array as the newline character in order for sed to +# write the line properly. # -regexp="(^#!.*)" -$SED -i -r -e "s|$regexp|\1\n\n${str_to_insert}\n|g" ${GLOBAL_VAR_DEFNS_FP} +multiline_arrays="TRUE" +#multiline_arrays="FALSE" +escbksl_nl_or_null="" +if [ "${multiline_arrays}" = "TRUE" ]; then + escbksl_nl_or_null='\\\n' +fi # -# Loop through the lines in line_list. +# Loop through the lines in default_var_defns. Reset the value of the +# variable on each line to the updated value (e.g. to a user-specified +# value, as opposed to the default value). The updated list of variables +# and values will be saved in var_defns. # -print_info_msg " -Generating the global experiment variable definitions file specified by -GLOBAL_VAR_DEFNS_FN: - GLOBAL_VAR_DEFNS_FN = \"${GLOBAL_VAR_DEFNS_FN}\" -Full path to this file is: - GLOBAL_VAR_DEFNS_FP = \"${GLOBAL_VAR_DEFNS_FP}\" -For more detailed information, set DEBUG to \"TRUE\" in the experiment -configuration file (\"${EXPT_CONFIG_FN}\")." - -template_var_names=() -template_var_values=() +var_defns="" while read crnt_line; do # # Try to obtain the name of the variable being set on the current line. 
-# This will be successful only if the line consists of one or more char- -# acters representing the name of a variable (recall that in generating -# the variable line_list, leading spaces on each line were stripped out), -# followed by an equal sign, followed by zero or more characters -# representing the value that the variable is being set to. +# This will be successful only if the line consists of one or more non- +# whitespace characters representing the name of a variable followed by +# an equal sign, followed by zero or more characters representing the +# value that the variable is being set to. (Recall that in generating +# the variable default_var_defns, leading spaces on each line were +# stripped out). # var_name=$( printf "%s" "${crnt_line}" | $SED -n -r -e "s/^([^ ]*)=.*/\1/p" ) # # If var_name is not empty, then a variable name was found on the current -# line in line_list. +# line in default_var_defns. # - if [ ! -z $var_name ]; then + if [ ! -z ${var_name} ]; then print_info_msg "$DEBUG" " var_name = \"${var_name}\"" # # If the variable specified in var_name is set in the current environment -# (to either an empty or non-empty string), get its value and insert it -# in the variable definitions file on the line where that variable is -# defined. Note that +# (to either an empty or non-empty string), get its value and save it in +# var_value. Note that # # ${!var_name+x} # # will retrun the string "x" if the variable specified in var_name is # set (to either an empty or non-empty string), and it will return an -# empty string if the variable specified in var_name is unset (i.e. un- -# defined). +# empty string if the variable specified in var_name is unset (i.e. if +# it is undefined). # - if [ ! -z ${!var_name+x} ]; then + unset "var_value" + if [ ! -z "${!var_name+x}" ]; then # # The variable may be a scalar or an array. Thus, we first treat it as # an array and obtain the number of elements that it contains. 
@@ -2765,49 +2701,40 @@ var_name = \"${var_name}\"" array=("${!array_name_at}") num_elems="${#array[@]}" # -# We will now set the variable var_value to the string that needs to be -# placed on the right-hand side of the assignment operator (=) on the -# appropriate line in the variable definitions file. How this is done -# depends on whether the variable is a scalar or an array. +# Set var_value to the updated value of the current experiment variable. +# How this is done depends on whether the variable is a scalar or an +# array. # # If the variable contains only one element, then it is a scalar. (It # could be a 1-element array, but for simplicity, we treat that case as # a scalar.) In this case, we enclose its value in double quotes and # save the result in var_value. # - if [ "$num_elems" -eq 1 ]; then + if [ "${num_elems}" -eq 1 ]; then + var_value="${!var_name}" - var_value="\"${var_value}\"" + rhs="'${var_value}'" # # If the variable contains more than one element, then it is an array. # In this case, we build var_value in two steps as follows: # # 1) Generate a string containing each element of the array in double -# quotes and followed by a space. +# quotes and followed by a space (and followed by an optional backslash +# and newline if multiline_arrays has been set to "TRUE"). # # 2) Place parentheses around the double-quoted list of array elements # generated in the first step. Note that there is no need to put a -# space before the closing parenthesis because in step 1, we have -# already placed a space after the last element. +# space before the closing parenthesis because during step 1 above, +# a space has already been placed after the last array element. 
# else - arrays_on_one_line="TRUE" - arrays_on_one_line="FALSE" - - if [ "${arrays_on_one_line}" = "TRUE" ]; then - var_value=$(printf "\"%s\" " "${!array_name_at}") -# var_value=$(printf "\"%s\" \\\\\\ \\\n" "${!array_name_at}") - else -# var_value=$(printf "%s" "\\\\\\n") - var_value="\\\\\n" - for (( i=0; i<${num_elems}; i++ )); do -# var_value=$(printf "%s\"%s\" %s" "${var_value}" "${array[$i]}" "\\\\\\n") - var_value="${var_value}\"${array[$i]}\" \\\\\n" -# var_value="${var_value}\"${array[$i]}\" " - done - fi - var_value="( $var_value)" + var_value="" + printf -v "var_value" "${escbksl_nl_or_null}" + for (( i=0; i<${num_elems}; i++ )); do + printf -v "var_value" "${var_value}\"${array[$i]}\" ${escbksl_nl_or_null}" + done + rhs="( ${var_value})" fi # @@ -2824,52 +2751,83 @@ The variable specified by \"var_name\" is not set in the current environment: var_name = \"${var_name}\" Setting its value in the variable definitions file to an empty string." - var_value="\"\"" + rhs="''" fi # -# Now place var_value on the right-hand side of the assignment statement -# on the appropriate line in the variable definitions file. +# Set the line containing the variable's definition. Then add the line +# to the list of all variable definitions. # - set_file_param "${GLOBAL_VAR_DEFNS_FP}" "${var_name}" "${var_value}" + var_defn="${var_name}=$rhs" + printf -v "var_defns" "${var_defns}${var_defn}\n" # # If var_name is empty, then a variable name was not found on the current -# line in line_list. In this case, print out a warning and move on to -# the next line. +# line in default_var_defns. In this case, print out a warning and move +# on to the next line. 
# else print_info_msg " -Could not extract a variable name from the current line in \"line_list\" +Could not extract a variable name from the current line in \"default_var_defns\" (probably because it does not contain an equal sign with no spaces on either side): crnt_line = \"${crnt_line}\" var_name = \"${var_name}\" -Continuing to next line in \"line_list\"." +Continuing to next line in \"default_var_defns\"." fi -done <<< "${line_list}" +done <<< "${default_var_defns}" # #----------------------------------------------------------------------- # -# Append additional variable definitions (and comments) to the variable -# definitions file. These variables have been set above using the vari- -# ables in the default and local configuration scripts. These variables -# are needed by various tasks/scripts in the workflow. +# Construct the experiment's variable definitions file. Below, we first +# record the contents we want to place in this file in the variable +# var_defns_file_contents, and we then write the contents of this +# variable to the file. # #----------------------------------------------------------------------- # -{ cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} +print_info_msg " +Generating the global experiment variable definitions file specified by +GLOBAL_VAR_DEFNS_FN: + GLOBAL_VAR_DEFNS_FN = \"${GLOBAL_VAR_DEFNS_FN}\" +Full path to this file is: + GLOBAL_VAR_DEFNS_FP = \"${GLOBAL_VAR_DEFNS_FP}\" +For more detailed information, set DEBUG to \"TRUE\" in the experiment +configuration file (\"${EXPT_CONFIG_FN}\")." +var_defns_file_contents="\ +# +#----------------------------------------------------------------------- +#----------------------------------------------------------------------- +# Section 1: +# This section contains (most of) the primary experiment variables, i.e. 
+# those variables that are defined in the default configuration file +# (${EXPT_DEFAULT_CONFIG_FN}) and that can be reset via the user-specified +# experiment configuration file (${EXPT_CONFIG_FN}). +#----------------------------------------------------------------------- +#----------------------------------------------------------------------- +# +${var_defns}" +# +# Append derived/secondary variable definitions (as well as comments) to +# the contents of the variable definitions file. +# +ensmem_names_str=$(printf "${escbksl_nl_or_null}\"%s\" " "${ENSMEM_NAMES[@]}") +ensmem_names_str=$(printf "( %s${escbksl_nl_or_null})" "${ensmem_names_str}") + +fv3_nml_ensmem_fps_str=$(printf "${escbksl_nl_or_null}\"%s\" " "${FV3_NML_ENSMEM_FPS[@]}") +fv3_nml_ensmem_fps_str=$(printf "( %s${escbksl_nl_or_null})" "${fv3_nml_ensmem_fps_str}") + +var_defns_file_contents=${var_defns_file_contents}"\ # #----------------------------------------------------------------------- #----------------------------------------------------------------------- # Section 2: -# This section defines variables that have been derived from the ones -# above by the setup script (setup.sh) and which are needed by one or -# more of the scripts that perform the workflow tasks (those scripts -# source this variable definitions file). +# This section defines variables that have been derived from the primary +# set of experiment variables above (we refer to these as \"derived\" or +# \"secondary\" variables). #----------------------------------------------------------------------- #----------------------------------------------------------------------- # @@ -2877,15 +2835,15 @@ done <<< "${line_list}" # #----------------------------------------------------------------------- # -# Full path to workflow launcher script, its log file, and the line that -# gets added to the cron table to launch this script if USE_CRON_TO_RELAUNCH -# is set to TRUE. 
+# Full path to workflow (re)launch script, its log file, and the line +# that gets added to the cron table to launch this script if the flag +# USE_CRON_TO_RELAUNCH is set to \"TRUE\". # #----------------------------------------------------------------------- # -WFLOW_LAUNCH_SCRIPT_FP="${WFLOW_LAUNCH_SCRIPT_FP}" -WFLOW_LAUNCH_LOG_FP="${WFLOW_LAUNCH_LOG_FP}" -CRONTAB_LINE="${CRONTAB_LINE}" +WFLOW_LAUNCH_SCRIPT_FP='${WFLOW_LAUNCH_SCRIPT_FP}' +WFLOW_LAUNCH_LOG_FP='${WFLOW_LAUNCH_LOG_FP}' +CRONTAB_LINE='${CRONTAB_LINE}' # #----------------------------------------------------------------------- # @@ -2893,44 +2851,44 @@ CRONTAB_LINE="${CRONTAB_LINE}" # #----------------------------------------------------------------------- # -SR_WX_APP_TOP_DIR="${SR_WX_APP_TOP_DIR}" -HOMErrfs="$HOMErrfs" -USHDIR="$USHDIR" -SCRIPTSDIR="$SCRIPTSDIR" -JOBSDIR="$JOBSDIR" -SORCDIR="$SORCDIR" -SRC_DIR="$SRC_DIR" -PARMDIR="$PARMDIR" -MODULES_DIR="${MODULES_DIR}" -EXECDIR="$EXECDIR" -FIXam="$FIXam" -FIXclim="$FIXclim" -FIXLAM="$FIXLAM" -FIXgsm="$FIXgsm" -FIXaer="$FIXaer" -FIXlut="$FIXlut" -COMROOT="$COMROOT" -COMOUT_BASEDIR="${COMOUT_BASEDIR}" -TEMPLATE_DIR="${TEMPLATE_DIR}" -VX_CONFIG_DIR="${VX_CONFIG_DIR}" -METPLUS_CONF="${METPLUS_CONF}" -MET_CONFIG="${MET_CONFIG}" -UFS_WTHR_MDL_DIR="${UFS_WTHR_MDL_DIR}" -UFS_UTILS_DIR="${UFS_UTILS_DIR}" -SFC_CLIMO_INPUT_DIR="${SFC_CLIMO_INPUT_DIR}" -TOPO_DIR="${TOPO_DIR}" -UPP_DIR="${UPP_DIR}" - -EXPTDIR="$EXPTDIR" -LOGDIR="$LOGDIR" -CYCLE_BASEDIR="${CYCLE_BASEDIR}" -GRID_DIR="${GRID_DIR}" -OROG_DIR="${OROG_DIR}" -SFC_CLIMO_DIR="${SFC_CLIMO_DIR}" - -NDIGITS_ENSMEM_NAMES="${NDIGITS_ENSMEM_NAMES}" -ENSMEM_NAMES=( $( printf "\"%s\" " "${ENSMEM_NAMES[@]}" )) -FV3_NML_ENSMEM_FPS=( $( printf "\"%s\" " "${FV3_NML_ENSMEM_FPS[@]}" )) +SR_WX_APP_TOP_DIR='${SR_WX_APP_TOP_DIR}' +HOMErrfs='$HOMErrfs' +USHDIR='$USHDIR' +SCRIPTSDIR='$SCRIPTSDIR' +JOBSDIR='$JOBSDIR' +SORCDIR='$SORCDIR' +SRC_DIR='${SRC_DIR}' +PARMDIR='$PARMDIR' +MODULES_DIR='${MODULES_DIR}' 
+EXECDIR='$EXECDIR' +FIXam='$FIXam' +FIXclim='$FIXclim' +FIXLAM='$FIXLAM' +FIXgsm='$FIXgsm' +FIXaer='$FIXaer' +FIXlut='$FIXlut' +COMROOT='$COMROOT' +COMOUT_BASEDIR='${COMOUT_BASEDIR}' +TEMPLATE_DIR='${TEMPLATE_DIR}' +VX_CONFIG_DIR='${VX_CONFIG_DIR}' +METPLUS_CONF='${METPLUS_CONF}' +MET_CONFIG='${MET_CONFIG}' +UFS_WTHR_MDL_DIR='${UFS_WTHR_MDL_DIR}' +UFS_UTILS_DIR='${UFS_UTILS_DIR}' +SFC_CLIMO_INPUT_DIR='${SFC_CLIMO_INPUT_DIR}' +TOPO_DIR='${TOPO_DIR}' +UPP_DIR='${UPP_DIR}' + +EXPTDIR='$EXPTDIR' +LOGDIR='$LOGDIR' +CYCLE_BASEDIR='${CYCLE_BASEDIR}' +GRID_DIR='${GRID_DIR}' +OROG_DIR='${OROG_DIR}' +SFC_CLIMO_DIR='${SFC_CLIMO_DIR}' + +NDIGITS_ENSMEM_NAMES='${NDIGITS_ENSMEM_NAMES}' +ENSMEM_NAMES=${ensmem_names_str} +FV3_NML_ENSMEM_FPS=${fv3_nml_ensmem_fps_str} # #----------------------------------------------------------------------- # @@ -2938,46 +2896,43 @@ FV3_NML_ENSMEM_FPS=( $( printf "\"%s\" " "${FV3_NML_ENSMEM_FPS[@]}" )) # #----------------------------------------------------------------------- # -GLOBAL_VAR_DEFNS_FP="${GLOBAL_VAR_DEFNS_FP}" -# Try this at some point instead of hard-coding it as above; it's a more -# flexible approach (if it works). 
-#GLOBAL_VAR_DEFNS_FP=$( $READLINK -f "${BASH_SOURCE[0]}" ) +GLOBAL_VAR_DEFNS_FP='${GLOBAL_VAR_DEFNS_FP}' -DATA_TABLE_TMPL_FN="${DATA_TABLE_TMPL_FN}" -DIAG_TABLE_TMPL_FN="${DIAG_TABLE_TMPL_FN}" -FIELD_TABLE_TMPL_FN="${FIELD_TABLE_TMPL_FN}" -MODEL_CONFIG_TMPL_FN="${MODEL_CONFIG_TMPL_FN}" -NEMS_CONFIG_TMPL_FN="${NEMS_CONFIG_TMPL_FN}" +DATA_TABLE_TMPL_FN='${DATA_TABLE_TMPL_FN}' +DIAG_TABLE_TMPL_FN='${DIAG_TABLE_TMPL_FN}' +FIELD_TABLE_TMPL_FN='${FIELD_TABLE_TMPL_FN}' +MODEL_CONFIG_TMPL_FN='${MODEL_CONFIG_TMPL_FN}' +NEMS_CONFIG_TMPL_FN='${NEMS_CONFIG_TMPL_FN}' -DATA_TABLE_TMPL_FP="${DATA_TABLE_TMPL_FP}" -DIAG_TABLE_TMPL_FP="${DIAG_TABLE_TMPL_FP}" -FIELD_TABLE_TMPL_FP="${FIELD_TABLE_TMPL_FP}" -FV3_NML_BASE_SUITE_FP="${FV3_NML_BASE_SUITE_FP}" -FV3_NML_YAML_CONFIG_FP="${FV3_NML_YAML_CONFIG_FP}" -FV3_NML_BASE_ENS_FP="${FV3_NML_BASE_ENS_FP}" -MODEL_CONFIG_TMPL_FP="${MODEL_CONFIG_TMPL_FP}" -NEMS_CONFIG_TMPL_FP="${NEMS_CONFIG_TMPL_FP}" +DATA_TABLE_TMPL_FP='${DATA_TABLE_TMPL_FP}' +DIAG_TABLE_TMPL_FP='${DIAG_TABLE_TMPL_FP}' +FIELD_TABLE_TMPL_FP='${FIELD_TABLE_TMPL_FP}' +FV3_NML_BASE_SUITE_FP='${FV3_NML_BASE_SUITE_FP}' +FV3_NML_YAML_CONFIG_FP='${FV3_NML_YAML_CONFIG_FP}' +FV3_NML_BASE_ENS_FP='${FV3_NML_BASE_ENS_FP}' +MODEL_CONFIG_TMPL_FP='${MODEL_CONFIG_TMPL_FP}' +NEMS_CONFIG_TMPL_FP='${NEMS_CONFIG_TMPL_FP}' -CCPP_PHYS_SUITE_FN="${CCPP_PHYS_SUITE_FN}" -CCPP_PHYS_SUITE_IN_CCPP_FP="${CCPP_PHYS_SUITE_IN_CCPP_FP}" -CCPP_PHYS_SUITE_FP="${CCPP_PHYS_SUITE_FP}" +CCPP_PHYS_SUITE_FN='${CCPP_PHYS_SUITE_FN}' +CCPP_PHYS_SUITE_IN_CCPP_FP='${CCPP_PHYS_SUITE_IN_CCPP_FP}' +CCPP_PHYS_SUITE_FP='${CCPP_PHYS_SUITE_FP}' -FIELD_DICT_FN="${FIELD_DICT_FN}" -FIELD_DICT_IN_UWM_FP="${FIELD_DICT_IN_UWM_FP}" -FIELD_DICT_FP="${FIELD_DICT_FP}" +FIELD_DICT_FN='${FIELD_DICT_FN}' +FIELD_DICT_IN_UWM_FP='${FIELD_DICT_IN_UWM_FP}' +FIELD_DICT_FP='${FIELD_DICT_FP}' -DATA_TABLE_FP="${DATA_TABLE_FP}" -FIELD_TABLE_FP="${FIELD_TABLE_FP}" -FV3_NML_FN="${FV3_NML_FN}" # This may not be necessary... 
-FV3_NML_FP="${FV3_NML_FP}" -NEMS_CONFIG_FP="${NEMS_CONFIG_FP}" +DATA_TABLE_FP='${DATA_TABLE_FP}' +FIELD_TABLE_FP='${FIELD_TABLE_FP}' +FV3_NML_FN='${FV3_NML_FN}' +FV3_NML_FP='${FV3_NML_FP}' +NEMS_CONFIG_FP='${NEMS_CONFIG_FP}' -FV3_EXEC_FP="${FV3_EXEC_FP}" +FV3_EXEC_FP='${FV3_EXEC_FP}' -LOAD_MODULES_RUN_TASK_FP="${LOAD_MODULES_RUN_TASK_FP}" +LOAD_MODULES_RUN_TASK_FP='${LOAD_MODULES_RUN_TASK_FP}' -THOMPSON_MP_CLIMO_FN="${THOMPSON_MP_CLIMO_FN}" -THOMPSON_MP_CLIMO_FP="${THOMPSON_MP_CLIMO_FP}" +THOMPSON_MP_CLIMO_FN='${THOMPSON_MP_CLIMO_FN}' +THOMPSON_MP_CLIMO_FP='${THOMPSON_MP_CLIMO_FP}' # #----------------------------------------------------------------------- # @@ -2985,115 +2940,99 @@ THOMPSON_MP_CLIMO_FP="${THOMPSON_MP_CLIMO_FP}" # #----------------------------------------------------------------------- # -RELATIVE_LINK_FLAG="${RELATIVE_LINK_FLAG}" +RELATIVE_LINK_FLAG='${RELATIVE_LINK_FLAG}' # #----------------------------------------------------------------------- # # Parameters that indicate whether or not various parameterizations are -# included in and called by the phsics suite. +# included in and called by the physics suite. # #----------------------------------------------------------------------- # -SDF_USES_RUC_LSM="${SDF_USES_RUC_LSM}" -SDF_USES_THOMPSON_MP="${SDF_USES_THOMPSON_MP}" +SDF_USES_RUC_LSM='${SDF_USES_RUC_LSM}' +SDF_USES_THOMPSON_MP='${SDF_USES_THOMPSON_MP}' # #----------------------------------------------------------------------- # -# Grid configuration parameters needed regardless of grid generation me- -# thod used. +# Grid configuration parameters needed regardless of grid generation +# method used. 
#
 #-----------------------------------------------------------------------
 #
-GTYPE="$GTYPE"
-TILE_RGNL="${TILE_RGNL}"
-NH0="${NH0}"
-NH3="${NH3}"
-NH4="${NH4}"
+GTYPE='$GTYPE'
+TILE_RGNL='${TILE_RGNL}'
+NH0='${NH0}'
+NH3='${NH3}'
+NH4='${NH4}'
 
-LON_CTR="${LON_CTR}"
-LAT_CTR="${LAT_CTR}"
-NX="${NX}"
-NY="${NY}"
-NHW="${NHW}"
-STRETCH_FAC="${STRETCH_FAC}"
+LON_CTR='${LON_CTR}'
+LAT_CTR='${LAT_CTR}'
+NX='${NX}'
+NY='${NY}'
+NHW='${NHW}'
+STRETCH_FAC='${STRETCH_FAC}'
 
-RES_IN_FIXLAM_FILENAMES="${RES_IN_FIXLAM_FILENAMES}"
+RES_IN_FIXLAM_FILENAMES='${RES_IN_FIXLAM_FILENAMES}'
 #
-# If running the make_grid task, CRES will be set to a null string du-
-# the grid generation step.  It will later be set to an actual value af-
-# ter the make_grid task is complete.
+# If running the make_grid task, CRES will be set to a null string during
+# the grid generation step.  It will later be set to an actual value after
+# the make_grid task is complete.
 #
-CRES="$CRES"
-EOM
-} || print_err_msg_exit "\
-Heredoc (cat) command to append new variable definitions to variable
-definitions file returned with a nonzero status."
+CRES='$CRES'
+"
 #
 #-----------------------------------------------------------------------
 #
-# Append to the variable definitions file the defintions of grid parame-
-# ters that are specific to the grid generation method used.
+# Append to the variable definitions file the definitions of grid parameters
+# that are specific to the grid generation method used.
 #
 #-----------------------------------------------------------------------
 #
+grid_vars_str=""
 if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then
 
-  { cat << EOM >> ${GLOBAL_VAR_DEFNS_FP}
+  grid_vars_str="\
 #
 #-----------------------------------------------------------------------
 #
 # Grid configuration parameters for a regional grid generated from a
-# global parent cubed-sphere grid.  
This is the method originally sug- -# gested by GFDL since it allows GFDL's nested grid generator to be used -# to generate a regional grid. However, for large regional domains, it -# results in grids that have an unacceptably large range of cell sizes +# global parent cubed-sphere grid. This is the method originally +# suggested by GFDL since it allows GFDL's nested grid generator to be +# used to generate a regional grid. However, for large regional domains, +# it results in grids that have an unacceptably large range of cell sizes # (i.e. ratio of maximum to minimum cell size is not sufficiently close # to 1). # #----------------------------------------------------------------------- # -ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" -IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" -JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" -JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" -EOM -} || print_err_msg_exit "\ -Heredoc (cat) command to append grid parameters to variable definitions -file returned with a nonzero status." +ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' +IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' +JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' +JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' +" elif [ "${GRID_GEN_METHOD}" = "ESGgrid" ]; then - { cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} + grid_vars_str="\ # #----------------------------------------------------------------------- # -# Grid configuration parameters for a regional grid generated indepen- -# dently of a global parent grid. This method was developed by Jim Pur- -# ser of EMC and results in very uniform grids (i.e. ratio of maximum to -# minimum cell size is very close to 1). 
+# Grid configuration parameters for a regional grid generated independently +# of a global parent grid. This method was developed by Jim Purser of +# EMC and results in very uniform grids (i.e. ratio of maximum to minimum +# cell size is very close to 1). # #----------------------------------------------------------------------- # -DEL_ANGLE_X_SG="${DEL_ANGLE_X_SG}" -DEL_ANGLE_Y_SG="${DEL_ANGLE_Y_SG}" -NEG_NX_OF_DOM_WITH_WIDE_HALO="${NEG_NX_OF_DOM_WITH_WIDE_HALO}" -NEG_NY_OF_DOM_WITH_WIDE_HALO="${NEG_NY_OF_DOM_WITH_WIDE_HALO}" -PAZI="${PAZI}" -EOM -} || print_err_msg_exit "\ -Heredoc (cat) command to append grid parameters to variable definitions -file returned with a nonzero status." +DEL_ANGLE_X_SG='${DEL_ANGLE_X_SG}' +DEL_ANGLE_Y_SG='${DEL_ANGLE_Y_SG}' +NEG_NX_OF_DOM_WITH_WIDE_HALO='${NEG_NX_OF_DOM_WITH_WIDE_HALO}' +NEG_NY_OF_DOM_WITH_WIDE_HALO='${NEG_NY_OF_DOM_WITH_WIDE_HALO}' +PAZI='${PAZI}' +" fi -# -#----------------------------------------------------------------------- -# -# Because RUN_CMD_FCST can include PE_MEMBER01 (and theoretically other -# variables calculated in this script), delete the first occurrence of it -# in the var_defns file, and write it again at the end. 
-# -#----------------------------------------------------------------------- -$SED -i '/^RUN_CMD_FCST=/d' $GLOBAL_VAR_DEFNS_FP +var_defns_file_contents="${var_defns_file_contents}${grid_vars_str}" # #----------------------------------------------------------------------- # @@ -3102,15 +3041,22 @@ $SED -i '/^RUN_CMD_FCST=/d' $GLOBAL_VAR_DEFNS_FP # #----------------------------------------------------------------------- # -{ cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} +lbc_spec_fcst_hrs_str=$(printf "${escbksl_nl_or_null}\"%s\" " "${LBC_SPEC_FCST_HRS[@]}") +lbc_spec_fcst_hrs_str=$(printf "( %s${escbksl_nl_or_null})" "${lbc_spec_fcst_hrs_str}") + +all_cdates_str=$(printf "${escbksl_nl_or_null}\"%s\" " "${ALL_CDATES[@]}") +all_cdates_str=$(printf "( %s${escbksl_nl_or_null})" "${all_cdates_str}") + +var_defns_file_contents=${var_defns_file_contents}"\ # #----------------------------------------------------------------------- # -# CPL: parameter for coupling in model_configure +# Flag in the \"${MODEL_CONFIG_FN}\" file for coupling the ocean model to +# the weather model. # #----------------------------------------------------------------------- # -CPL="${CPL}" +CPL='${CPL}' # #----------------------------------------------------------------------- # @@ -3119,11 +3065,11 @@ CPL="${CPL}" # #----------------------------------------------------------------------- # -OZONE_PARAM="${OZONE_PARAM}" +OZONE_PARAM='${OZONE_PARAM}' # #----------------------------------------------------------------------- # -# If USE_USER_STAGED_EXTRN_FILES is set to "FALSE", this is the system +# If USE_USER_STAGED_EXTRN_FILES is set to \"FALSE\", this is the system # directory in which the workflow scripts will look for the files generated # by the external model specified in EXTRN_MDL_NAME_ICS. 
These files will # be used to generate the input initial condition and surface files for @@ -3131,11 +3077,11 @@ OZONE_PARAM="${OZONE_PARAM}" # #----------------------------------------------------------------------- # -EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS}" +EXTRN_MDL_SYSBASEDIR_ICS='${EXTRN_MDL_SYSBASEDIR_ICS}' # #----------------------------------------------------------------------- # -# If USE_USER_STAGED_EXTRN_FILES is set to "FALSE", this is the system +# If USE_USER_STAGED_EXTRN_FILES is set to \"FALSE\", this is the system # directory in which the workflow scripts will look for the files generated # by the external model specified in EXTRN_MDL_NAME_LBCS. These files # will be used to generate the input lateral boundary condition files for @@ -3143,7 +3089,7 @@ EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS}" # #----------------------------------------------------------------------- # -EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS}" +EXTRN_MDL_SYSBASEDIR_LBCS='${EXTRN_MDL_SYSBASEDIR_LBCS}' # #----------------------------------------------------------------------- # @@ -3152,7 +3098,7 @@ EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS}" # #----------------------------------------------------------------------- # -EXTRN_MDL_LBCS_OFFSET_HRS="${EXTRN_MDL_LBCS_OFFSET_HRS}" +EXTRN_MDL_LBCS_OFFSET_HRS='${EXTRN_MDL_LBCS_OFFSET_HRS}' # #----------------------------------------------------------------------- # @@ -3161,31 +3107,33 @@ EXTRN_MDL_LBCS_OFFSET_HRS="${EXTRN_MDL_LBCS_OFFSET_HRS}" # #----------------------------------------------------------------------- # -LBC_SPEC_FCST_HRS=(${LBC_SPEC_FCST_HRS[@]}) +LBC_SPEC_FCST_HRS=${lbc_spec_fcst_hrs_str} # #----------------------------------------------------------------------- # -# The number of cycles for which to make forecasts and the list of starting -# dates/hours of these cycles. 
+# The number of cycles for which to make forecasts and the list of +# starting dates/hours of these cycles. # #----------------------------------------------------------------------- # -NUM_CYCLES="${NUM_CYCLES}" -ALL_CDATES=( \\ -$( printf "\"%s\" \\\\\n" "${ALL_CDATES[@]}" ) -) +NUM_CYCLES='${NUM_CYCLES}' +ALL_CDATES=${all_cdates_str} # #----------------------------------------------------------------------- # -# If USE_FVCOM is set to TRUE, then FVCOM data (located in FVCOM_DIR -# in FVCOM_FILE) will be used to update lower boundary conditions during -# make_ics. +# Parameters that determine whether FVCOM data will be used, and if so, +# their location. +# +# If USE_FVCOM is set to \"TRUE\", then FVCOM data (in the file FVCOM_FILE +# located in the directory FVCOM_DIR) will be used to update the surface +# boundary conditions during the initial conditions generation task +# (MAKE_ICS_TN). # #----------------------------------------------------------------------- # -USE_FVCOM="${USE_FVCOM}" -FVCOM_DIR="${FVCOM_DIR}" -FVCOM_FILE="${FVCOM_FILE}" +USE_FVCOM='${USE_FVCOM}' +FVCOM_DIR='${FVCOM_DIR}' +FVCOM_FILE='${FVCOM_FILE}' # #----------------------------------------------------------------------- # @@ -3193,22 +3141,26 @@ FVCOM_FILE="${FVCOM_FILE}" # #----------------------------------------------------------------------- # -NCORES_PER_NODE="${NCORES_PER_NODE}" -PE_MEMBER01="${PE_MEMBER01}" -RUN_CMD_FCST="$(eval echo \'${RUN_CMD_FCST}\')" +PE_MEMBER01='${PE_MEMBER01}' # #----------------------------------------------------------------------- # -# IF DO_SPP="TRUE," N_VAR_SPP is the number of parameterizations that -# are perturbed with SPP, otherwise N_VAR_SPP=0. +# IF DO_SPP is set to \"TRUE\", N_VAR_SPP specifies the number of physics +# parameterizations that are perturbed with SPP. Otherwise, N_VAR_SPP +# is set 0. 
# #----------------------------------------------------------------------- # -N_VAR_SPP="${N_VAR_SPP}" -EOM -} || print_err_msg_exit "\ -Heredoc (cat) command to append new variable definitions to variable -definitions file returned with a nonzero status." +N_VAR_SPP='${N_VAR_SPP}' +" +# +# Done with constructing the contents of the variable definitions file, +# so now write the contents to file. +# +printf "%s\n" "${var_defns_file_contents}" >> ${GLOBAL_VAR_DEFNS_FP} + +print_info_msg "$VERBOSE" " +Done generating the global experiment variable definitions file." # #----------------------------------------------------------------------- # diff --git a/ush/source_util_funcs.sh b/ush/source_util_funcs.sh index 375543d35..60162aa40 100644 --- a/ush/source_util_funcs.sh +++ b/ush/source_util_funcs.sh @@ -211,6 +211,15 @@ function source_util_funcs() { #----------------------------------------------------------------------- # . ${bashutils_dir}/create_symlink_to_file.sh +# +#----------------------------------------------------------------------- +# +# Source the file containing the function that gets the stripped contents +# of a bash script or function. +# +#----------------------------------------------------------------------- +# + . 
${bashutils_dir}/get_bash_file_contents.sh } source_util_funcs From 20a149d25fee405885c0923d54c462d6d6ccb338 Mon Sep 17 00:00:00 2001 From: "Chan-Hoo.Jeon-NOAA" <60152248+chan-hoo@users.noreply.github.com> Date: Thu, 20 Jan 2022 12:28:08 -0500 Subject: [PATCH 13/15] Update divergence damping parameters (#657) * update nord and d4_bg * Change values of ice_nc, rain_nc, and sgs_tke for new divergence damping --- ush/load_modules_run_task.sh | 1 + ush/templates/field_table.FV3_CPT_v0 | 6 +++--- ush/templates/field_table.FV3_GFS_v15p2 | 2 +- ush/templates/field_table.FV3_GFS_v16 | 2 +- ush/templates/field_table.FV3_GSD_SAR | 6 +++--- ush/templates/field_table.FV3_GSD_v0 | 6 +++--- ush/templates/field_table.FV3_HRRR | 6 +++--- ush/templates/field_table.FV3_RRFS_v0 | 6 +++--- ush/templates/field_table.FV3_RRFS_v1alpha | 6 +++--- ush/templates/field_table.FV3_RRFS_v1beta | 6 +++--- ush/templates/input.nml.FV3 | 4 ++-- 11 files changed, 26 insertions(+), 25 deletions(-) diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index 17b6d5d3d..59e3e156f 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -136,6 +136,7 @@ module purge machine=$(echo_lowercase $MACHINE) env_fp="${SR_WX_APP_TOP_DIR}/env/${BUILD_ENV_FN}" +module use "${SR_WX_APP_TOP_DIR}/env" source "${env_fp}" || print_err_msg_exit "\ Sourcing platform- and compiler-specific environment file (env_fp) for the workflow task specified by task_name failed: diff --git a/ush/templates/field_table.FV3_CPT_v0 b/ush/templates/field_table.FV3_CPT_v0 index e9dabfab7..7331ae4ae 100644 --- a/ush/templates/field_table.FV3_CPT_v0 +++ b/ush/templates/field_table.FV3_CPT_v0 @@ -38,12 +38,12 @@ "TRACER", "atmos_mod", "ice_nc" "longname", "cloud ice water number concentration" "units", "/m3" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # prognostic rain number concentration "TRACER", "atmos_mod", "rain_nc" "longname", "rain number 
concentration" "units", "/m3" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # prognostic snow number concentration "TRACER", "atmos_mod", "snow_nc" "longname", "snow number concentration" @@ -63,5 +63,5 @@ "TRACER", "atmos_mod", "sgs_tke" "longname", "subgrid scale turbulent kinetic energy" "units", "m2/s2" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / diff --git a/ush/templates/field_table.FV3_GFS_v15p2 b/ush/templates/field_table.FV3_GFS_v15p2 index 69cc9005f..06adb66d7 100644 --- a/ush/templates/field_table.FV3_GFS_v15p2 +++ b/ush/templates/field_table.FV3_GFS_v15p2 @@ -34,7 +34,7 @@ "TRACER", "atmos_mod", "sgs_tke" "longname", "subgrid scale turbulent kinetic energy" "units", "m2/s2" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # non-prognostic cloud amount "TRACER", "atmos_mod", "cld_amt" "longname", "cloud amount" diff --git a/ush/templates/field_table.FV3_GFS_v16 b/ush/templates/field_table.FV3_GFS_v16 index 69cc9005f..06adb66d7 100644 --- a/ush/templates/field_table.FV3_GFS_v16 +++ b/ush/templates/field_table.FV3_GFS_v16 @@ -34,7 +34,7 @@ "TRACER", "atmos_mod", "sgs_tke" "longname", "subgrid scale turbulent kinetic energy" "units", "m2/s2" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # non-prognostic cloud amount "TRACER", "atmos_mod", "cld_amt" "longname", "cloud amount" diff --git a/ush/templates/field_table.FV3_GSD_SAR b/ush/templates/field_table.FV3_GSD_SAR index 0a927de45..fe96567e5 100644 --- a/ush/templates/field_table.FV3_GSD_SAR +++ b/ush/templates/field_table.FV3_GSD_SAR @@ -38,12 +38,12 @@ "TRACER", "atmos_mod", "ice_nc" "longname", "cloud ice water number concentration" "units", "/kg" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # prognostic rain number concentration "TRACER", 
"atmos_mod", "rain_nc" "longname", "rain number concentration" "units", "/kg" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # prognostic ozone mixing ratio tracer "TRACER", "atmos_mod", "o3mr" "longname", "ozone mixing ratio" @@ -62,4 +62,4 @@ "TRACER", "atmos_mod", "sgs_tke" "longname", "subgrid scale turbulent kinetic energy" "units", "m2/s2" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / diff --git a/ush/templates/field_table.FV3_GSD_v0 b/ush/templates/field_table.FV3_GSD_v0 index 0a927de45..fe96567e5 100644 --- a/ush/templates/field_table.FV3_GSD_v0 +++ b/ush/templates/field_table.FV3_GSD_v0 @@ -38,12 +38,12 @@ "TRACER", "atmos_mod", "ice_nc" "longname", "cloud ice water number concentration" "units", "/kg" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # prognostic rain number concentration "TRACER", "atmos_mod", "rain_nc" "longname", "rain number concentration" "units", "/kg" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # prognostic ozone mixing ratio tracer "TRACER", "atmos_mod", "o3mr" "longname", "ozone mixing ratio" @@ -62,4 +62,4 @@ "TRACER", "atmos_mod", "sgs_tke" "longname", "subgrid scale turbulent kinetic energy" "units", "m2/s2" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / diff --git a/ush/templates/field_table.FV3_HRRR b/ush/templates/field_table.FV3_HRRR index 0a927de45..fe96567e5 100644 --- a/ush/templates/field_table.FV3_HRRR +++ b/ush/templates/field_table.FV3_HRRR @@ -38,12 +38,12 @@ "TRACER", "atmos_mod", "ice_nc" "longname", "cloud ice water number concentration" "units", "/kg" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # prognostic rain number concentration "TRACER", "atmos_mod", "rain_nc" "longname", "rain number 
concentration" "units", "/kg" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # prognostic ozone mixing ratio tracer "TRACER", "atmos_mod", "o3mr" "longname", "ozone mixing ratio" @@ -62,4 +62,4 @@ "TRACER", "atmos_mod", "sgs_tke" "longname", "subgrid scale turbulent kinetic energy" "units", "m2/s2" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / diff --git a/ush/templates/field_table.FV3_RRFS_v0 b/ush/templates/field_table.FV3_RRFS_v0 index 0a927de45..fe96567e5 100644 --- a/ush/templates/field_table.FV3_RRFS_v0 +++ b/ush/templates/field_table.FV3_RRFS_v0 @@ -38,12 +38,12 @@ "TRACER", "atmos_mod", "ice_nc" "longname", "cloud ice water number concentration" "units", "/kg" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # prognostic rain number concentration "TRACER", "atmos_mod", "rain_nc" "longname", "rain number concentration" "units", "/kg" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # prognostic ozone mixing ratio tracer "TRACER", "atmos_mod", "o3mr" "longname", "ozone mixing ratio" @@ -62,4 +62,4 @@ "TRACER", "atmos_mod", "sgs_tke" "longname", "subgrid scale turbulent kinetic energy" "units", "m2/s2" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / diff --git a/ush/templates/field_table.FV3_RRFS_v1alpha b/ush/templates/field_table.FV3_RRFS_v1alpha index 0a927de45..fe96567e5 100644 --- a/ush/templates/field_table.FV3_RRFS_v1alpha +++ b/ush/templates/field_table.FV3_RRFS_v1alpha @@ -38,12 +38,12 @@ "TRACER", "atmos_mod", "ice_nc" "longname", "cloud ice water number concentration" "units", "/kg" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # prognostic rain number concentration "TRACER", "atmos_mod", "rain_nc" "longname", "rain number concentration" 
"units", "/kg" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # prognostic ozone mixing ratio tracer "TRACER", "atmos_mod", "o3mr" "longname", "ozone mixing ratio" @@ -62,4 +62,4 @@ "TRACER", "atmos_mod", "sgs_tke" "longname", "subgrid scale turbulent kinetic energy" "units", "m2/s2" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / diff --git a/ush/templates/field_table.FV3_RRFS_v1beta b/ush/templates/field_table.FV3_RRFS_v1beta index 0a927de45..fe96567e5 100644 --- a/ush/templates/field_table.FV3_RRFS_v1beta +++ b/ush/templates/field_table.FV3_RRFS_v1beta @@ -38,12 +38,12 @@ "TRACER", "atmos_mod", "ice_nc" "longname", "cloud ice water number concentration" "units", "/kg" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # prognostic rain number concentration "TRACER", "atmos_mod", "rain_nc" "longname", "rain number concentration" "units", "/kg" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / # prognostic ozone mixing ratio tracer "TRACER", "atmos_mod", "o3mr" "longname", "ozone mixing ratio" @@ -62,4 +62,4 @@ "TRACER", "atmos_mod", "sgs_tke" "longname", "subgrid scale turbulent kinetic energy" "units", "m2/s2" - "profile_type", "fixed", "surface_value=1.e30" / + "profile_type", "fixed", "surface_value=0.0" / diff --git a/ush/templates/input.nml.FV3 b/ush/templates/input.nml.FV3 index 645e21601..450b1361a 100644 --- a/ush/templates/input.nml.FV3 +++ b/ush/templates/input.nml.FV3 @@ -60,7 +60,7 @@ d2_bg = 0.0 d2_bg_k1 = 0.20 d2_bg_k2 = 0.04 - d4_bg = 0.15 + d4_bg = 0.12 d_con = 1.0 d_ext = 0.0 dddmp = 0.1 @@ -100,7 +100,7 @@ ncep_ic = .false. nggps_ic = .true. no_dycore = .false. 
- nord = 2 + nord = 3 npz = 64 nrows_blend = 10 ntiles = 1 From dcf8b5680249bb4f27d96e305cd2e7c3c03c267c Mon Sep 17 00:00:00 2001 From: Christina Holt <56881914+christinaholtNOAA@users.noreply.github.com> Date: Fri, 28 Jan 2022 09:33:36 -0600 Subject: [PATCH 14/15] Add stand-alone Python data ingest tool. (#667) * Adding YAML file with fixed GFS data locations. * Use "download" as protocol. * Adding WIP script to retrieve data using yml. Does not work at all at this point. * Updates to allow working ingest. * Turn on logging and clean up after htar. * Adding support for RAP/HRRR/NAM * WIP. * Fixing paths, testing HRRR, error if nothing found. * Better handling of un-found dataset via wget. * Adding capability to copy files from disk. * Linting. * Addressing review concerns. --- ush/retrieve_data.py | 707 +++++++++++++++++++++++++++++++ ush/templates/data_locations.yml | 193 +++++++++ 2 files changed, 900 insertions(+) create mode 100644 ush/retrieve_data.py create mode 100644 ush/templates/data_locations.yml diff --git a/ush/retrieve_data.py b/ush/retrieve_data.py new file mode 100644 index 000000000..48210c0d5 --- /dev/null +++ b/ush/retrieve_data.py @@ -0,0 +1,707 @@ +# pylint: disable=logging-fstring-interpolation +''' +This script helps users pull data from known data streams, including +URLS and HPSS (only on supported NOAA platforms), or from user-supplied +data locations on disk. + +Several supported data streams are included in +ush/templates/data_locations.yml, which provides locations and naming +conventions for files commonly used with the SRW App. Provide the file +to this tool via the --config flag. Users are welcome to provide their +own file with alternative locations and naming conventions. + +When using this script to pull from disk, the user is required to +provide the path to the data location, which can include Python +templates. 
The file names follow those included in the --config file by +default, or can be user-supplied via the --file_name flag. That flag +takes a YAML-formatted string that follows the same conventions outlined +in the ush/templates/data_locations.yml file for naming files. + +To see usage for this script: + + python retrieve_data.py -h + +Also see the parse_args function below. +''' + +import argparse +import datetime as dt +import logging +import os +import shutil +import subprocess +import sys + +import yaml + +def clean_up_output_dir(expected_subdir, local_archive, output_path, source_paths): + + ''' Remove expected sub-directories and existing_archive files on + disk once all files have been extracted and put into the specified + output location. ''' + + unavailable = {} + # Check to make sure the files exist on disk + for file_path in source_paths: + local_file_path = os.path.join(output_path,file_path) + if not os.path.exists(local_file_path): + logging.info(f'File does not exist: {local_file_path}') + unavailable['hpss'] = source_paths + else: + file_name = os.path.basename(file_path) + expected_output_loc = os.path.join(output_path, file_name) + if not local_file_path == expected_output_loc: + logging.info(f'Moving {local_file_path} to ' \ + f'{expected_output_loc}') + shutil.move(local_file_path, expected_output_loc) + + # Clean up directories from inside archive, if they exist + if os.path.exists(expected_subdir) and expected_subdir != './': + logging.info(f'Removing {expected_subdir}') + os.removedirs(expected_subdir) + + # If an archive exists on disk, remove it + if os.path.exists(local_archive): + os.remove(local_archive) + + return unavailable + +def copy_file(source, destination): + + ''' + Copy a file from a source and place it in the destination location. + Return a boolean value reflecting the state of the copy. + + Assumes destination exists. 
+ ''' + + if not os.path.exists(source): + logging.info(f'File does not exist on disk \n {source}') + return False + + # Using subprocess here because system copy is much faster than + # python copy options. + cmd = f'cp {source} {destination}' + logging.info(f'Running command: \n {cmd}') + try: + subprocess.run(cmd, + check=True, + shell=True, + ) + except subprocess.CalledProcessError as err: + logging.info(err) + return False + return True + +def download_file(url): + + ''' + Download a file from a url source, and place it in a target location + on disk. + + Arguments: + url url to file to be downloaded + + Return: + boolean value reflecting state of download. + ''' + + # wget flags: + # -c continue previous attempt + # -T timeout seconds + # -t number of tries + cmd = f'wget -c -T 30 -t 3 {url}' + logging.info(f'Running command: \n {cmd}') + try: + subprocess.run(cmd, + check=True, + shell=True, + ) + except subprocess.CalledProcessError as err: + logging.info(err) + return False + except: + logging.error('Command failed!') + raise + + return True + +def download_requested_files(cla, data_store, store_specs): + + ''' This function interacts with the "download" protocol in a + provided data store specs file to download a set of files requested + by the user. It calls download_file for each individual file that + should be downloaded. 
''' + + base_urls = store_specs['url'] + base_urls = base_urls if isinstance(base_urls, list) else [base_urls] + + file_names = store_specs.get('file_names', {}) + if cla.file_type is not None: + file_names = file_names[cla.file_type] + file_names = file_names[cla.anl_or_fcst] + target_path = fill_template(cla.output_path, + cla.cycle_date) + + logging.info(f'Downloaded files will be placed here: \n {target_path}') + orig_path = os.getcwd() + os.chdir(target_path) + unavailable = {} + for base_url in base_urls: + for fcst_hr in cla.fcst_hrs: + for file_name in file_names: + url = os.path.join(base_url, file_name) + url = fill_template(url, cla.cycle_date, fcst_hr) + downloaded = download_file(url) + if not downloaded: + + if unavailable.get(data_store) is None: + unavailable[data_store] = [] + unavailable[data_store].append(target_path) + os.chdir(orig_path) + # Returning here assumes that if the first file + # isn't found, none of the others will be. Don't + # waste time timing out on every requested file. + return unavailable + os.chdir(orig_path) + return unavailable + +def fhr_list(args): + + ''' + Given an argparse list argument, return the sequence of forecast hours to + process. + + The length of the list will determine what forecast hours are returned: + + Length = 1: A single fhr is to be processed + Length = 2: A sequence of start, stop with increment 1 + Length = 3: A sequence of start, stop, increment + Length > 3: List as is + + argparse should provide a list of at least one item (nargs='+'). + + Must ensure that the list contains integers. + ''' + + args = args if isinstance(args, list) else [args] + arg_len = len(args) + if arg_len in (2, 3): + args[1] += 1 + return list(range(*args)) + + return args + +def fill_template(template_str, cycle_date, fcst_hr=0, + templates_only=False): + + ''' Fill in the provided template string with date time information, + and return the resulting string. 
+ + Arguments: + template_str a string containing Python templates + cycle_date a datetime object that will be used to fill in + date and time information + fcst_hr an integer forecast hour. string formatting should + be included in the template_str + templates_only boolean value. When True, this function will only + return the templates available. + + Rerturn: + filled template string + ''' + + cycle_hour = cycle_date.strftime('%H') + # One strategy for binning data files at NCEP is to put them into 6 + # cycle bins. The archive file names include the low and high end of the + # range. Set the range as would be indicated in the archive file + # here. Integer division is intentional here. + low_end = int(cycle_hour) // 6 * 6 + bin6 = f'{low_end:02d}-{low_end+5:02d}' + + # Another strategy is to bundle odd cycle hours with their next + # lowest even cycle hour. Files are named only with the even hour. + # Integer division is intentional here. + hh_even = f'{int(cycle_hour) // 2 * 2:02d}' + + format_values = dict( + bin6=bin6, + fcst_hr=fcst_hr, + dd=cycle_date.strftime('%d'), + hh=cycle_hour, + hh_even=hh_even, + jjj=cycle_date.strftime('%j'), + mm=cycle_date.strftime('%m'), + yy=cycle_date.strftime('%y'), + yyyy=cycle_date.strftime('%Y'), + yyyymm=cycle_date.strftime('%Y%m'), + yyyymmdd=cycle_date.strftime('%Y%m%d'), + yyyymmddhh=cycle_date.strftime('%Y%m%d%H'), + ) + if templates_only: + return f'{",".join((format_values.keys()))}' + return template_str.format(**format_values) + +def find_archive_files(paths, file_names, cycle_date): + + ''' Given an equal-length set of archive paths and archive file + names, and a cycle date, check HPSS via hsi to make sure at least + one set exists. 
Return the path of the existing archive, along with + the item in set of paths that was found.''' + + zipped_archive_file_paths = zip(paths, file_names) + + # Narrow down which HPSS files are available for this date + for list_item, (archive_path, archive_file_names) in \ + enumerate(zipped_archive_file_paths): + + if not isinstance(archive_file_names, list): + archive_file_names = [archive_file_names] + + # Only test the first item in the list, it will tell us if this + # set exists at this date. + file_path = os.path.join(archive_path, archive_file_names[0]) + file_path = fill_template(file_path, cycle_date) + + existing_archive = hsi_single_file(file_path) + + if existing_archive: + logging.info(f'Found HPSS file: {file_path}') + return existing_archive, list_item + + return '', 0 + +def get_requested_files(cla, file_names, input_loc, method='disk'): + + ''' This function copies files from disk locations + or downloads files from a url, depending on the option specified for + user. + + This function expects that the output directory exists and is + writeable. + + Arguments: + + cla Namespace object containing command line arguments + file_names Dict of file names by file type and kind + input_loc A string containing a single data location, either a url + or disk path. 
+ method Choice of disk or download to indicate protocol for + retrieval + + Returns + unavailable a dict whose keys are "method" and whose values are a + list of files unretrievable + ''' + + unavailable = {} + + if cla.file_type is not None: + file_names = file_names[cla.file_type] + file_names = file_names[cla.anl_or_fcst] + + file_names = file_names if isinstance(file_names, list) else \ + [file_names] + target_path = fill_template(cla.output_path, + cla.cycle_date) + + logging.info(f'Retrieved files will be placed here: \n {target_path}') + orig_path = os.getcwd() + os.chdir(target_path) + unavailable = {} + for fcst_hr in cla.fcst_hrs: + for file_name in file_names: + loc = os.path.join(input_loc, file_name) + loc = fill_template(loc, cla.cycle_date, fcst_hr) + + if method == 'disk': + retrieved = copy_file(loc, target_path) + + if method == 'download': + retrieved = download_file(loc) + + if not retrieved: + + if unavailable.get(method) is None: + unavailable[method] = [] + unavailable[method].append(target_path) + os.chdir(orig_path) + # Returning here assumes that if the first file + # isn't found, none of the others will be. Don't + # waste time timing out on every requested file. + return unavailable + os.chdir(orig_path) + return unavailable + +def hsi_single_file(file_path, mode='ls'): + + ''' Call hsi as a subprocess for Python and return information about + whether the file_path was found. + + Arguments: + file_path path on HPSS + mode the hsi command to run. ls is default. 
may also + pass "get" to retrieve the file path + + ''' + cmd = f'hsi {mode} {file_path}' + + logging.info(f'Running command \n {cmd}') + try: + subprocess.run(cmd, + check=True, + shell=True, + ) + except subprocess.CalledProcessError: + logging.warning(f'{file_path} is not available!') + return '' + + return file_path + +def hpss_requested_files(cla, store_specs): + + ''' This function interacts with the "hpss" protocol in a + provided data store specs file to download a set of files requested + by the user. Depending on the type of archive file (zip or tar), it + will either pull the entire file and unzip it, or attempt to pull + individual files from a tar file. + + It cleans up local disk after files are deemed available to remove + any empty subdirectories that may still be present. + + This function exepcts that the output directory exists and is + writable. + ''' + + archive_paths = store_specs['archive_path'] + archive_paths = archive_paths if isinstance(archive_paths, list) \ + else [archive_paths] + + # Could be a list of lists + archive_file_names = store_specs.get('archive_file_names', {}) + if cla.file_type is not None: + archive_file_names = archive_file_names[cla.file_type] + + if isinstance(archive_file_names, dict): + archive_file_names = archive_file_names[cla.anl_or_fcst] + + unavailable = {} + existing_archive = None + + logging.debug(f'Will try to look for: '\ + f' {list(zip(archive_paths, archive_file_names))}') + + existing_archive, which_archive = find_archive_files(archive_paths, + archive_file_names, + cla.cycle_date, + ) + + if not existing_archive: + logging.warning('No archive files were found!') + unavailable['archive'] = list(zip(archive_paths, archive_file_names)) + return unavailable + + # Use the found archive file path to get the necessary files + file_names = store_specs.get('file_names', {}) + if cla.file_type is not None: + file_names = file_names[cla.file_type] + file_names = file_names[cla.anl_or_fcst] + + 
logging.debug(f'Grabbing archive number {which_archive} in list.') + archive_internal_dir = store_specs.get('archive_internal_dir', [''])[which_archive] + archive_internal_dir = fill_template(archive_internal_dir, + cla.cycle_date) + + output_path = fill_template(cla.output_path, cla.cycle_date) + logging.info(f'Will place files in {os.path.abspath(output_path)}') + orig_path = os.getcwd() + os.chdir(output_path) + logging.debug(f'CWD: {os.getcwd()}') + + source_paths = [] + for fcst_hr in cla.fcst_hrs: + for file_name in file_names: + source_paths.append(fill_template( + os.path.join(archive_internal_dir, file_name), + cla.cycle_date, + fcst_hr, + )) + + if store_specs.get('archive_format', 'tar') == 'zip': + # Get the entire file from HPSS + existing_archive = hsi_single_file(existing_archive, mode='get') + + # Grab only the necessary files from the archive + cmd = f'unzip -o {os.path.basename(existing_archive)} {" ".join(source_paths)}' + + else: + cmd = f'htar -xvf {existing_archive} {" ".join(source_paths)}' + + logging.info(f'Running command \n {cmd}') + subprocess.run(cmd, + check=True, + shell=True, + ) + + # Check that files exist and Remove any data transfer artifacts. + unavailable = clean_up_output_dir( + expected_subdir=archive_internal_dir, + local_archive=os.path.basename(existing_archive), + output_path=output_path, + source_paths=source_paths, + ) + + os.chdir(orig_path) + + return unavailable + +def load_str(arg): + + ''' Load a dict string safely using YAML. Return the resulting dict. ''' + return yaml.load(arg, Loader=yaml.SafeLoader) + +def config_exists(arg): + + ''' + Check to ensure that the provided config file exists. If it does, load it + with YAML's safe loader and return the resulting dict. + ''' + + # Check for existence of file + if not os.path.exists(arg): + msg = f'{arg} does not exist!' 
+ raise argparse.ArgumentTypeError(msg) + + with open(arg, 'r') as config_path: + cfg = yaml.load(config_path, Loader=yaml.SafeLoader) + return cfg + +def path_exists(arg): + + ''' Check whether the supplied path exists and is writeable ''' + + if not os.path.exists(arg): + msg = f'{arg} does not exist!' + raise argparse.ArgumentTypeError(msg) + + if not os.access(arg, os.X_OK|os.W_OK): + logging.error(f'{arg} is not writeable!') + raise argparse.ArgumentTypeError(msg) + + return arg + +def setup_logging(debug=False): + + ''' Calls initialization functions for logging package, and sets the + user-defined level for logging in the script.''' + + level = logging.WARNING + if debug: + level = logging.DEBUG + + logging.basicConfig(format='%(levelname)s: %(message)s \n ', level=level) + if debug: + logging.info('Logging level set to DEBUG') + + + +def to_datetime(arg): + ''' Return a datetime object give a string like YYYYMMDDHH. + ''' + + return dt.datetime.strptime(arg, '%Y%m%d%H') + +def to_lower(arg): + ''' Return a string provided by arg into all lower case. ''' + return arg.lower() + +def main(cla): + ''' + Uses known location information to try the known locations and file + paths in priority order. + ''' + + setup_logging(cla.debug) + + known_data_info = cla.config.get(cla.external_model) + if known_data_info is None: + msg = ('No data stores have been defined for', + f'{cla.external_model}!') + raise KeyError(msg) + + unavailable = {} + for data_store in cla.data_stores: + logging.info(f'Checking {data_store} for {cla.external_model}') + store_specs = known_data_info.get(data_store, {}) + + if data_store == 'disk': + file_names = cla.file_names if cla.file_names else \ + known_data_info.get('hpss', {}).get('file_names') + logging.debug(f'User supplied file names are: {file_names}') + if not file_names: + msg = ('No file name found. 
They must be provided \ + either on the command line or on in a config file.') + raise argparse.ArgumentTypeError(msg) + unavailable = get_requested_files(cla, + file_names=file_names, + input_loc=cla.input_file_path, + method='disk', + ) + + elif not store_specs: + msg = (f'No information is available for {data_store}.') + raise KeyError(msg) + + if store_specs.get('protocol') == 'download': + file_names = store_specs.get('file_names') + if not file_names: + msg = ('No file name found. They must be provided \ + either on the command line or on in a config file.') + raise argparse.ArgumentTypeError(msg) + + unavailable = get_requested_files(cla, + file_names=file_names, + input_loc=store_specs['url'], + method='download', + ) + + if store_specs.get('protocol') == 'htar': + unavailable = hpss_requested_files(cla, store_specs) + + if not unavailable: + # All files are found. Stop looking! + break + + logging.warning(f'Requested files are unavialable from {data_store}') + + if unavailable: + logging.error('Could not find any of the requested files.') + sys.exit(1) + +def parse_args(): + + ''' + Function maintains the arguments accepted by this script. Please see + Python's argparse documenation for more information about settings of each + argument. + ''' + + description=( + 'Allowable Python templates for paths, urls, and file names are '\ + ' defined in the fill_template function and include:\n' \ + f'{"-"*120}\n' \ + f'{fill_template("null", dt.datetime.now(), templates_only=True)}') + parser = argparse.ArgumentParser( + description=description, + ) + + # Required + parser.add_argument( + '--anl_or_fcst', + choices=('anl', 'fcst'), + help='Flag for whether analysis or forecast \ + files should be gathered', + required=True, + ) + parser.add_argument( + '--config', + help='Full path to a configuration file containing paths and \ + naming conventions for known data streams. 
The default included \ + in this repository is in ush/templates/data_locations.yml', + type=config_exists, + ) + parser.add_argument( + '--cycle_date', + help='Cycle date of the data to be retrieved in YYYYMMDDHH \ + format.', + required=True, + type=to_datetime, + ) + parser.add_argument( + '--data_stores', + help='List of priority data_stores. Tries first list item \ + first. Choices: hpss, nomads, aws, disk', + nargs='*', + required=True, + type=to_lower, + ) + parser.add_argument( + '--external_model', + choices=('FV3GFS', 'GSMGFS', 'HRRR', 'NAM', 'RAP', 'RAPx', + 'HRRRx'), + help='External model label. This input is case-sensitive', + required=True, + ) + parser.add_argument( + '--fcst_hrs', + help='A list describing forecast hours. If one argument, \ + one fhr will be processed. If 2 or 3 arguments, a sequence \ + of forecast hours [start, stop, [increment]] will be \ + processed. If more than 3 arguments, the list is processed \ + as-is.', + nargs='+', + required=True, + type=int, + ) + parser.add_argument( + '--output_path', + help='Path to a location on disk. Path is expected to exist.', + required=True, + type=os.path.abspath, + ) + + # Optional + parser.add_argument( + '--debug', + action='store_true', + help='Print debug messages', + ) + parser.add_argument( + '--file_names', + help='A YAML-formatted string that indicates the naming \ + convention the be used for the files retrieved from disk. If \ + not provided, the default names from hpss are used.', + type=load_str, + ) + parser.add_argument( + '--file_type', + choices=('grib2', 'nemsio', 'netcdf'), + help='External model file format', + ) + parser.add_argument( + '--input_file_path', + help='A path to data stored on disk. The path may contain \ + Python templates. 
File names may be supplied using the \
+        --file_names flag, or the default naming convention will be \
+        taken from the --config file.',
+        nargs='*',
+        )
+    return parser.parse_args()
+
+if __name__ == '__main__':
+
+    CLA = parse_args()
+    CLA.output_path = path_exists(CLA.output_path)
+    CLA.fcst_hrs = fhr_list(CLA.fcst_hrs)
+
+    if 'disk' in CLA.data_stores:
+        # Make sure a path was provided.
+        if not CLA.input_file_path:
+            raise argparse.ArgumentTypeError(
+                ('You must provide an input_file_path when choosing ' \
+                 ' disk as a data store!'))
+
+    if 'hpss' in CLA.data_stores:
+        # Make sure hpss module is loaded
+        try:
+            output = subprocess.run('which hsi',
+                                    check=True,
+                                    shell=True,
+                                    capture_output=True,
+                                    )
+        except subprocess.CalledProcessError:
+            logging.error('You requested the hpss data store, but ' \
+                          'the HPSS module isn\'t loaded. This data store ' \
+                          'is only available on NOAA compute platforms.')
+
+    main(CLA)
diff --git a/ush/templates/data_locations.yml b/ush/templates/data_locations.yml
new file mode 100644
index 000000000..23354e171
--- /dev/null
+++ b/ush/templates/data_locations.yml
@@ -0,0 +1,193 @@
+# This file tracks known data store locations for data used as input to
+# the SRW App configurations.
+#
+# The contents should be organized as follows:
+#
+# Top level: A key corresponding to an external model file or
+# observation dataset accepted by the SRW App.
+#
+# 2nd level: A key corresponding to a named datastore for a given data
+# set. This could be nomads, aws, hpss, etc.
+#
+# 3rd level required:
+#
+# protocol:
+# indication of how a set of files will be retrieved. Options:
+# download or htar. The requirements for each protocol are described
+# below
+#
+# file_names:
+# Each data store will require an entry that describes the names of
+# files to be extracted from the data store. These entries are
+# expected as lists to indicate when multiple files should be
+# retrieved. 
anl and fcst subsections indicate the naming convention +# for that type of file. Both are required for external models ICS and +# LBCS. Each file name may (and likely should) include templates. +# +# If the SRW App accepts different file formats, +# as it does for FV3GFS, an intermediary level indicating the value +# associate with a FV3GFS_FILE_FMT_* variable. +# +# 3rd level optional: +# +# for download protocol: +# url: required. the URL to the location of the data file. May include +# templates. +# +# for htar protocol: +# archive_path: a list of paths to the potential location of the +# archive file on HPSS. since there may be multiple +# options for a given external model file, the list +# will be searched in order listed below. +# archive_file_names: the name of the archive file. this could +# differ for each archive_path option, so one entry is +# needed (even if it's identical) for each entry of +# archive_path provided +# archive_internal_dir: (optional) a path to data files stored +# inside a given archive file +# +# +# +FV3GFS: + nomads: + protocol: download + url: https://nomads.ncep.noaa.gov/pub/data/nccf/com/gfs/prod/gfs.{yyyymmdd}/{hh}/atmos + file_names: &gfs_file_names + grib2: + anl: + - gfs.t{hh}z.pgrb2.0p25.f000 + fcst: + - gfs.t{hh}z.pgrb2.0p25.f{fcst_hr:03d} + nemsio: + anl: + - gfs.t{hh}z.atmanl.nemsio + - gfs.t{hh}z.sfcanl.nemsio + fcst: + - gfs.t{hh}z.atmf{fcst_hr:03d}.nemsio + - gfs.t{hh}z.sfcf{fcst_hr:03d}.nemsio + hpss: + protocol: htar + archive_path: + - /NCEPPROD/hpssprod/runhistory/rh{yyyy}/{yyyymm}/{yyyymmdd} + - /NCEPPROD/hpssprod/runhistory/rh{yyyy}/{yyyymm}/{yyyymmdd} + archive_internal_dir: + - ./gfs.{yyyymmdd}/{hh} + - ./gfs.{yyyymmdd}/{hh}/atmos + archive_file_names: + grib2: + anl: + - gpfs_dell1_nco_ops_com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_pgrb2.tar + - com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_pgrb2.tar + fcst: + - gpfs_dell1_nco_ops_com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_pgrb2.tar + - 
com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_pgrb2.tar
+      nemsio:
+        anl:
+          - gpfs_dell1_nco_ops_com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_nemsioa.tar
+          - com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_nemsioa.tar
+        fcst:
+          - ['gpfs_dell1_nco_ops_com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_nemsioa.tar', 'gpfs_dell1_nco_ops_com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_nemsiob.tar']
+          - ['com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_nemsioa.tar', 'com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_nemsiob.tar']
+      netcdf:
+        anl:
+          - gpfs_dell1_nco_ops_com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_nca.tar
+          - com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_nca.tar
+        fcst:
+          - ['gpfs_dell1_nco_ops_com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_nca.tar', 'gpfs_dell1_nco_ops_com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_ncb.tar']
+          - ['com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_nca.tar', 'com_gfs_prod_gfs.{yyyymmdd}_{hh}.gfs_ncb.tar']
+    file_names:
+      <<: *gfs_file_names
+  aws:
+    protocol: download
+    url: https://noaa-gfs-bdp-pds.s3.amazonaws.com/gfs.{yyyymmdd}/{hh}/atmos
+    file_names:
+      <<: *gfs_file_names
+
+RAP:
+  hpss:
+    protocol: htar
+    archive_format: tar
+    archive_path:
+      - /NCEPPROD/hpssprod/runhistory/rh{yyyy}/{yyyymm}/{yyyymmdd}
+    archive_internal_dir:
+      - ./
+    archive_file_names:
+      # RAP forecasts are binned into 6 hour tar files.
+      - com_rap_prod_rap.{yyyymmdd}{bin6}.wrf.tar
+    file_names: &rap_file_names
+      anl:
+        - rap.t{hh}z.wrfnatf{fcst_hr:02d}.grib2
+      fcst:
+        - rap.t{hh}z.wrfnatf{fcst_hr:02d}.grib2
+  aws:
+    protocol: download
+    url: https://noaa-rap-pds.s3.amazonaws.com/rap.{yyyymmdd}
+    file_names:
+      <<: *rap_file_names
+
+RAPx:
+  hpss:
+    protocol: htar
+    archive_format: zip
+    archive_path:
+      - /BMC/fdr/Permanent/{yyyy}/{mm}/{dd}/data/fsl/rap/full/wrfnat
+    archive_file_names:
+      # RAPx bins two cycles together, and are named by the lower even value
+      # of the cycle hour. 
+ - '{yyyymmdd}{hh_even}00.zip' + file_names: + anl: + - '{yy}{jjj}{hh}00{fcst_hr:02d}00' + fcst: + - '{yy}{jjj}{hh}00{fcst_hr:02d}00' + +HRRR: + hpss: + protocol: htar + archive_format: tar + archive_path: + - /NCEPPROD/hpssprod/runhistory/rh{yyyy}/{yyyymm}/{yyyymmdd} + archive_internal_dir: + - ./ + archive_file_names: + # HRRR forecasts are binned into 6 hour tar files. + - com_hrrr_prod_hrrr.{yyyymmdd}_conus{bin6}.wrfnatdng.tar + file_names: &hrrr_file_names + anl: + - hrrr.t{hh}z.wrfnatf{fcst_hr:02d}.grib2 + fcst: + - hrrr.t{hh}z.wrfnatf{fcst_hr:02d}.grib2 + aws: + protocol: download + url: https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.{yyyymmdd}/conus/ + file_names: + <<: *hrrr_file_names + +HRRRx: + hpss: + protocol: htar + archive_format: zip + archive_path: + - /BMC/fdr/Permanent/{yyyy}/{mm}/{dd}/data/fsl/hrrr/conus/wrfnat + archive_file_names: + - '{yyyymmddhh}00.zip' + file_names: + anl: + - '{yy}{jjj}{hh}00{fcst_hr:02d}00' + fcst: + - '{yy}{jjj}{hh}00{fcst_hr:02d}00' + +NAM: + hpss: + protocol: htar + archive_format: tar + archive_path: + - /NCEPPROD/hpssprod/runhistory/rh{yyyy}/{yyyymm}/{yyyymmdd} + archive_file_names: + - com_nam_prod_nam.{yyyymmddhh}.bgrid.tar + file_names: + anl: + - nam.t{hh}z.bgrdsf{fcst_hr:03d}.tm00 + fcst: + - nam.t{hh}z.bgrdsf{fcst_hr:03d} + From e1b44944ee870a10510d259e042433af859138d5 Mon Sep 17 00:00:00 2001 From: Christina Holt <56881914+christinaholtNOAA@users.noreply.github.com> Date: Mon, 31 Jan 2022 17:52:41 -0600 Subject: [PATCH 15/15] Fixing path to input data on Jet. 
(#669) --- tests/WE2E/run_WE2E_tests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/WE2E/run_WE2E_tests.sh b/tests/WE2E/run_WE2E_tests.sh index dd1f50613..15db83c3d 100755 --- a/tests/WE2E/run_WE2E_tests.sh +++ b/tests/WE2E/run_WE2E_tests.sh @@ -938,7 +938,7 @@ PTMP=\"${PTMP}\"" elif [ "$MACHINE" = "HERA" ]; then extrn_mdl_source_basedir="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/staged_extrn_mdl_files" elif [ "$MACHINE" = "JET" ]; then - extrn_mdl_source_basedir="/mnt/lfs1/BMC/fim/Gerard.Ketefian/UFS_CAM/staged_extrn_mdl_files" + extrn_mdl_source_basedir="/mnt/lfs1/BMC/gsd-fv3/Gerard.Ketefian/UFS_CAM/staged_extrn_mdl_files" elif [ "$MACHINE" = "CHEYENNE" ]; then extrn_mdl_source_basedir="/glade/p/ral/jntp/UFS_SRW_app/staged_extrn_mdl_files" elif [ "$MACHINE" = "ORION" ]; then