From d39c2ca43ea326929a1170cca018f74e792b7f07 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Fri, 17 Nov 2023 00:46:09 +0000 Subject: [PATCH 01/24] create a feature branch AQMv7_release_v7 to update J-job and ex-scripts --- jobs/JAQM_BIAS_CORRECTION_O3 | 92 ++++---- jobs/JAQM_BIAS_CORRECTION_PM25 | 96 +++++---- jobs/JAQM_DATA_CLEANUP | 7 +- jobs/JAQM_FIRE_EMISSION | 92 ++++---- jobs/{JAQM_RUN_FCST => JAQM_FORECAST} | 95 +++++---- jobs/{JAQM_AQM_ICS => JAQM_ICS} | 91 ++++---- jobs/{JAQM_AQM_LBCS => JAQM_LBCS} | 90 ++++---- jobs/JAQM_MAKE_ICS | 82 +++++--- jobs/JAQM_MAKE_LBCS | 79 ++++--- jobs/JAQM_NEXUS_EMISSION | 96 +++++---- jobs/JAQM_NEXUS_POST_SPLIT | 87 +++++--- jobs/JAQM_POINT_SOURCE | 80 ++++--- jobs/{JAQM_RUN_POST => JAQM_POST} | 117 ++++++----- jobs/JAQM_POST_STAT_O3 | 97 +++++---- jobs/JAQM_POST_STAT_PM25 | 93 +++++---- jobs/JAQM_PRE_POST_STAT | 86 ++++---- parm/FV3LAM_wflow.xml | 18 +- parm/config/var_defns.sh.nco.static | 4 +- scripts/exaqm_bias_correction_o3.sh | 70 +++---- scripts/exaqm_bias_correction_pm25.sh | 65 +++--- scripts/exaqm_data_cleanup.sh | 2 +- scripts/exaqm_fire_emission.sh | 12 +- .../{exaqm_run_fcst.sh => exaqm_forecast.sh} | 32 +-- scripts/{exaqm_aqm_ics.sh => exaqm_ics.sh} | 12 +- scripts/{exaqm_aqm_lbcs.sh => exaqm_lbcs.sh} | 22 +- scripts/exaqm_make_ics.sh | 196 +++--------------- scripts/exaqm_make_lbcs.sh | 91 ++------ scripts/exaqm_nexus_emission.sh | 22 +- scripts/exaqm_nexus_post_split.sh | 14 +- scripts/exaqm_point_source.sh | 11 +- scripts/{exaqm_run_post.sh => exaqm_post.sh} | 20 +- scripts/exaqm_post_stat_o3.sh | 36 ++-- scripts/exaqm_post_stat_pm25.sh | 49 ++--- scripts/exaqm_pre_post_stat.sh | 12 +- ush/auto_A1_cp_fix_link_fix_lam.sh | 57 ----- ush/auto_A2_cp_fix.sh | 11 + 36 files changed, 1071 insertions(+), 1065 deletions(-) rename jobs/{JAQM_RUN_FCST => JAQM_FORECAST} (61%) rename jobs/{JAQM_AQM_ICS => JAQM_ICS} (66%) rename jobs/{JAQM_AQM_LBCS => JAQM_LBCS} (65%) rename jobs/{JAQM_RUN_POST => JAQM_POST} (70%) 
rename scripts/{exaqm_run_fcst.sh => exaqm_forecast.sh} (98%) rename scripts/{exaqm_aqm_ics.sh => exaqm_ics.sh} (97%) rename scripts/{exaqm_aqm_lbcs.sh => exaqm_lbcs.sh} (96%) rename scripts/{exaqm_run_post.sh => exaqm_post.sh} (97%) delete mode 100755 ush/auto_A1_cp_fix_link_fix_lam.sh create mode 100755 ush/auto_A2_cp_fix.sh diff --git a/jobs/JAQM_BIAS_CORRECTION_O3 b/jobs/JAQM_BIAS_CORRECTION_O3 index 0b33006a00..80b6691264 100755 --- a/jobs/JAQM_BIAS_CORRECTION_O3 +++ b/jobs/JAQM_BIAS_CORRECTION_O3 @@ -1,49 +1,71 @@ #!/bin/bash -# -#----------------------------------------------------------------------- -# -# This script runs BIAS-CORRECTION-O3. -# -#----------------------------------------------------------------------- -# +date +export PS4=' $SECONDS + ' +set -xue -# #----------------------------------------------------------------------- -# # Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh + +export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" +export DATA=${DATA:-${DATAROOT}/${jobid}} + +mkdir -p ${DATA} +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} + +setpdy.sh +. ./PDY + +. 
$USHaqm/job_preamble.sh + +export SENDDBN=${SENDDBN:-YES} +export SENDECF=${SENDECF:-YES} +export SENDCOM=${SENDCOM:-YES} +export SENDDBN_NTC=${SENDDBN_NTC:-NO} + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} + +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + +export PCOM=${PCOM:-${COMOUT}/wmo} +export DCOMINairnow="${DCOMINairnow:-${DCOMROOT}}" + +export COMINbicor=${COMINbicor:-$(compath.py ${NET}/${model_ver}})} +export COMOUTbicor=${COMOUTbicor:-$(compath.py -o ${NET}/${aqm_ver}})} # #----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. -# #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). -# #----------------------------------------------------------------------- # scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# + +env #----------------------------------------------------------------------- -# # Print message indicating entry into script. 
-# #----------------------------------------------------------------------- # print_info_msg " @@ -55,14 +77,12 @@ This is the J-job script for the task that runs BIAS-CORRECTION-O3. ========================================================================" # #----------------------------------------------------------------------- -# # Set the run directory. -# #----------------------------------------------------------------------- # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_BIAS_CORRECTION_O3}" -mkdir -p ${COMOUTwmo} +mkdir -p ${PCOM} export PARMaqm_utils="${PARMaqm_utils:-${HOMEaqm}/sorc/AQM-utils/parm}" @@ -76,23 +96,25 @@ export BC_STDAY=${BC_STDAY:-${TMP_STDAY}} # #----------------------------------------------------------------------- # -$SCRIPTSdir/exaqm_bias_correction_o3.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- +startmsg +${HOMEaqm}/scripts/exaqm_bias_correction_o3.sh +export err=$?; err_chk # -# Run job postamble. -# -#----------------------------------------------------------------------- # -job_postamble +if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA +fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" # #----------------------------------------------------------------------- -# # Restore the shell options saved at the beginning of this script/func- # tion. 
-# #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 diff --git a/jobs/JAQM_BIAS_CORRECTION_PM25 b/jobs/JAQM_BIAS_CORRECTION_PM25 index 8dc0400f85..cdbcddc7bd 100755 --- a/jobs/JAQM_BIAS_CORRECTION_PM25 +++ b/jobs/JAQM_BIAS_CORRECTION_PM25 @@ -1,39 +1,63 @@ #!/bin/bash -# -#----------------------------------------------------------------------- -# -# This script runs BIAS-CORRECTION-PM25. -# -#----------------------------------------------------------------------- -# +date +export PS4=' $SECONDS + ' +set -xue -# #----------------------------------------------------------------------- -# # Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh + +export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" +export DATA=${DATA:-${DATAROOT}/${jobid}} + +mkdir -p ${DATA} +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} + +setpdy.sh +. ./PDY + +. 
$USHaqm/job_preamble.sh + +export SENDDBN=${SENDDBN:-YES} +export SENDECF=${SENDECF:-YES} +export SENDCOM=${SENDCOM:-YES} +export SENDDBN_NTC=${SENDDBN_NTC:-NO} + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} + +export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" +export DCOMINairnow="${DCOMINairnow:-${DCOMROOT}}" + +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" +export PCOM=${PCOM:-${COMOUT}/wmo} + +export COMINbicor=${COMINbicor:-$(compath.py ${NET}/${model_ver}})} +export COMOUTbicor=${COMOUTbicor:-$(compath.py -o ${NET}/${aqm_ver}})} # #----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. -# #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). -# #----------------------------------------------------------------------- # scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) @@ -41,9 +65,7 @@ scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- -# # Print message indicating entry into script. 
-# #----------------------------------------------------------------------- # print_info_msg " @@ -55,44 +77,42 @@ This is the J-job script for the task that runs BIAS-CORRECTION-PM25. ========================================================================" # #----------------------------------------------------------------------- -# # Set the run directory. -# #----------------------------------------------------------------------- # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_BIAS_CORRECTION_PM25}" -mkdir -p ${COMOUTwmo} +mkdir -p ${PCOM} export PARMaqm_utils="${PARMaqm_utils:-${HOMEaqm}/sorc/AQM-utils/parm}" TMP_STDAY=`${NDATE} -8760 ${PDY}${cyc} | cut -c1-8` # 1 year back export BC_STDAY=${BC_STDAY:-${TMP_STDAY}} -# + +env #----------------------------------------------------------------------- -# # Call the ex-script for this J-job and pass to it the necessary varia- # bles. -# #----------------------------------------------------------------------- # -$SCRIPTSdir/exaqm_bias_correction_pm25.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." +startmsg +${HOMEaqm}/scripts/exaqm_bias_correction_pm25.sh +export err=$?; err_chk # -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble +if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA +fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" # #----------------------------------------------------------------------- -# # Restore the shell options saved at the beginning of this script/func- # tion. 
-# #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 diff --git a/jobs/JAQM_DATA_CLEANUP b/jobs/JAQM_DATA_CLEANUP index 3426e1ddb7..89abf0bbc7 100755 --- a/jobs/JAQM_DATA_CLEANUP +++ b/jobs/JAQM_DATA_CLEANUP @@ -1,8 +1,8 @@ #!/bin/bash -set -x -export PS4='$SECONDS + ' date +export PS4='$SECONDS + ' +set -xue ############################################## # Obtain unique process id (pid) and make temp directory @@ -18,9 +18,12 @@ cd $DATA # Run setpdy and initialize PDY variables ############################################## export cycle="t${cyc}z" +export CDATE=${PDY}${cyc} + setpdy.sh . ./PDY +env ############################################## # Set variables used in the script ############################################## diff --git a/jobs/JAQM_FIRE_EMISSION b/jobs/JAQM_FIRE_EMISSION index 30e5032491..dd61c6484f 100755 --- a/jobs/JAQM_FIRE_EMISSION +++ b/jobs/JAQM_FIRE_EMISSION @@ -1,49 +1,64 @@ #!/bin/bash -# -#----------------------------------------------------------------------- -# -# This script gets fire emission data files from disk or HPSS -# -#----------------------------------------------------------------------- -# +date +export PS4=' $SECONDS + ' +set -xue -# #----------------------------------------------------------------------- -# # Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh + +export DATAROOT=${DATAROOT:-${DATAROOT_dfv}} +export DATA=${DATA:-${DATAROOT}/${jobid}} + +mkdir -p ${DATA} +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} + +setpdy.sh +. ./PDY + +export CDATE=${PDY}${cyc} +. 
$USHaqm/job_preamble.sh + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} +export FIXaqmfire="${FIXaqmfire:-${HOMEaqm}/fix/fire}" + +export DCOMROOT=${DCOMROOT:-/lfs/h1/ops/prod/dcom} +export DCOMINfire="${DCOMINfire:-${DCOMROOT}/${PDY}/rave}" +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + # #----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. -# #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). -# #----------------------------------------------------------------------- # scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# + +env #----------------------------------------------------------------------- -# # Print message indicating entry into script. -# #----------------------------------------------------------------------- # print_info_msg " @@ -56,9 +71,7 @@ emission data files from disk, or HPSS. 
========================================================================" # #----------------------------------------------------------------------- -# # Set the external model start time -# #----------------------------------------------------------------------- # export TIME_OFFSET_HRS=${AQM_FIRE_FILE_OFFSET_HRS:-0} @@ -68,10 +81,8 @@ export FIRE_FILE_CDATE=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - ${TIM # #----------------------------------------------------------------------- -# # Check whether FIRE EMISSION data files are available on the specified # cycle date and time on HPSS (FIRE_FILE_CDATE). -# #----------------------------------------------------------------------- # CDATE_min="2022101500" @@ -85,44 +96,39 @@ CDATE_min: \"${CDATE_min}\" fi # #----------------------------------------------------------------------- -# # Set the run directory -# #----------------------------------------------------------------------- # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_FIRE_EMISSION}" # #----------------------------------------------------------------------- -# # Create the directory where the RAVE fire emission files should be stored -# #----------------------------------------------------------------------- # export FIRE_EMISSION_STAGING_DIR="${FIRE_EMISSION_STAGING_DIR:-${COMIN}/$cyc/FIRE_EMISSION}" mkdir -p ${FIRE_EMISSION_STAGING_DIR} # #----------------------------------------------------------------------- -# # Call the ex-script for this J-job and pass to it the necessary variables. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exaqm_fire_emission.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# #----------------------------------------------------------------------- # -# Run job postamble. 
-# -#----------------------------------------------------------------------- +startmsg +${HOMEaqm}/scripts/exaqm_fire_emission.sh +export err=$?; err_chk # -job_postamble +if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA +fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" # #----------------------------------------------------------------------- -# # Restore the shell options saved at the beginning of this script/function. -# #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 diff --git a/jobs/JAQM_RUN_FCST b/jobs/JAQM_FORECAST similarity index 61% rename from jobs/JAQM_RUN_FCST rename to jobs/JAQM_FORECAST index ee9fabcfec..4f31710d21 100755 --- a/jobs/JAQM_RUN_FCST +++ b/jobs/JAQM_FORECAST @@ -1,51 +1,66 @@ #!/bin/bash -# -#----------------------------------------------------------------------- -# -# This script copies files from various directories into the experiment -# directory, creates links to some of them, and modifies others (e.g. -# templates) to customize them for the current experiment setup. -# -#----------------------------------------------------------------------- -# +date +export PS4=' $SECONDS + ' +set -xue -# #----------------------------------------------------------------------- -# # Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} -. 
$USHdir/job_preamble.sh "TRUE" -# + +export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" +export DATA=${DATA:-${DATAROOT}/${jobid}} + +mkdir -p ${DATA} +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} + +setpdy.sh +. ./PDY + +export CDATE=${PDY}${cyc} + +. $USHaqm/job_preamble.sh + +export SENDDBN=${SENDDBN:-YES} +export SENDECF=${SENDECF:-YES} +export SENDCOM=${SENDCOM:-YES} +export SENDDBN_NTC=${SENDDBN_NTC:-NO} + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} + +export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + #----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. -# #----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). -# #----------------------------------------------------------------------- # scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# + +env #----------------------------------------------------------------------- -# # Print message indicating entry into script. 
-# #----------------------------------------------------------------------- # print_info_msg " @@ -58,9 +73,7 @@ the specified cycle. ========================================================================" # #----------------------------------------------------------------------- -# # Create the INPUT and RESTART directories under the run directory. -# #----------------------------------------------------------------------- # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" @@ -70,29 +83,27 @@ mkdir -p ${DATA}/RESTART export INPUT_DATA="${INPUT_DATA:-${COMIN}/${cyc}}" # #----------------------------------------------------------------------- -# # Call the ex-script for this J-job and pass to it the necessary varia- # bles. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exaqm_run_fcst.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# #----------------------------------------------------------------------- # -# Run job postamble. -# -#----------------------------------------------------------------------- +startmsg +${HOMEaqm}/scripts/exaqm_forecast.sh +export err=$?; err_chk # -job_postamble "FALSE" +if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} +fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" # #----------------------------------------------------------------------- -# # Restore the shell options saved at the beginning of this script/func- # tion. 
-# #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 diff --git a/jobs/JAQM_AQM_ICS b/jobs/JAQM_ICS similarity index 66% rename from jobs/JAQM_AQM_ICS rename to jobs/JAQM_ICS index 0a60d66e61..4bb2b18ae3 100755 --- a/jobs/JAQM_AQM_ICS +++ b/jobs/JAQM_ICS @@ -1,50 +1,62 @@ #!/bin/bash -# -#----------------------------------------------------------------------- -# -# This script adds extra species for air quality modeling (AQM) to the -# initial conditions (ICs). -# -#----------------------------------------------------------------------- -# +date +export PS4=' $SECONDS + ' +set -xue -# #----------------------------------------------------------------------- -# # Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# + +export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" +export DATA=${DATA:-${DATAROOT}/${jobid}} +mkdir -p ${DATA} + +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} +export CDATE=${PDY}${cyc} + +setpdy.sh +. ./PDY + +. 
$USHaqm/job_preamble.sh + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} + +export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + #----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. -# #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). -# #----------------------------------------------------------------------- # scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# + +env #----------------------------------------------------------------------- -# # Print message indicating entry into script. -# #----------------------------------------------------------------------- # print_info_msg " @@ -58,45 +70,40 @@ which the model needs. 
========================================================================" # #----------------------------------------------------------------------- -# # Set the name of and create the directory in which the output from this # script will be placed (if it doesn't already exist). -# #----------------------------------------------------------------------- # export INPUT_DATA="${INPUT_DATA:-${COMIN}/${cyc}}" mkdir -p ${INPUT_DATA} # #----------------------------------------------------------------------- -# # Set the run directory -# #----------------------------------------------------------------------- # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_AQM_ICS}" # #----------------------------------------------------------------------- -# # Call the ex-script for this J-job and pass to it the necessary variables. -# #----------------------------------------------------------------------- # -$SCRIPTSdir/exaqm_aqm_ics.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble +startmsg +${HOMEaqm}/scripts/exaqm_ics.sh +export err=$?; err_chk # +if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA +fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" + #----------------------------------------------------------------------- -# # Restore the shell options saved at the beginning of this script/function. 
-# #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 diff --git a/jobs/JAQM_AQM_LBCS b/jobs/JAQM_LBCS similarity index 65% rename from jobs/JAQM_AQM_LBCS rename to jobs/JAQM_LBCS index e4fb117672..cfc5248f16 100755 --- a/jobs/JAQM_AQM_LBCS +++ b/jobs/JAQM_LBCS @@ -1,50 +1,65 @@ #!/bin/bash -# -#----------------------------------------------------------------------- -# -# This script adds extra species for air quality modeling (AQM) to the -# lateral boundary conditions (LBCs) files. -# -#----------------------------------------------------------------------- -# +date +export PS4=' $SECONDS + ' +set -xue -# #----------------------------------------------------------------------- -# # Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh + +export DATAROOT=${DATAROOT:-${DATAROOT_dfv}} +export DATA=${DATA:-${DATAROOT}/${jobid}} + +mkdir -p ${DATA} +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} +export CDATE=${PDY}${cyc} + +setpdy.sh +. ./PDY + +. 
$USHaqm/job_preamble.sh + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} + +export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + +export COMINgefs="${COMINgefs:-$(compath.py ${envir}/gefs/${gefs_ver})}" +export FIXaqmchem_lbcs="${FIXaqmchem_lbcs:-${HOMEaqm}/fix/chem_lbcs}" + # #----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. -# #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). -# #----------------------------------------------------------------------- # scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# + +env #----------------------------------------------------------------------- -# # Print message indicating entry into script. -# #----------------------------------------------------------------------- # print_info_msg " @@ -58,45 +73,40 @@ which the model needs. 
========================================================================" # #----------------------------------------------------------------------- -# # Set the name of and create the directory in which the output from this # script will be placed (if it doesn't already exist). -# #----------------------------------------------------------------------- # export INPUT_DATA="${INPUT_DATA:-${COMIN}/${cyc}}" mkdir -p ${INPUT_DATA} # #----------------------------------------------------------------------- -# # Set the run directory -# #----------------------------------------------------------------------- # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_AQM_LBCS}" # #----------------------------------------------------------------------- -# # Call the ex-script for this J-job and pass to it the necessary variables. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exaqm_aqm_lbcs.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# #----------------------------------------------------------------------- # -# Run job postamble. -# -#----------------------------------------------------------------------- +startmsg +${HOMEaqm}/scripts/exaqm_lbcs.sh +export err=$?; err_chk # -job_postamble +if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA +fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" # #----------------------------------------------------------------------- -# # Restore the shell options saved at the beginning of this script/function. 
-# #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 diff --git a/jobs/JAQM_MAKE_ICS b/jobs/JAQM_MAKE_ICS index 04946d471c..8e8dedfae2 100755 --- a/jobs/JAQM_MAKE_ICS +++ b/jobs/JAQM_MAKE_ICS @@ -1,41 +1,64 @@ -#!/bin/bash +#!/bin/bash + +date +export PS4=' $SECONDS + ' +set -xue -# #----------------------------------------------------------------------- -# # Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "task_make_ics|task_get_extrn_ics" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh + +export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" +export DATA=${DATA:-${DATAROOT}/${jobid}} + +mkdir -p ${DATA} +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} + +setpdy.sh +. ./PDY + +export CDATE=${PDY}${cyc} + +. $USHaqm/job_preamble.sh + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} + +export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMINm2="${COMINm2:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + # #----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. -# #----------------------------------------------------------------------- # -{ save_shell_opts; . 
$USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). -# #----------------------------------------------------------------------- # scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# + +env #----------------------------------------------------------------------- -# # Print message indicating entry into script. -# #----------------------------------------------------------------------- # print_info_msg " @@ -67,9 +90,7 @@ mkdir -p ${INPUT_DATA} DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_MAKE_ICS}" # #----------------------------------------------------------------------- -# # Set environment only when RUN_TASK_GET_EXTRN_ICS is false. -# #----------------------------------------------------------------------- # if [ "${RUN_TASK_GET_EXTRN_ICS}" = "FALSE" ]; then @@ -83,27 +104,26 @@ if [ "${RUN_TASK_GET_EXTRN_ICS}" = "FALSE" ]; then fi # #----------------------------------------------------------------------- -# # Call the ex-script for this J-job and pass to it the necessary variables. -# #----------------------------------------------------------------------- # -$SCRIPTSdir/exaqm_make_ics.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- +startmsg +${HOMEaqm}/scripts/exaqm_make_ics.sh +export err=$?; err_chk # -# Run job postamble. 
-# -#----------------------------------------------------------------------- -# -job_postamble +if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA +fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" # #----------------------------------------------------------------------- -# # Restore the shell options saved at the beginning of this script/function. -# #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 diff --git a/jobs/JAQM_MAKE_LBCS b/jobs/JAQM_MAKE_LBCS index 55adc95204..e2f6f327d3 100755 --- a/jobs/JAQM_MAKE_LBCS +++ b/jobs/JAQM_MAKE_LBCS @@ -1,16 +1,45 @@ #!/bin/bash +date +export PS4=' $SECONDS + ' +set -xue + # #----------------------------------------------------------------------- -# # Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "task_make_lbcs|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# + +export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" +export DATA=${DATA:-${DATAROOT}/${jobid}} + +mkdir -p ${DATA} +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} + +setpdy.sh +. ./PDY + +export CDATE=${PDY}${cyc} + +. 
$USHaqm/job_preamble.sh + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} + +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMINm2="${COMINm2:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" +export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" + +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + #----------------------------------------------------------------------- # # Save current shell options (in a global array). Then set new options @@ -18,24 +47,21 @@ source_config_for_task "task_make_lbcs|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_F # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). -# #----------------------------------------------------------------------- # scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# + +env #----------------------------------------------------------------------- -# # Print message indicating entry into script. -# #----------------------------------------------------------------------- # print_info_msg " @@ -67,9 +93,7 @@ mkdir -p ${INPUT_DATA} DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_MAKE_LBCS}" # #----------------------------------------------------------------------- -# # Set environment only when RUN_TASK_GET_EXTRN_LBCS is false. 
-# #----------------------------------------------------------------------- # if [ "${RUN_TASK_GET_EXTRN_LBCS}" = "FALSE" ]; then @@ -83,27 +107,26 @@ if [ "${RUN_TASK_GET_EXTRN_LBCS}" = "FALSE" ]; then fi # #----------------------------------------------------------------------- -# # Call the ex-script for this J-job and pass to it the necessary variables. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exaqm_make_lbcs.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# #----------------------------------------------------------------------- # -# Run job postamble. -# -#----------------------------------------------------------------------- +startmsg +${HOMEaqm}/scripts/exaqm_make_lbcs.sh +export err=$?; err_chk # -job_postamble +if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA +fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" # #----------------------------------------------------------------------- -# # Restore the shell options saved at the beginning of this script/function. -# #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 diff --git a/jobs/JAQM_NEXUS_EMISSION b/jobs/JAQM_NEXUS_EMISSION index 19e59581c5..8ff6a0022a 100755 --- a/jobs/JAQM_NEXUS_EMISSION +++ b/jobs/JAQM_NEXUS_EMISSION @@ -1,49 +1,71 @@ #!/bin/bash -# -#----------------------------------------------------------------------- -# -# This script generate NEXUS emission netcdf file. 
-# -#----------------------------------------------------------------------- -# +date +export PS4=' $SECONDS + ' +set -xue -# #----------------------------------------------------------------------- -# # Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "task_run_fcst|cpl_aqm_parm|task_nexus_emission|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh + +export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" +export DATA=${DATA:-${DATAROOT}/${jobid}} + +mkdir -p ${DATA} +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} +export CDATE=${PDY}${cyc} + +setpdy.sh +. ./PDY + +. $USHaqm/job_preamble.sh + +export FIXaqmconfig="${FIXaqmconfig:-${HOMEaqm}/fix/aqm/epa/data}" +export FIXaqmbio="${FIXaqmbio:-${HOMEaqm}/fix/bio}" +export FIXaqmdust="${FIXaqmdust:-${HOMEaqm}/fix/dust}" +export FIXaqmcanopy="${FIXaqmcanopy:-${HOMEaqm}/fix/canopy}" +export FIXaqmnexus="${FIXaqmnexus:-${HOMEaqm}/fix/nexus}" +export FIXaqmnexus_gfs_sfc="${FIXaqmnexus_gfs_sfc:-${HOMEaqm}/fix/gfs}" + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} + +export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINemis="${COMINemis:-${COMIN}/emission}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMINm2="${COMINm2:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + # #----------------------------------------------------------------------- -# # Save current shell options (in a global array). 
Then set new options # for this script/function. -# #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). -# #----------------------------------------------------------------------- # scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# + +env #----------------------------------------------------------------------- -# # Print message indicating entry into script. -# #----------------------------------------------------------------------- # print_info_msg " @@ -56,27 +78,21 @@ using NEXUS which will output for FV3 (in NetCDF format). ========================================================================" # #----------------------------------------------------------------------- -# # Set the name of and create the directory in which the output from this # script will be placed (if it doesn't already exist). -# #----------------------------------------------------------------------- # export INPUT_DATA="${INPUT_DATA:-${COMIN}/${cyc}/NEXUS}" mkdir -p ${INPUT_DATA} # #----------------------------------------------------------------------- -# # Set the run directory -# #----------------------------------------------------------------------- # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_EMISSION_${nspt}}" # #----------------------------------------------------------------------- -# # Set environment only when RUN_TASK_NEXUS_GFS_SFC is false. 
-# #----------------------------------------------------------------------- # if [ "${RUN_TASK_NEXUS_GFS_SFC}" = "FALSE" ]; then @@ -89,23 +105,23 @@ if [ "${RUN_TASK_NEXUS_GFS_SFC}" = "FALSE" ]; then fi # #----------------------------------------------------------------------- -# # Call the ex-script for this J-job. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exaqm_nexus_emission.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# #----------------------------------------------------------------------- # -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble +startmsg +${HOMEaqm}/scripts/exaqm_nexus_emission.sh +export err=$?; err_chk # +if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA +fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" #----------------------------------------------------------------------- # # Restore the shell options saved at the beginning of this script/func- diff --git a/jobs/JAQM_NEXUS_POST_SPLIT b/jobs/JAQM_NEXUS_POST_SPLIT index d53a6edb8e..4715f3bf7b 100755 --- a/jobs/JAQM_NEXUS_POST_SPLIT +++ b/jobs/JAQM_NEXUS_POST_SPLIT @@ -1,41 +1,71 @@ #!/bin/bash -# +date +export PS4=' $SECONDS + ' +set -xue + #----------------------------------------------------------------------- -# # Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP} -. 
$USHdir/job_preamble.sh + +export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" +export DATA=${DATA:-${DATAROOT}/${jobid}} + +mkdir -p ${DATA} +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} +export CDATE=${PDY}${cyc} + +setpdy.sh +. ./PDY + +. $USHaqm/job_preamble.sh + +export FIXaqmconfig="${FIXaqmconfig:-${HOMEaqm}/fix/aqm/epa/data}" +export FIXaqmbio="${FIXaqmbio:-${HOMEaqm}/fix/bio}" +export FIXaqmdust="${FIXaqmdust:-${HOMEaqm}/fix/dust}" +export FIXaqmcanopy="${FIXaqmcanopy:-${HOMEaqm}/fix/canopy}" +export FIXaqmnexus="${FIXaqmnexus:-${HOMEaqm}/fix/nexus}" +export FIXaqmnexus_gfs_sfc="${FIXaqmnexus_gfs_sfc:-${HOMEaqm}/fix/gfs}" + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} + +export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINemis="${COMINemis:-${COMIN}/emission}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMINm2="${COMINm2:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + # #----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. -# #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). 
-# #----------------------------------------------------------------------- # scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# + +env #----------------------------------------------------------------------- -# # Print message indicating entry into script. -# #----------------------------------------------------------------------- # print_info_msg " @@ -48,19 +78,15 @@ using NEXUS which will output for FV3 (in NetCDF format). ========================================================================" # #----------------------------------------------------------------------- -# # Set the name of and create the directory in which the output from this # script will be placed (if it doesn't already exist). -# #----------------------------------------------------------------------- # export INPUT_DATA="${INPUT_DATA:-${COMIN}/${cyc}}" mkdir -p ${INPUT_DATA} # #----------------------------------------------------------------------- -# # Set the run directory -# #----------------------------------------------------------------------- # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_POST_SPLIT}" @@ -71,23 +97,24 @@ DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_POST_SPLIT}" # #----------------------------------------------------------------------- # -$SCRIPTSdir/exaqm_nexus_post_split.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- -# -# Run job postamble. 
+startmsg +${HOMEaqm}/scripts/exaqm_nexus_post_split.sh +export err=$?; err_chk # -#----------------------------------------------------------------------- -# -job_postamble +if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA +fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" # #----------------------------------------------------------------------- -# # Restore the shell options saved at the beginning of this script/func- # tion. -# #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 diff --git a/jobs/JAQM_POINT_SOURCE b/jobs/JAQM_POINT_SOURCE index 0575d087b6..032f6d9a73 100755 --- a/jobs/JAQM_POINT_SOURCE +++ b/jobs/JAQM_POINT_SOURCE @@ -1,25 +1,48 @@ #!/bin/bash -# +date +export PS4=' $SECONDS + ' +set -xue + #----------------------------------------------------------------------- -# # Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh + +export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" +export DATA=${DATA:-${DATAROOT}/${jobid}} + +mkdir -p ${DATA} +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} +export CDATE=${PDY}${cyc} + +setpdy.sh +. ./PDY + +. 
$USHaqm/job_preamble.sh + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} + +export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINemispt="${COMINemispt:-${COMIN}/emission/pt}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMINm2="${COMINm2:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" # #----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. -# #----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 #----------------------------------------------------------------------- # # Get the full path to the file in which this script/function is located @@ -27,15 +50,13 @@ source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_V # which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- -# scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# + +env #----------------------------------------------------------------------- -# # Print message indicating entry into script. -# #----------------------------------------------------------------------- # print_info_msg " @@ -47,41 +68,38 @@ This is the J-job script for the task that generates the point source files. 
========================================================================" # #----------------------------------------------------------------------- -# # Set the name of and create the directory in which the output from this # script will be placed (if it doesn't already exist). -# #----------------------------------------------------------------------- # export INPUT_DATA="${INPUT_DATA:-${COMIN}/${cyc}}" mkdir -p ${INPUT_DATA} # #----------------------------------------------------------------------- -# # Set the run directory -# #----------------------------------------------------------------------- # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POINT_SOURCE}" # # #----------------------------------------------------------------------- -# # Call the ex-script for this J-job. -# #----------------------------------------------------------------------- # -$SCRIPTSdir/exaqm_point_source.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- +startmsg +${HOMEaqm}/scripts/exaqm_point_source.sh +export err=$?; err_chk # -# Run job postamble. 
-# -#----------------------------------------------------------------------- -# -job_postamble +if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA +fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" # #----------------------------------------------------------------------- # diff --git a/jobs/JAQM_RUN_POST b/jobs/JAQM_POST similarity index 70% rename from jobs/JAQM_RUN_POST rename to jobs/JAQM_POST index e2ebc48d6e..8424139b8d 100755 --- a/jobs/JAQM_RUN_POST +++ b/jobs/JAQM_POST @@ -1,50 +1,69 @@ #!/bin/bash -# -#----------------------------------------------------------------------- -# -# This script runs the post-processor (UPP) on the NetCDF output files -# of the write component of the FV3-LAM model. -# -#----------------------------------------------------------------------- -# +date +export PS4=' $SECONDS + ' +set -xue -# #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "task_run_post|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh + +export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" +export DATA=${DATA:-${DATAROOT}/${jobid}} + +mkdir -p ${DATA} +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} +export CDATE=${PDY}${cyc} + +setpdy.sh +. ./PDY + +. 
$USHaqm/job_preamble.sh + +export SENDDBN=${SENDDBN:-YES} +export SENDECF=${SENDECF:-YES} +export SENDCOM=${SENDCOM:-YES} +export SENDDBN_NTC=${SENDDBN_NTC:-NO} + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} + +export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMINm2="${COMINm2:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" # #----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. -# #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). -# #----------------------------------------------------------------------- # scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# + +env #----------------------------------------------------------------------- -# # Print message indicating entry into script. -# #----------------------------------------------------------------------- # print_info_msg " @@ -57,9 +76,7 @@ on the output files corresponding to a specified forecast hour. 
========================================================================" # #----------------------------------------------------------------------- -# # Set the run directory. -# #----------------------------------------------------------------------- # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/${cyc}}" @@ -72,24 +89,6 @@ if [ "${SUB_HOURLY_POST}" != "TRUE" ]; then export fmn="00" fi # -#----------------------------------------------------------------------- -# -# If it doesn't already exist, create the directory (COMOUT) in which -# to store post-processing output. (Note that COMOUT may have already -# been created by this post-processing script for a different output time -# from the same forecast start time and/or ensemble member.) Also, create -# a temporary work directory (DATA_FHR) for the current output time. This -# will be deleted later after the processing for the current output time -# is complete. Then change location to DATA_FHR. -# -# Note that there may be a preexisting version of DATA_FHR from previous -# runs of this script for the current forecast hour (and current forecast -# start time), e.g. from the workflow task that runs this script failing -# and then being called again. Thus, we first make sure preexisting -# versions are deleted. -# -#----------------------------------------------------------------------- -# mkdir -p ${COMOUT} if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then @@ -102,9 +101,7 @@ mkdir -p ${DATA_FHR} cd ${DATA_FHR} # #----------------------------------------------------------------------- -# # Make sure that fhr is a non-empty string consisting of only digits. -# #----------------------------------------------------------------------- # export fhr=$( printf "%s" "${fhr}" | $SED -n -r -e "s/^([0-9]+)$/\1/p" ) @@ -116,22 +113,18 @@ digits: fi # #----------------------------------------------------------------------- -# # Call the ex-script for this J-job and pass to it the necessary varia- # bles. 
-# #----------------------------------------------------------------------- # -$SCRIPTSdir/exaqm_run_post.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." +startmsg +${HOMEaqm}/scripts/exaqm_post.sh +export err=$?; err_chk # #----------------------------------------------------------------------- -# # Create a flag file to make rocoto aware that the run_post task has # successfully completed. This flag is necessary for varying forecast # hours (FCST_LEN_HRS: -1) -# #----------------------------------------------------------------------- # if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then @@ -147,24 +140,30 @@ if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then fi fi # -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# fcst_len_hrs=$( printf "%03d" "${FCST_LEN_HRS}" ) if [ "${fhr}" = "${fcst_len_hrs}" ]; then - job_postamble "TRUE" + if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA + share_pid="${WORKFLOW_ID}_${PDY}${cyc}" + rm -rf *${share_pid} + fi else - job_postamble + if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA + fi fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" # #----------------------------------------------------------------------- -# # Restore the shell options saved at the beginning of this script/func- # tion. 
-# #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 diff --git a/jobs/JAQM_POST_STAT_O3 b/jobs/JAQM_POST_STAT_O3 index cc0c2754cc..8819203b0a 100755 --- a/jobs/JAQM_POST_STAT_O3 +++ b/jobs/JAQM_POST_STAT_O3 @@ -1,49 +1,65 @@ #!/bin/bash -# -#----------------------------------------------------------------------- -# -# This script runs POST-STAT-O3. -# -#----------------------------------------------------------------------- -# +date +export PS4=' $SECONDS + ' +set -xue -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# + +export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" +export DATA=${DATA:-${DATAROOT}/${jobid}} + +mkdir -p ${DATA} +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} + +setpdy.sh +. ./PDY + +. 
$USHaqm/job_preamble.sh + +export SENDDBN=${SENDDBN:-YES} +export SENDECF=${SENDECF:-YES} +export SENDCOM=${SENDCOM:-YES} +export SENDDBN_NTC=${SENDDBN_NTC:-NO} + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} + +export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMINm2="${COMINm2:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" + +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" +export PCOM=${PCOM:-${COMOUT}/wmo} + #----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. -# #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). -# #----------------------------------------------------------------------- # scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# + +env #----------------------------------------------------------------------- -# # Print message indicating entry into script. -# #----------------------------------------------------------------------- # print_info_msg " @@ -55,41 +71,38 @@ This is the J-job script for the task that runs POST-STAT-O3. 
========================================================================" # #----------------------------------------------------------------------- -# # Set the run directory. -# #----------------------------------------------------------------------- # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POST_STAT_O3}" -mkdir -p ${COMOUTwmo} +mkdir -p ${PCOM} export PARMaqm_utils="${PARMaqm_utils:-${HOMEaqm}/sorc/AQM-utils/parm}" # #----------------------------------------------------------------------- -# # Call the ex-script for this J-job and pass to it the necessary varia- # bles. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exaqm_post_stat_o3.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# #----------------------------------------------------------------------- # -# Run job postamble. -# -#----------------------------------------------------------------------- +startmsg +${HOMEaqm}/scripts/exaqm_post_stat_o3.sh +export err=$?; err_chk # -job_postamble +if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA +fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" # #----------------------------------------------------------------------- -# # Restore the shell options saved at the beginning of this script/func- # tion. -# #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 diff --git a/jobs/JAQM_POST_STAT_PM25 b/jobs/JAQM_POST_STAT_PM25 index c0e3673b9c..817a151e6b 100755 --- a/jobs/JAQM_POST_STAT_PM25 +++ b/jobs/JAQM_POST_STAT_PM25 @@ -1,49 +1,67 @@ #!/bin/bash -# -#----------------------------------------------------------------------- -# -# This script runs POST-STAT-PM25. 
-# -#----------------------------------------------------------------------- -# +date +export PS4=' $SECONDS + ' +set -xue -# #----------------------------------------------------------------------- -# # Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# + +export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" +export DATA=${DATA:-${DATAROOT}/${jobid}} + +mkdir -p ${DATA} +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} + +setpdy.sh +. ./PDY + +. $USHaqm/job_preamble.sh + +export SENDDBN=${SENDDBN:-YES} +export SENDECF=${SENDECF:-YES} +export SENDCOM=${SENDCOM:-YES} +export SENDDBN_NTC=${SENDDBN_NTC:-NO} + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} + +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMINm2="${COMINm2:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + +export PCOM=${PCOM:-${COMOUT}/wmo} + #----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. -# #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . 
$USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). -# #----------------------------------------------------------------------- # scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# + +env #----------------------------------------------------------------------- -# # Print message indicating entry into script. -# #----------------------------------------------------------------------- # print_info_msg " @@ -55,41 +73,38 @@ This is the J-job script for the task that runs POST-UPP-STAT. ========================================================================" # #----------------------------------------------------------------------- -# # Set the run directory. -# #----------------------------------------------------------------------- # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POST_STAT_PM25}" -mkdir -p ${COMOUTwmo} +mkdir -p ${PCOM} export PARMaqm_utils="${PARMaqm_utils:-${HOMEaqm}/sorc/AQM-utils/parm}" # #----------------------------------------------------------------------- -# # Call the ex-script for this J-job and pass to it the necessary varia- # bles. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exaqm_post_stat_pm25.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# #----------------------------------------------------------------------- # -# Run job postamble. 
-# -#----------------------------------------------------------------------- +startmsg +${HOMEaqm}/scripts/exaqm_post_stat_pm25.sh +export err=$?; err_chk # -job_postamble +if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA +fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" # #----------------------------------------------------------------------- -# # Restore the shell options saved at the beginning of this script/func- # tion. -# #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 diff --git a/jobs/JAQM_PRE_POST_STAT b/jobs/JAQM_PRE_POST_STAT index a8b2c64ac7..a36e09bbc3 100755 --- a/jobs/JAQM_PRE_POST_STAT +++ b/jobs/JAQM_PRE_POST_STAT @@ -1,49 +1,62 @@ #!/bin/bash -# -#----------------------------------------------------------------------- -# -# This script runs PRE-POST-STAT. -# -#----------------------------------------------------------------------- -# +date +export PS4=' $SECONDS + ' +set -xue -# #----------------------------------------------------------------------- -# # Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +. $USHaqm/source_util_funcs.sh source_config_for_task "task_pre_post_stat" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# + +export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" +export DATA=${DATA:-${DATAROOT}/${jobid}} + +mkdir -p ${DATA} +cd ${DATA} + +export cycle=${cycle:-t${cyc}z} +export CDATE=${PDY}${cyc} + +setpdy.sh +. ./PDY + +. 
$USHaqm/job_preamble.sh + +export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} +export UTILaqm=${UTILaqm:-${HOMEaqm}/util} + +export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" +export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" +export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" +export COMINm2="${COMINm2:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" +export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + #----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. -# #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). -# #----------------------------------------------------------------------- # scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# + +env #----------------------------------------------------------------------- -# # Print message indicating entry into script. -# #----------------------------------------------------------------------- # print_info_msg " @@ -55,37 +68,34 @@ This is the J-job script for the task that runs POST-UPP-STAT. ========================================================================" # #----------------------------------------------------------------------- -# # Set the run directory. 
-# #----------------------------------------------------------------------- # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_PRE_POST_STAT}" # #----------------------------------------------------------------------- -# # Call the ex-script for this J-job and pass to it the necessary varia- # bles. -# #----------------------------------------------------------------------- # -$SCRIPTSdir/exaqm_pre_post_stat.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." +startmsg +${HOMEaqm}/scripts/exaqm_pre_post_stat.sh +export err=$?; err_chk # -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble +if [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + rm -rf $DATA +fi +# Print exit message +print_info_msg " +======================================================================== +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" # #----------------------------------------------------------------------- -# # Restore the shell options saved at the beginning of this script/func- # tion. -# #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 diff --git a/parm/FV3LAM_wflow.xml b/parm/FV3LAM_wflow.xml index 998350fcaf..97a682db54 100644 --- a/parm/FV3LAM_wflow.xml +++ b/parm/FV3LAM_wflow.xml @@ -654,7 +654,7 @@ MODULES_RUN_TASK_FP script. &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_AQM_ICS;" "&JOBSdir;/JAQM_AQM_ICS" + &LOAD_MODULES_RUN_TASK_FP; "&TN_AQM_ICS;" "&JOBSdir;/JAQM_ICS" {{ nnodes_aqm_ics }}:ppn={{ ppn_aqm_ics }} {{ wtime_aqm_ics }} &NCORES_PER_NODE; @@ -689,7 +689,7 @@ MODULES_RUN_TASK_FP script. 
&RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_AQM_ICS;" "&JOBSdir;/JAQM_AQM_ICS" + &LOAD_MODULES_RUN_TASK_FP; "&TN_AQM_ICS;" "&JOBSdir;/JAQM_ICS" {{ nnodes_aqm_ics }}:ppn={{ ppn_aqm_ics }} {{ wtime_aqm_ics }} &NCORES_PER_NODE; @@ -725,7 +725,7 @@ MODULES_RUN_TASK_FP script. &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_AQM_LBCS;" "&JOBSdir;/JAQM_AQM_LBCS" + &LOAD_MODULES_RUN_TASK_FP; "&TN_AQM_LBCS;" "&JOBSdir;/JAQM_LBCS" {{ nnodes_aqm_lbcs }}:ppn={{ ppn_aqm_lbcs }} {{ wtime_aqm_lbcs }} &NCORES_PER_NODE; @@ -757,7 +757,7 @@ MODULES_RUN_TASK_FP script. &RSRV_FCST; - &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_FCST;" "&JOBSdir;/JAQM_RUN_FCST" + &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_FCST;" "&JOBSdir;/JAQM_FORECAST" {%- if machine in ["JET", "HERA", "LINUX"] %} {{ ncores_run_fcst }} {{ native_run_fcst }} @@ -835,7 +835,7 @@ MODULES_RUN_TASK_FP script. {%- endif %} &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JAQM_RUN_POST" + &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JAQM_POST" {{ nnodes_run_post }}:ppn={{ ppn_run_post }} {{ wtime_run_post }} &NCORES_PER_NODE; @@ -899,7 +899,7 @@ later below for other output times. &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JAQM_RUN_POST" + &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JAQM_POST" {{ nnodes_run_post }}:ppn={{ ppn_run_post }} {{ wtime_run_post }} &NCORES_PER_NODE; @@ -958,7 +958,7 @@ for other output times. &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JAQM_RUN_POST" + &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JAQM_POST" {{ nnodes_run_post }}:ppn={{ ppn_run_post }} {{ wtime_run_post }} &NCORES_PER_NODE; @@ -1022,7 +1022,7 @@ always zero). 
{%- endif %} &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JAQM_RUN_POST" + &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JAQM_POST" {{ nnodes_run_post }}:ppn={{ ppn_run_post }} {{ wtime_run_post }} &NCORES_PER_NODE; @@ -1095,7 +1095,7 @@ the tag to be identical to the ones above for other output times. &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JAQM_RUN_POST" + &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JAQM_POST" {{ nnodes_run_post }}:ppn={{ ppn_run_post }} {{ wtime_run_post }} &NCORES_PER_NODE; diff --git a/parm/config/var_defns.sh.nco.static b/parm/config/var_defns.sh.nco.static index da0526f669..55ebf3447a 100644 --- a/parm/config/var_defns.sh.nco.static +++ b/parm/config/var_defns.sh.nco.static @@ -950,17 +950,15 @@ AQM_GEFS_FILE_CYC='' COMINemis='@COMaqm@/emissions' FIXaqmconfig='@HOMEaqm@/fix/aqm/epa/data' FIXaqmfire='@HOMEaqm@/fix/fire' -FIXaqmbio='@HOMEaqm@/fix/bio' FIXaqmdust='@HOMEaqm@/fix/FENGSHA' FIXaqmcanopy='@HOMEaqm@/fix/canopy' FIXaqmchem_lbcs='@HOMEaqm@/fix/chem_lbcs' FIXaqmnexus='@HOMEaqm@/fix/nexus' -FIXaqmnexus_gfs_sfc='@HOMEaqm@/fix/gfs' NEXUS_GRID_FN='grid_spec_793.nc' NUM_SPLIT_NEXUS='6' NEXUS_GFS_SFC_ARCHV_DIR='/NCEPPROD/hpssprod/runhistory' COMINemispt='@COMaqm@/emissions/nei2016v1-pt' -DCOMINairnow='@OPSROOT@/dcom' +DCOMINairnow='/lfs/h1/ops/prod/dcom' COMINbicor='@COMaqm@' COMOUTbicor='@COMaqm@' diff --git a/scripts/exaqm_bias_correction_o3.sh b/scripts/exaqm_bias_correction_o3.sh index 4b89d3ea63..5f73cb08bd 100755 --- a/scripts/exaqm_bias_correction_o3.sh +++ b/scripts/exaqm_bias_correction_o3.sh @@ -1,13 +1,19 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_bias_correction_o3 + #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. 
$USHdir/source_util_funcs.sh +. $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +23,7 @@ source_config_for_task "cpl_aqm_parm|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -146,11 +152,9 @@ mkdir -p "${DATA}/data" print_info_msg "${message_warning}" fi - PREP_STEP - eval ${RUN_CMD_SERIAL} ${EXECdir}/convert_airnow_csv ${cvt_input_fp} ${cvt_output_fp} ${cvt_pdy} ${cvt_pdy} ${REDIRECT_OUT_ERR} - export err=$? - err_chk - POST_STEP + startmsg + eval ${RUN_CMD_SERIAL} ${EXECaqm}/convert_airnow_csv ${cvt_input_fp} ${cvt_output_fp} ${cvt_pdy} ${cvt_pdy} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile + export err=$?; err_chk done #----------------------------------------------------------------------------- @@ -195,11 +199,9 @@ cp ${PARMaqm_utils}/bias_correction/sites.valid.ozone.20230331.12z.list ${DATA}/ cp ${PARMaqm_utils}/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords cp ${PARMaqm_utils}/bias_correction/config.interp.ozone.7-vars_${id_domain}.${cyc}z ${DATA} -PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_interpolate config.interp.ozone.7-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR} -export err=$? 
- err_chk -POST_STEP +startmsg +eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_bias_interpolate config.interp.ozone.7-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile +export err=$?; err_chk cp ${DATA}/out/ozone/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} @@ -240,11 +242,9 @@ ln -sf ${COMINbicor}/bcdata* "${DATA}/data" mkdir -p ${DATA}/data/sites cp ${PARMaqm_utils}/bias_correction/config.ozone.bias_corr_${id_domain}.${cyc}z ${DATA} -PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_correct config.ozone.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR} -export err=$? - err_chk -POST_STEP +startmsg +eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_bias_correct config.ozone.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile +export err=$?; err_chk cp ${DATA}/out/ozone.corrected* ${COMOUT} @@ -269,11 +269,9 @@ id_gribdomain=${id_domain} EOF1 # convert from netcdf to grib2 format -PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_bias_cor_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} -export err=$? - err_chk -POST_STEP +startmsg +eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_bias_cor_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile +export err=$?; err_chk cp ${DATA}/${NET}.${cycle}.awpozcon*bc*.grib2 ${COMOUT} @@ -335,11 +333,9 @@ EOF1 #------------------------------------------------- # write out grib2 format #------------------------------------------------- - PREP_STEP - eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_bias_cor_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} - export err=$? 
- err_chk - POST_STEP + startmsg + eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_maxi_bias_cor_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile + export err=$?; err_chk # split into max_1h and max_8h files and copy to grib227 wgrib2 aqm-maxi_bc.${id_domain}.grib2 |grep "OZMAX1" | wgrib2 -i aqm-maxi_bc.${id_domain}.grib2 -grib ${NET}.${cycle}.max_1hr_o3_bc.${id_domain}.grib2 @@ -376,13 +372,13 @@ EOF1 tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.227 done - # Post Files to COMOUTwmo - cp awpaqm.${cycle}.*o3-max-bc.227.grib2 ${COMOUTwmo} + # Post Files to PCOM + cp awpaqm.${cycle}.*o3-max-bc.227.grib2 ${PCOM} # Distribute Data if [ "${SENDDBN_NTC}" = "TRUE" ] ; then - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1ho3-max-bc.227.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.8ho3-max-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.1ho3-max-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.8ho3-max-bc.227.grib2 fi fi fi @@ -474,14 +470,14 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT51=awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3_bc-maxi.${cycle}.227 - # Post Files to COMOUTwmo - cp awpaqm.${cycle}.${hr}ho3-bc.227.grib2 ${COMOUTwmo} - cp awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 ${COMOUTwmo} + # Post Files to PCOM + cp awpaqm.${cycle}.${hr}ho3-bc.227.grib2 ${PCOM} + cp awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 ${PCOM} # Distribute Data if [ "${SENDDBN}" = "TRUE" ]; then - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.${hr}ho3-bc.227.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} 
${PCOM}/awpaqm.${cycle}.${hr}ho3-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 fi done fi diff --git a/scripts/exaqm_bias_correction_pm25.sh b/scripts/exaqm_bias_correction_pm25.sh index b8c6736658..2edc049008 100755 --- a/scripts/exaqm_bias_correction_pm25.sh +++ b/scripts/exaqm_bias_correction_pm25.sh @@ -1,13 +1,19 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_bias_correction_pm25 + #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +23,7 @@ source_config_for_task "cpl_aqm_parm|task_bias_correction_pm25" ${GLOBAL_VAR_DEF # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -144,11 +150,9 @@ mkdir -p "${DATA}/data" print_info_msg "${message_warning}" fi - PREP_STEP - eval ${RUN_CMD_SERIAL} ${EXECdir}/convert_airnow_csv ${cvt_input_fp} ${cvt_output_fp} ${cvt_pdy} ${cvt_pdy} ${REDIRECT_OUT_ERR} - export err=$? 
- err_chk - POST_STEP + startmsg + eval ${RUN_CMD_SERIAL} ${EXECaqm}/convert_airnow_csv ${cvt_input_fp} ${cvt_output_fp} ${cvt_pdy} ${cvt_pdy} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile + export err=$?; err_chk done #----------------------------------------------------------------------------- @@ -193,11 +197,9 @@ cp ${PARMaqm_utils}/bias_correction/sites.valid.pm25.20230331.12z.list ${DATA}/d cp ${PARMaqm_utils}/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords cp ${PARMaqm_utils}/bias_correction/config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${DATA} -PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_interpolate config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR} -export err=$? - err_chk -POST_STEP +startmsg +eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_bias_interpolate config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile +export err=$?; err_chk cp ${DATA}/out/pm25/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} @@ -220,11 +222,8 @@ cp ${PARMaqm_utils}/bias_correction/config.pm2.5.bias_corr_${id_domain}.${cyc}z cp ${PARMaqm_utils}/bias_correction/site_blocking.pm2.5.2021.0427.2-sites.txt ${DATA} cp ${PARMaqm_utils}/bias_correction/bias_thresholds.pm2.5.2015.1030.32-sites.txt ${DATA} -PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_correct config.pm2.5.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR} -export err=$? - err_chk -POST_STEP +eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_bias_correct config.pm2.5.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile +export err=$?; err_chk cp $DATA/out/pm2.5.corrected* ${COMOUT} @@ -248,11 +247,9 @@ id_gribdomain=${id_domain} / EOF1 -PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_bias_cor_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} -export err=$?
- err_chk -POST_STEP +startmsg +eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_bias_cor_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile +export err=$?; err_chk cp ${DATA}/${NET}.${cycle}.pm25*bc*.grib2 ${COMOUT} @@ -313,11 +310,9 @@ EOF1 #------------------------------------------------- # write out grib2 format #------------------------------------------------- - PREP_STEP - eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_bias_cor_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} - export err=$? - err_chk - POST_STEP + startmsg + eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_maxi_bias_cor_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile + export err=$?; err_chk # split into two files: one for 24hr_ave and one for 1h_max wgrib2 aqm-pm25_bc.${id_domain}.grib2 |grep "PMTF" | ${WGRIB2} -i aqm-pm25_bc.${id_domain}.grib2 -grib aqm.t${cyc}z.ave_24hr_pm25_bc.793.grib2 @@ -423,16 +418,16 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT51=awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_24hrpm25_bc_awp.${cycle}.227 - # Post Files to COMOUTwmo - cp awpaqm.${cycle}.1hpm25-bc.227.grib2 ${COMOUTwmo} - cp awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 ${COMOUTwmo} - cp awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 ${COMOUTwmo} + # Post Files to PCOM + cp awpaqm.${cycle}.1hpm25-bc.227.grib2 ${PCOM} + cp awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 ${PCOM} + cp awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 ${PCOM} # Distribute Data if [ "${SENDDBN_NTC}" = "TRUE" ] ; then - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25-bc.227.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} 
${PCOM}/awpaqm.${cycle}.1hpm25-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 fi fi diff --git a/scripts/exaqm_data_cleanup.sh b/scripts/exaqm_data_cleanup.sh index 5a4f3ba75a..efe9c932c6 100755 --- a/scripts/exaqm_data_cleanup.sh +++ b/scripts/exaqm_data_cleanup.sh @@ -1,6 +1,6 @@ #! /bin/bash -set -x +set -xe ############################################## # Clean up the DATA directory from previous cycle if found ############################################## diff --git a/scripts/exaqm_fire_emission.sh b/scripts/exaqm_fire_emission.sh index 5d2465e9ab..b82c18e5b3 100755 --- a/scripts/exaqm_fire_emission.sh +++ b/scripts/exaqm_fire_emission.sh @@ -1,13 +1,19 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_fire_emission + #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +23,7 @@ source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . 
$USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/scripts/exaqm_run_fcst.sh b/scripts/exaqm_forecast.sh similarity index 98% rename from scripts/exaqm_run_fcst.sh rename to scripts/exaqm_forecast.sh index 8643e75dfe..f79bb05863 100755 --- a/scripts/exaqm_run_fcst.sh +++ b/scripts/exaqm_forecast.sh @@ -1,13 +1,19 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_fcst + #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. $USHaqm/source_util_funcs.sh source_config_for_task "task_run_fcst|task_run_post|task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +23,7 @@ source_config_for_task "task_run_fcst|task_run_post|task_get_extrn_ics|task_get_ # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -451,7 +457,7 @@ fi # if [ "${DO_ENSEMBLE}" = TRUE ] && ([ "${DO_SPP}" = TRUE ] || [ "${DO_SPPT}" = TRUE ] || [ "${DO_SHUM}" = TRUE ] || \ [ "${DO_SKEB}" = TRUE ] || [ "${DO_LSM_SPP}" = TRUE ]); then - $USHdir/set_FV3nml_ens_stoch_seeds.py \ + $USHaqm/set_FV3nml_ens_stoch_seeds.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --cdate "$CDATE" export err=$? 
@@ -484,7 +490,7 @@ if [ "${DO_FCST_RESTART}" = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then flag_fcst_restart="TRUE" # Update FV3 input.nml for restart - $USHdir/update_input_nml.py \ + $USHaqm/update_input_nml.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --run_dir "${DATA}" \ --restart @@ -554,7 +560,7 @@ if [ "${CPL_AQM}" = "TRUE" ]; then # #----------------------------------------------------------------------- # - $USHdir/create_aqm_rc_file.py \ + $USHaqm/create_aqm_rc_file.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --cdate "$CDATE" \ --run-dir "${DATA}" \ @@ -576,7 +582,7 @@ fi # #----------------------------------------------------------------------- # - $USHdir/create_model_configure_file.py \ + $USHaqm/create_model_configure_file.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --cdate "$CDATE" \ --fcst_len_hrs "${FCST_LEN_HRS}" \ @@ -601,7 +607,7 @@ fi # #----------------------------------------------------------------------- # - $USHdir/create_diag_table_file.py \ + $USHaqm/create_diag_table_file.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --run-dir "${DATA}" export err=$? @@ -631,7 +637,7 @@ fi # #----------------------------------------------------------------------- # - $USHdir/create_nems_configure_file.py \ + $USHaqm/create_nems_configure_file.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --run-dir "${DATA}" export err=$? @@ -652,11 +658,9 @@ fi # #----------------------------------------------------------------------- # -PREP_STEP -eval ${RUN_CMD_FCST} ${FV3_EXEC_FP} ${REDIRECT_OUT_ERR} -export err=$? 
- err_chk -POST_STEP +startmsg +eval ${RUN_CMD_FCST} ${FV3_EXEC_FP} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile +export err=$?; err_chk # #----------------------------------------------------------------------- # diff --git a/scripts/exaqm_aqm_ics.sh b/scripts/exaqm_ics.sh similarity index 97% rename from scripts/exaqm_aqm_ics.sh rename to scripts/exaqm_ics.sh index cf883c3469..ebdd7f0ea8 100755 --- a/scripts/exaqm_aqm_ics.sh +++ b/scripts/exaqm_ics.sh @@ -1,13 +1,19 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_ics + #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. $USHaqm/source_util_funcs.sh source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +23,7 @@ source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/scripts/exaqm_aqm_lbcs.sh b/scripts/exaqm_lbcs.sh similarity index 96% rename from scripts/exaqm_aqm_lbcs.sh rename to scripts/exaqm_lbcs.sh index 6b534dc9ad..39ade384ce 100755 --- a/scripts/exaqm_aqm_lbcs.sh +++ b/scripts/exaqm_lbcs.sh @@ -1,13 +1,19 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_lbcs + #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. 
$USHaqm/source_util_funcs.sh source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +23,7 @@ source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aq # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -264,7 +270,7 @@ Species converting Factor EOF exec_fn="gefs2lbc_para" - exec_fp="$EXECdir/${exec_fn}" + exec_fp="$EXECaqm/${exec_fn}" if [ ! -f "${exec_fp}" ]; then print_err_msg_exit "\ The executable (exec_fp) for GEFS LBCs does not exist: @@ -278,12 +284,10 @@ Please ensure that you've built this executable." # #---------------------------------------------------------------------- # - PREP_STEP + startmsg sync - eval ${RUN_CMD_AQMLBC} ${exec_fp} ${REDIRECT_OUT_ERR} - export err=$? - err_chk - POST_STEP + eval ${RUN_CMD_AQMLBC} ${exec_fp} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile + export err=$?; err_chk cp -rp ${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f*.nc ${INPUT_DATA} diff --git a/scripts/exaqm_make_ics.sh b/scripts/exaqm_make_ics.sh index 9bd5a92bc7..88ec2fdb99 100755 --- a/scripts/exaqm_make_ics.sh +++ b/scripts/exaqm_make_ics.sh @@ -1,13 +1,18 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_make_ics #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. 
$USHaqm/source_util_funcs.sh source_config_for_task "task_make_ics|task_get_extrn_ics" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +22,7 @@ source_config_for_task "task_make_ics|task_get_extrn_ics" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -145,7 +150,7 @@ if [ "${RUN_TASK_GET_EXTRN_ICS}" = "FALSE" ]; then EXTRN_DEFNS="${NET}.${cycle}.${EXTRN_MDL_NAME}.ICS.${EXTRN_MDL_VAR_DEFNS_FN}.sh" cmd=" - python3 -u ${USHdir}/retrieve_data.py \ + python3 -u ${USHaqm}/retrieve_data.py \ --debug \ --symlink \ --file_set ${file_set} \ @@ -210,24 +215,6 @@ case "${CCPP_PHYS_SUITE}" in "FV3_GFS_v15p2" ) varmap_file="GFSphys_var_map.txt" ;; -# - "FV3_RRFS_v1beta" | \ - "FV3_GFS_v15_thompson_mynn_lam3km" | \ - "FV3_GFS_v17_p8" | \ - "FV3_WoFS_v0" | \ - "FV3_HRRR" ) - if [ "${EXTRN_MDL_NAME_ICS}" = "RAP" ] || \ - [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" ]; then - varmap_file="GSDphys_var_map.txt" - elif [ "${EXTRN_MDL_NAME_ICS}" = "NAM" ] || \ - [ "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ] || \ - [ "${EXTRN_MDL_NAME_ICS}" = "GEFS" ] || \ - [ "${EXTRN_MDL_NAME_ICS}" = "GDAS" ] || \ - [ "${EXTRN_MDL_NAME_ICS}" = "GSMGFS" ]; then - varmap_file="GFSphys_var_map.txt" - fi - ;; -# *) message_txt="The variable \"varmap_file\" has not yet been specified for this physics suite (CCPP_PHYS_SUITE): @@ -399,11 +386,6 @@ convert_nst="" #----------------------------------------------------------------------- # nsoill_out="4" -if [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" -o \ - "${EXTRN_MDL_NAME_ICS}" = "RAP" ] && \ - [ "${SDF_USES_RUC_LSM}" = "TRUE" ]; then - nsoill_out="9" -fi # #----------------------------------------------------------------------- # @@ -419,9 +401,7 @@ fi 
#----------------------------------------------------------------------- # thomp_mp_climo_file="" -if [ "${EXTRN_MDL_NAME_ICS}" != "HRRR" -a \ - "${EXTRN_MDL_NAME_ICS}" != "RAP" ] && \ - [ "${SDF_USES_THOMPSON_MP}" = "TRUE" ]; then +if [ "${SDF_USES_THOMPSON_MP}" = "TRUE" ]; then thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}" fi # @@ -434,22 +414,6 @@ fi # case "${EXTRN_MDL_NAME_ICS}" in -"GSMGFS") - external_model="GSMGFS" - fn_atm="${EXTRN_MDL_FNS[0]}" - fn_sfc="${EXTRN_MDL_FNS[1]}" - input_type="gfs_gaussian_nemsio" # For spectral GFS Gaussian grid in nemsio format. - convert_nst=False - tracers_input="[\"spfh\",\"clwmr\",\"o3mr\"]" - tracers="[\"sphum\",\"liq_wat\",\"o3mr\"]" - vgtyp_from_climo=True - sotyp_from_climo=True - vgfrc_from_climo=True - minmax_vgfrc_from_climo=True - lai_from_climo=True - tg3_from_soil=False - ;; - "FV3GFS") if [ "${FV3GFS_FILE_FMT_ICS}" = "nemsio" ]; then external_model="FV3GFS" @@ -497,68 +461,6 @@ case "${EXTRN_MDL_NAME_ICS}" in tg3_from_soil=True ;; -"GEFS") - external_model="GFS" - fn_grib2="${EXTRN_MDL_FNS[0]}" - input_type="grib2" - convert_nst=False - vgtyp_from_climo=True - sotyp_from_climo=True - vgfrc_from_climo=True - minmax_vgfrc_from_climo=True - lai_from_climo=True - tg3_from_soil=False - ;; - -"HRRR") - external_model="HRRR" - fn_grib2="${EXTRN_MDL_FNS[0]}" - input_type="grib2" -# -# Path to the HRRRX geogrid file. -# - geogrid_file_input_grid="${FIXgsm}/geo_em.d01.nc_HRRRX" -# Note that vgfrc, shdmin/shdmax (minmax_vgfrc), and lai fields are only available in HRRRX -# files after mid-July 2019, and only so long as the record order didn't change afterward - vgtyp_from_climo=True - sotyp_from_climo=True - vgfrc_from_climo=True - minmax_vgfrc_from_climo=True - lai_from_climo=True - tg3_from_soil=True - convert_nst=False - ;; - -"RAP") - external_model="RAP" - fn_grib2="${EXTRN_MDL_FNS[0]}" - input_type="grib2" -# -# Path to the RAPX geogrid file. 
-# - geogrid_file_input_grid="${FIXgsm}/geo_em.d01.nc_RAPX" - vgtyp_from_climo=True - sotyp_from_climo=True - vgfrc_from_climo=True - minmax_vgfrc_from_climo=True - lai_from_climo=True - tg3_from_soil=True - convert_nst=False - ;; - -"NAM") - external_model="NAM" - fn_grib2="${EXTRN_MDL_FNS[0]}" - input_type="grib2" - vgtyp_from_climo=True - sotyp_from_climo=True - vgfrc_from_climo=True - minmax_vgfrc_from_climo=True - lai_from_climo=True - tg3_from_soil=False - convert_nst=False - ;; - *) message_txt="External-model-dependent namelist variables have not yet been specified for this external IC model (EXTRN_MDL_NAME_ICS): @@ -585,7 +487,7 @@ hh="${EXTRN_MDL_CDATE:8:2}" #----------------------------------------------------------------------- # exec_fn="chgres_cube" -exec_fp="$EXECdir/${exec_fn}" +exec_fp="$EXECaqm/${exec_fn}" if [ ! -f "${exec_fp}" ]; then message_txt="The executable (exec_fp) for generating initial conditions on the FV3-LAM native grid does not exist: @@ -657,7 +559,7 @@ settings=" # Call the python script to create the namelist file. # nml_fn="fort.41" -${USHdir}/set_namelist.py -q -u "$settings" -o ${nml_fn} +${USHaqm}/set_namelist.py -q -u "$settings" -o ${nml_fn} err=$? if [ $err -ne 0 ]; then message_txt="Call to python script set_namelist.py to set the variables @@ -685,18 +587,19 @@ fi # exit code of chgres_cube is nonzero. A similar thing happens in the # forecast task. 
# -PREP_STEP -eval ${RUN_CMD_UTILS} ${exec_fp} ${REDIRECT_OUT_ERR} || \ - print_err_msg_exit "\ -Call to executable (exec_fp) to generate surface and initial conditions -(ICs) files for the FV3-LAM failed: - exec_fp = \"${exec_fp}\" -The external model from which the ICs files are to be generated is: - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" -The external model files that are inputs to the executable (exec_fp) are -located in the following directory: - extrn_mdl_staging_dir = \"${extrn_mdl_staging_dir}\"" -POST_STEP +startmsg +eval ${RUN_CMD_UTILS} ${exec_fp} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile +export err=$?; err_chk +#eval ${RUN_CMD_UTILS} ${exec_fp} ${REDIRECT_OUT_ERR} || \ +# print_err_msg_exit "\ +#Call to executable (exec_fp) to generate surface and initial conditions +#(ICs) files for the FV3-LAM failed: +# exec_fp = \"${exec_fp}\" +#The external model from which the ICs files are to be generated is: +# EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" +#The external model files that are inputs to the executable (exec_fp) are +#located in the following directory: +# extrn_mdl_staging_dir = \"${extrn_mdl_staging_dir}\"" # #----------------------------------------------------------------------- # @@ -715,55 +618,6 @@ mv gfs_ctrl.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc # -#----------------------------------------------------------------------- -# -# Process FVCOM Data -# -#----------------------------------------------------------------------- -# -if [ "${USE_FVCOM}" = "TRUE" ]; then - -#Format for fvcom_time: YYYY-MM-DDTHH:00:00.000000 - fvcom_exec_fn="fvcom_to_FV3" - fvcom_exec_fp="$EXECdir/${fvcom_exec_fn}" - fvcom_time="${DATE_FIRST_CYCL:0:4}-${DATE_FIRST_CYCL:4:2}-${DATE_FIRST_CYCL:6:2}T${DATE_FIRST_CYCL:8:2}:00:00.000000" - if [ ! 
-f "${fvcom_exec_fp}" ]; then - message_txt="The executable (fvcom_exec_fp) for processing FVCOM data -onto FV3-LAM native grid does not exist: - fvcom_exec_fp = \"${fvcom_exec_fp}\" -Please ensure that you've built this executable." - err_exit "${message_txt}" - fi - cp ${fvcom_exec_fp} ${INPUT_DATA}/. - fvcom_data_fp="${FVCOM_DIR}/${FVCOM_FILE}" - if [ ! -f "${fvcom_data_fp}" ]; then - message_txt="The file or path (fvcom_data_fp) does not exist: - fvcom_data_fp = \"${fvcom_data_fp}\" -Please check the following user defined variables: - FVCOM_DIR = \"${FVCOM_DIR}\" - FVCOM_FILE= \"${FVCOM_FILE}\" " - err_exit "${message_txt}" - fi - - cp ${fvcom_data_fp} ${INPUT_DATA}/fvcom.nc - cd ${INPUT_DATA} - PREP_STEP - eval ${RUN_CMD_UTILS} ${fvcom_exec_fn} \ - ${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc fvcom.nc ${FVCOM_WCSTART} ${fvcom_time} \ - ${REDIRECT_OUT_ERR} || print_err_msg_exit "\ -Call to executable (fvcom_exe) to modify sfc fields for FV3-LAM failed: - fvcom_exe = \"${fvcom_exe}\" -The following variables were being used: - FVCOM_DIR = \"${FVCOM_DIR}\" - FVCOM_FILE = \"${FVCOM_FILE}\" - fvcom_time = \"${fvcom_time}\" - FVCOM_WCSTART = \"${FVCOM_WCSTART}\" - INPUT_DATA = \"${INPUT_DATA}\" - fvcom_exe_dir = \"${fvcom_exe_dir}\" - fvcom_exe = \"${fvcom_exe}\"" - POST_STEP -fi - #----------------------------------------------------------------------- # # Set up the RESTART folder for AQM runs diff --git a/scripts/exaqm_make_lbcs.sh b/scripts/exaqm_make_lbcs.sh index 5028c9cd52..c44e8fd325 100755 --- a/scripts/exaqm_make_lbcs.sh +++ b/scripts/exaqm_make_lbcs.sh @@ -1,13 +1,17 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_make_lbcs + #----------------------------------------------------------------------- -# # Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. 
$USHaqm/source_util_funcs.sh source_config_for_task "task_make_lbcs|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +21,7 @@ source_config_for_task "task_make_lbcs|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_F # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -148,7 +152,7 @@ if [ "${RUN_TASK_GET_EXTRN_LBCS}" = "FALSE" ]; then EXTRN_DEFNS="${NET}.${cycle}.${EXTRN_MDL_NAME}.LBCS.${EXTRN_MDL_VAR_DEFNS_FN}.sh" cmd=" - python3 -u ${USHdir}/retrieve_data.py \ + python3 -u ${USHaqm}/retrieve_data.py \ --debug \ --symlink \ --file_set ${file_set} \ @@ -229,24 +233,6 @@ case "${CCPP_PHYS_SUITE}" in "FV3_GFS_v15p2" ) varmap_file="GFSphys_var_map.txt" ;; -# - "FV3_RRFS_v1beta" | \ - "FV3_GFS_v15_thompson_mynn_lam3km" | \ - "FV3_GFS_v17_p8" | \ - "FV3_WoFS_v0" | \ - "FV3_HRRR" ) - if [ "${EXTRN_MDL_NAME_LBCS}" = "RAP" ] || \ - [ "${EXTRN_MDL_NAME_LBCS}" = "HRRR" ]; then - varmap_file="GSDphys_var_map.txt" - elif [ "${EXTRN_MDL_NAME_LBCS}" = "NAM" ] || \ - [ "${EXTRN_MDL_NAME_LBCS}" = "FV3GFS" ] || \ - [ "${EXTRN_MDL_NAME_LBCS}" = "GEFS" ] || \ - [ "${EXTRN_MDL_NAME_LBCS}" = "GDAS" ] || \ - [ "${EXTRN_MDL_NAME_LBCS}" = "GSMGFS" ]; then - varmap_file="GFSphys_var_map.txt" - fi - ;; -# *) message_txt="The variable \"varmap_file\" has not yet been specified for this physics suite (CCPP_PHYS_SUITE): @@ -364,9 +350,7 @@ tracers="\"\"" #----------------------------------------------------------------------- # thomp_mp_climo_file="" -if [ "${EXTRN_MDL_NAME_LBCS}" != "HRRR" -a \ - "${EXTRN_MDL_NAME_LBCS}" != "RAP" ] && \ - [ "${SDF_USES_THOMPSON_MP}" = "TRUE" ]; then +if [ "${SDF_USES_THOMPSON_MP}" = "TRUE" ]; then thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}" fi # @@ -379,13 +363,6 
@@ fi # case "${EXTRN_MDL_NAME_LBCS}" in -"GSMGFS") - external_model="GSMGFS" - input_type="gfs_gaussian_nemsio" # For spectral GFS Gaussian grid in nemsio format. - tracers_input="[\"spfh\",\"clwmr\",\"o3mr\"]" - tracers="[\"sphum\",\"liq_wat\",\"o3mr\"]" - ;; - "FV3GFS") if [ "${FV3GFS_FILE_FMT_LBCS}" = "nemsio" ]; then external_model="FV3GFS" @@ -411,28 +388,6 @@ case "${EXTRN_MDL_NAME_LBCS}" in input_type="gaussian_netcdf" fn_atm="${EXTRN_MDL_FNS[0]}" ;; - -"GEFS") - external_model="GFS" - fn_grib2="${EXTRN_MDL_FNS[0]}" - input_type="grib2" - ;; - -"RAP") - external_model="RAP" - input_type="grib2" - ;; - -"HRRR") - external_model="HRRR" - input_type="grib2" - ;; - -"NAM") - external_model="NAM" - input_type="grib2" - ;; - *) message_txt="External-model-dependent namelist variables have not yet been specified for this external LBC model (EXTRN_MDL_NAME_LBCS): @@ -449,7 +404,7 @@ esac #----------------------------------------------------------------------- # exec_fn="chgres_cube" -exec_fp="$EXECdir/${exec_fn}" +exec_fp="$EXECaqm/${exec_fn}" if [ ! -f "${exec_fp}" ]; then message_txt="The executable (exec_fp) for generating initial conditions on the FV3-LAM native grid does not exist: @@ -479,9 +434,6 @@ for (( i=0; i<${num_fhrs}; i++ )); do fn_grib2="" case "${EXTRN_MDL_NAME_LBCS}" in - "GSMGFS") - fn_atm="${EXTRN_MDL_FNS[$i]}" - ;; "FV3GFS") if [ "${FV3GFS_FILE_FMT_LBCS}" = "nemsio" ]; then fn_atm="${EXTRN_MDL_FNS[$i]}" @@ -497,15 +449,6 @@ for (( i=0; i<${num_fhrs}; i++ )); do "GEFS") fn_grib2="${EXTRN_MDL_FNS[$i]}" ;; - "RAP") - fn_grib2="${EXTRN_MDL_FNS[$i]}" - ;; - "HRRR") - fn_grib2="${EXTRN_MDL_FNS[$i]}" - ;; - "NAM") - fn_grib2="${EXTRN_MDL_FNS[$i]}" - ;; *) message_txt="The external model output file name to use in the chgres_cube FORTRAN namelist file has not specified for this external LBC model (EXTRN_MDL_NAME_LBCS): @@ -582,7 +525,7 @@ settings=" # Call the python script to create the namelist file. 
# nml_fn="fort.41" - ${USHdir}/set_namelist.py -q -u "$settings" -o ${nml_fn} + ${USHaqm}/set_namelist.py -q -u "$settings" -o ${nml_fn} export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script set_namelist.py to set the variables @@ -610,11 +553,9 @@ $settings" # exit code of chgres_cube is nonzero. A similar thing happens in the # forecast task. # - PREP_STEP - eval ${RUN_CMD_UTILS} ${exec_fp} ${REDIRECT_OUT_ERR} - export err=$? - err_chk - POST_STEP + startmsg + eval ${RUN_CMD_UTILS} ${exec_fp} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile + export err=$?; err_chk # # Move LBCs file for the current lateral boundary update time to the LBCs # work directory. Note that we rename the file by including in its name diff --git a/scripts/exaqm_nexus_emission.sh b/scripts/exaqm_nexus_emission.sh index 153b20922b..f44d6622cd 100755 --- a/scripts/exaqm_nexus_emission.sh +++ b/scripts/exaqm_nexus_emission.sh @@ -1,13 +1,19 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_nexus_emissions + #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_nexus_emission|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +23,7 @@ source_config_for_task "cpl_aqm_parm|task_nexus_emission|task_nexus_gfs_sfc" ${G # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . 
$USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -159,7 +165,7 @@ fi # #----------------------------------------------------------------------- # -cp ${EXECdir}/nexus ${DATA} +cp ${EXECaqm}/nexus ${DATA} cp ${FIXaqmnexus}/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc @@ -353,11 +359,9 @@ fi # #----------------------------------------------------------------------- # -PREP_STEP -eval ${RUN_CMD_AQM} ${EXECdir}/nexus -c NEXUS_Config.rc -r grid_spec.nc -o NEXUS_Expt_split.nc ${REDIRECT_OUT_ERR} -export err=$? - err_chk -POST_STEP +startmsg +eval ${RUN_CMD_AQM} ${EXECaqm}/nexus -c NEXUS_Config.rc -r grid_spec.nc -o NEXUS_Expt_split.nc ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile +export err=$?; err_chk # #----------------------------------------------------------------------- diff --git a/scripts/exaqm_nexus_post_split.sh b/scripts/exaqm_nexus_post_split.sh index 70ed2da45d..93186a5615 100755 --- a/scripts/exaqm_nexus_post_split.sh +++ b/scripts/exaqm_nexus_post_split.sh @@ -1,13 +1,19 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_nexus_post_split + #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +23,7 @@ source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_F # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . 
$USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -81,7 +87,7 @@ if [ "${NUM_SPLIT_NEXUS}" = "01" ]; then nspt="00" cp ${COMIN}/${cyc}/NEXUS/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc ${DATA}/NEXUS_Expt_combined.nc else - ${ARL_NEXUS_DIR}/utils/python/concatenate_nexus_post_split.py "${COMIN}/${cyc}/NEXUS/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.*.nc" "${DATA}/NEXUS_Expt_combined.nc" + ${ARL_NEXUS_DIR}/utils/python/concatenate_nexus_post_split.py "${COMIN}/${cyc}/NEXUS/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.*.nc" "${DATA}/NEXUS_Expt_combined.nc" >> $pgmout 2>errfile export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"concatenate_nexus_post_split.py\" failed." diff --git a/scripts/exaqm_point_source.sh b/scripts/exaqm_point_source.sh index 67eb52e1d2..b9df938e5c 100755 --- a/scripts/exaqm_point_source.sh +++ b/scripts/exaqm_point_source.sh @@ -1,13 +1,18 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_point_source #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. $USHaqm/source_util_funcs.sh source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +22,7 @@ source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_V # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . 
$USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/scripts/exaqm_run_post.sh b/scripts/exaqm_post.sh similarity index 97% rename from scripts/exaqm_run_post.sh rename to scripts/exaqm_post.sh index fe8b231644..b43b0a2b01 100755 --- a/scripts/exaqm_run_post.sh +++ b/scripts/exaqm_post.sh @@ -1,13 +1,19 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_post + #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. $USHaqm/source_util_funcs.sh source_config_for_task "task_run_post" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +23,7 @@ source_config_for_task "task_run_post" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -226,11 +232,9 @@ EOF print_info_msg "$VERBOSE" " Starting post-processing for fhr = $fhr hr..." -PREP_STEP -eval ${RUN_CMD_POST} ${EXECdir}/upp.x < itag ${REDIRECT_OUT_ERR} -export err=$? 
- err_chk -POST_STEP +startmsg +eval ${RUN_CMD_POST} ${EXECaqm}/upp.x < itag ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile +export err=$?; err_chk # #----------------------------------------------------------------------- # diff --git a/scripts/exaqm_post_stat_o3.sh b/scripts/exaqm_post_stat_o3.sh index f4dba4b576..e348751bf6 100755 --- a/scripts/exaqm_post_stat_o3.sh +++ b/scripts/exaqm_post_stat_o3.sh @@ -1,13 +1,19 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_post_stat_o3 + #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +23,7 @@ source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_V # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -100,11 +106,9 @@ id_gribdomain=${id_domain} EOF1 # convert from netcdf to grib2 format -PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} -export err=$? 
- err_chk -POST_STEP +startmsg +eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile +export err=$?; err_chk if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} )) @@ -156,7 +160,7 @@ for grid in 227 196 198;do done for var in 1ho3 8ho3;do cp ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} - cp ${DATA}/awpaqm.${cycle}.${var}*grib2 ${COMOUTwmo} + cp ${DATA}/awpaqm.${cycle}.${var}*grib2 ${PCOM} done for var in awpozcon;do cp ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} @@ -224,11 +228,9 @@ EOF1 fi fi - PREP_STEP - eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} - export err=$? - err_chk - POST_STEP + startmsg + eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile + export err=$?; err_chk # split into max_1h and max_8h files and copy to grib227 wgrib2 aqm-maxi.${id_domain}.grib2 |grep "OZMAX1" | wgrib2 -i aqm-maxi.${id_domain}.grib2 -grib ${NET}.${cycle}.max_1hr_o3.${id_domain}.grib2 @@ -267,10 +269,10 @@ EOF1 tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.${grid} done - cp awpaqm.${cycle}.*o3-max.${grid}.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.*o3-max.${grid}.grib2 ${PCOM} if [ "${SENDDBN_NTC}" = "TRUE" ]; then - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1ho3-max.${grid}.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.8ho3-max.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.1ho3-max.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.8ho3-max.${grid}.grib2 fi done fi diff --git a/scripts/exaqm_post_stat_pm25.sh b/scripts/exaqm_post_stat_pm25.sh index dd9fc6545b..6e67bab1b9 100755 --- a/scripts/exaqm_post_stat_pm25.sh +++ b/scripts/exaqm_post_stat_pm25.sh @@ 
-1,13 +1,19 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_post_stat_pm25 + #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +23,7 @@ source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -103,11 +109,9 @@ id_gribdomain=${id_domain} EOF1 # convert from netcdf to grib2 format -PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} -export err=$? 
- err_chk -POST_STEP +startmsg +eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile +export err=$?; err_chk cat ${NET}.${cycle}.pm25.*.${id_domain}.grib2 >> ${NET}.${cycle}.1hpm25.${id_domain}.grib2 @@ -142,13 +146,13 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT51=awpaqm.${cycle}.1hpm25.${grid}.grib2 tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_1hpm25.${cycle}.${grid} - # Post Files to COMOUTwmo - cp awpaqm.${cycle}.1hpm25.${grid}.grib2 ${COMOUTwmo} + # Post Files to PCOM + cp awpaqm.${cycle}.1hpm25.${grid}.grib2 ${PCOM} # Distribute Data # if [ "${SENDDBN_NTC}" = "TRUE" ] ; then -# ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25.${grid}.grib2 -# ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 +# ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.1hpm25.${grid}.grib2 +# ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 # fi done fi @@ -208,17 +212,14 @@ EOF1 fi fi - PREP_STEP - eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} - export err=$? 
- err_chk - POST_STEP + startmsg + eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile + export err=$?; err_chk wgrib2 ${NET}_pm25_24h_ave.${id_domain}.grib2 |grep "PMTF" | wgrib2 -i ${NET}_pm25_24h_ave.${id_domain}.grib2 -grib ${NET}.${cycle}.ave_24hr_pm25.${id_domain}.grib2 wgrib2 ${NET}_pm25_24h_ave.${id_domain}.grib2 |grep "PDMAX1" | wgrib2 -i ${NET}_pm25_24h_ave.${id_domain}.grib2 -grib ${NET}.${cycle}.max_1hr_pm25.${id_domain}.grib2 export grid227="lambert:265.0000:25.0000:25.0000 226.5410:1473:5079.000 12.1900:1025:5079.000" - #export grid148="lambert:263.0000:33.0000:45.0000 239.3720:442:12000.000 21.8210:265:12000.000" export grid196="mercator:20.0000 198.4750:321:2500.000:206.1310 18.0730:255:2500.000:23.0880" export grid198="nps:210.0000:60.0000 181.4290:825:5953.000 40.5300:553:5953.000" @@ -264,21 +265,21 @@ EOF1 cp ${DATA}/${NET}.${cycle}.ave_24hr_pm25*.grib2 ${COMOUT} cp ${DATA}/${NET}.${cycle}.max_1hr_pm25*.grib2 ${COMOUT} - cp awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 ${COMOUTwmo} - cp awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 ${PCOM} + cp awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 ${PCOM} ############################## # Distribute Data ############################## if [ "${SENDDBN_NTC}" = "TRUE" ] ; then - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25.${grid}.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.1hpm25.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 fi if [ "$SENDDBN" = "TRUE" ]; then - ${DBNROOT}/bin/dbn_alert MODEL AQM_PM ${job} ${COMOUTwmo}/awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 - 
${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert MODEL AQM_PM ${job} ${PCOM}/awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${PCOM}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 fi done fi diff --git a/scripts/exaqm_pre_post_stat.sh b/scripts/exaqm_pre_post_stat.sh index 0e01cb8680..ba78c63e1f 100755 --- a/scripts/exaqm_pre_post_stat.sh +++ b/scripts/exaqm_pre_post_stat.sh @@ -1,13 +1,19 @@ #!/bin/bash -# +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_pre_post_stat + #----------------------------------------------------------------------- # # Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. $USHaqm/source_util_funcs.sh source_config_for_task "task_pre_post|task_run_post" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +23,7 @@ source_config_for_task "task_pre_post|task_run_post" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/ush/auto_A1_cp_fix_link_fix_lam.sh b/ush/auto_A1_cp_fix_link_fix_lam.sh deleted file mode 100755 index 404594043c..0000000000 --- a/ush/auto_A1_cp_fix_link_fix_lam.sh +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/bash -# -export dev_fix=/lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/fix -cd ../ - -export HOMEaqm=$(pwd) - -mkdir -p $HOMEaqm/fix -cd $HOMEaqm/fix -for var in aqm bio canopy chem_lbcs FENGSHA fire fix_aer fix_am fix_lut fix_orog fix_sfc_climo nexus restart - do - cp -rp ${dev_fix}/$var . 
-done - -mkdir -p $HOMEaqm/fix/fix_lam -cd ${HOMEaqm}/fix/fix_lam -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793_mosaic.halo6.nc C793_mosaic.halo6.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793_mosaic.halo4.nc C793_mosaic.halo4.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793_mosaic.halo3.nc C793_mosaic.halo3.nc -ln -s C793_grid.tile7.halo4.nc C793_grid.tile7.nc -ln -s ${HOMEaqm}/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793_grid.tile7.halo6.nc C793_grid.tile7.halo6.nc -ln -s ${HOMEaqm}/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793_grid.tile7.halo4.nc C793_grid.tile7.halo4.nc -ln -s ${HOMEaqm}/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793_grid.tile7.halo3.nc C793_grid.tile7.halo3.nc -ln -s C793.vegetation_type.tile7.halo4.nc C793.vegetation_type.tile7.nc -ln -s ${HOMEaqm}/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.vegetation_type.tile7.halo4.nc C793.vegetation_type.tile7.halo4.nc -ln -s ${HOMEaqm}/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.vegetation_type.tile7.halo0.nc C793.vegetation_type.tile7.halo0.nc -ln -s C793.vegetation_type.tile7.halo0.nc C793.vegetation_type.tile1.nc -ln -s C793.vegetation_greenness.tile7.halo4.nc C793.vegetation_greenness.tile7.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.vegetation_greenness.tile7.halo4.nc C793.vegetation_greenness.tile7.halo4.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.vegetation_greenness.tile7.halo0.nc C793.vegetation_greenness.tile7.halo0.nc -ln -s C793.vegetation_greenness.tile7.halo0.nc C793.vegetation_greenness.tile1.nc -ln -s C793.substrate_temperature.tile7.halo4.nc C793.substrate_temperature.tile7.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.substrate_temperature.tile7.halo4.nc C793.substrate_temperature.tile7.halo4.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.substrate_temperature.tile7.halo0.nc C793.substrate_temperature.tile7.halo0.nc -ln -s C793.substrate_temperature.tile7.halo0.nc C793.substrate_temperature.tile1.nc -ln -s C793.soil_type.tile7.halo4.nc C793.soil_type.tile7.nc 
-ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.soil_type.tile7.halo4.nc C793.soil_type.tile7.halo4.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.soil_type.tile7.halo0.nc C793.soil_type.tile7.halo0.nc -ln -s C793.soil_type.tile7.halo0.nc C793.soil_type.tile1.nc -ln -s C793.snowfree_albedo.tile7.halo4.nc C793.snowfree_albedo.tile7.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.snowfree_albedo.tile7.halo4.nc C793.snowfree_albedo.tile7.halo4.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.snowfree_albedo.tile7.halo0.nc C793.snowfree_albedo.tile7.halo0.nc -ln -s C793.snowfree_albedo.tile7.halo0.nc C793.snowfree_albedo.tile1.nc -ln -s C793.slope_type.tile7.halo4.nc C793.slope_type.tile7.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.slope_type.tile7.halo4.nc C793.slope_type.tile7.halo4.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.slope_type.tile7.halo0.nc C793.slope_type.tile7.halo0.nc -ln -s C793.slope_type.tile7.halo0.nc C793.slope_type.tile1.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793_oro_data.tile7.halo4.nc C793_oro_data.tile7.halo4.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793_oro_data.tile7.halo0.nc C793_oro_data.tile7.halo0.nc -ln -s C793.maximum_snow_albedo.tile7.halo4.nc C793.maximum_snow_albedo.tile7.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.maximum_snow_albedo.tile7.halo4.nc C793.maximum_snow_albedo.tile7.halo4.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.maximum_snow_albedo.tile7.halo0.nc C793.maximum_snow_albedo.tile7.halo0.nc -ln -s C793.maximum_snow_albedo.tile7.halo0.nc C793.maximum_snow_albedo.tile1.nc -ln -s C793.facsf.tile7.halo4.nc C793.facsf.tile7.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.facsf.tile7.halo4.nc C793.facsf.tile7.halo4.nc -ln -s $HOMEaqm/fix/aqm/DOMAIN_DATA/AQM_NA_13km/C793.facsf.tile7.halo0.nc C793.facsf.tile7.halo0.nc -ln -s C793.facsf.tile7.halo0.nc C793.facsf.tile1.nc diff --git a/ush/auto_A2_cp_fix.sh 
b/ush/auto_A2_cp_fix.sh new file mode 100755 index 0000000000..209b7690aa --- /dev/null +++ b/ush/auto_A2_cp_fix.sh @@ -0,0 +1,11 @@ +#!/bin/bash +# +export dev_fix=/lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/fix +cd ../ + +export HOMEaqm=$(pwd) + +cd $HOMEaqm + +cp -rp ${dev_fix} . + From 145e2dd0a4f0fdc697b9996dd36c3c569837bef6 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Fri, 17 Nov 2023 15:07:24 +0000 Subject: [PATCH 02/24] update J-job and ex-sripts for forecast, post, ics, and lbcs --- ecf/scripts/forecast/jaqm_forecast.ecf | 2 +- .../forecast/jaqm_forecast_data_cleanup.ecf | 2 +- ecf/scripts/post/jaqm_post_master.ecf | 2 +- ecf/scripts/prep/jaqm_ics.ecf | 2 +- ecf/scripts/prep/jaqm_lbcs.ecf | 2 +- parm/config/var_defns.sh.nco.static | 3 +- ush/job_preamble.sh | 158 +----------------- 7 files changed, 8 insertions(+), 163 deletions(-) diff --git a/ecf/scripts/forecast/jaqm_forecast.ecf b/ecf/scripts/forecast/jaqm_forecast.ecf index 21efac284b..7b75cb272e 100644 --- a/ecf/scripts/forecast/jaqm_forecast.ecf +++ b/ecf/scripts/forecast/jaqm_forecast.ecf @@ -49,7 +49,7 @@ export subcyc=0 ############################################################ # CALL executable job script here ############################################################ -${HOMEaqm}/jobs/JAQM_RUN_FCST +${HOMEaqm}/jobs/JAQM_FORECAST %include diff --git a/ecf/scripts/forecast/jaqm_forecast_data_cleanup.ecf b/ecf/scripts/forecast/jaqm_forecast_data_cleanup.ecf index 918e888006..b018e43fcd 100644 --- a/ecf/scripts/forecast/jaqm_forecast_data_cleanup.ecf +++ b/ecf/scripts/forecast/jaqm_forecast_data_cleanup.ecf @@ -28,7 +28,7 @@ export DATA=${DATA:-${DATAROOT}/${jobid:?}} ############################################################ # CALL executable job script here ############################################################ -${HOMEaqm}/jobs/JDATA_CLEANUP +${HOMEaqm}/jobs/JAQM_DATA_CLEANUP %include %manual diff --git a/ecf/scripts/post/jaqm_post_master.ecf 
b/ecf/scripts/post/jaqm_post_master.ecf index 8f6948f221..70333e1552 100644 --- a/ecf/scripts/post/jaqm_post_master.ecf +++ b/ecf/scripts/post/jaqm_post_master.ecf @@ -40,7 +40,7 @@ export DATA=${DATA:-${DATAROOT}/${jobid:?}} ############################################################ # CALL executable job script here ############################################################ -${HOMEaqm}/jobs/JAQM_RUN_POST +${HOMEaqm}/jobs/JAQM_POST %include diff --git a/ecf/scripts/prep/jaqm_ics.ecf b/ecf/scripts/prep/jaqm_ics.ecf index b9007af777..e96fd91928 100644 --- a/ecf/scripts/prep/jaqm_ics.ecf +++ b/ecf/scripts/prep/jaqm_ics.ecf @@ -46,7 +46,7 @@ export DATA=${DATA:-${DATAROOT}/${jobid:?}} ############################################################ # CALL executable job script here ############################################################ -${HOMEaqm}/jobs/JAQM_AQM_ICS +${HOMEaqm}/jobs/JAQM_ICS %include diff --git a/ecf/scripts/prep/jaqm_lbcs.ecf b/ecf/scripts/prep/jaqm_lbcs.ecf index e7998111e4..9b4f40426f 100644 --- a/ecf/scripts/prep/jaqm_lbcs.ecf +++ b/ecf/scripts/prep/jaqm_lbcs.ecf @@ -52,7 +52,7 @@ export DATA=${DATA:-${DATAROOT}/${jobid:?}} ############################################################ # CALL executable job script here ############################################################ -${HOMEaqm}/jobs/JAQM_AQM_LBCS +${HOMEaqm}/jobs/JAQM_LBCS %include diff --git a/parm/config/var_defns.sh.nco.static b/parm/config/var_defns.sh.nco.static index 55ebf3447a..96008d64dd 100644 --- a/parm/config/var_defns.sh.nco.static +++ b/parm/config/var_defns.sh.nco.static @@ -937,7 +937,7 @@ AQM_DUST_FILE_PREFIX='FENGSHA_p8_10km_inputs' AQM_DUST_FILE_SUFFIX='.nc' AQM_CANOPY_FILE_PREFIX='gfs.t12z.geo' AQM_CANOPY_FILE_SUFFIX='.canopy_regrid.nc' -DCOMINfire='@OPSROOT@/dcom' +DCOMINfire='/lfs/h1/ops/prod/dcom' AQM_FIRE_FILE_PREFIX='Hourly_Emissions_regrid_NA_13km' AQM_FIRE_FILE_SUFFIX='_h72.nc' AQM_RC_FIRE_FREQUENCY='hourly' @@ -950,6 +950,7 @@ AQM_GEFS_FILE_CYC='' 
COMINemis='@COMaqm@/emissions' FIXaqmconfig='@HOMEaqm@/fix/aqm/epa/data' FIXaqmfire='@HOMEaqm@/fix/fire' +FIXaqmfire='@HOMEaqm@/fix/bio' FIXaqmdust='@HOMEaqm@/fix/FENGSHA' FIXaqmcanopy='@HOMEaqm@/fix/canopy' FIXaqmchem_lbcs='@HOMEaqm@/fix/chem_lbcs' diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh index 02ebbce71a..082564705c 100755 --- a/ush/job_preamble.sh +++ b/ush/job_preamble.sh @@ -2,9 +2,7 @@ # #----------------------------------------------------------------------- -# # If requested to share data with next task, override jobid -# #----------------------------------------------------------------------- # if [ "${WORKFLOW_MANAGER}" = "ecflow" ]; then @@ -19,9 +17,7 @@ if [ $# -ne 0 ]; then fi # #----------------------------------------------------------------------- -# # Set NCO standard environment variables -# #----------------------------------------------------------------------- # export envir="${envir:-${envir_dfv}}" @@ -29,147 +25,27 @@ export NET="${NET:-${NET_dfv}}" export RUN="${RUN:-${RUN_dfv}}" export model_ver="${model_ver:-${model_ver_dfv}}" export COMROOT="${COMROOT:-${COMROOT_dfv}}" -export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" -export DCOMROOT="${DCOMROOT:-${DCOMROOT_dfv}}" export LOGBASEDIR="${LOGBASEDIR:-${LOGBASEDIR_dfv}}" -export DBNROOT="${DBNROOT:-${DBNROOT_dfv}}" -export SENDECF="${SENDECF:-${SENDECF_dfv}}" -export SENDDBN="${SENDDBN:-${SENDDBN_dfv}}" -export SENDDBN_NTC="${SENDDBN_NTC:-${SENDDBN_NTC_dfv}}" -export SENDCOM="${SENDCOM:-${SENDCOM_dfv}}" -export SENDWEB="${SENDWEB:-${SENDWEB_dfv}}" export KEEPDATA="${KEEPDATA:-${KEEPDATA_dfv}}" export MAILTO="${MAILTO:-${MAILTO_dfv}}" export MAILCC="${MAILCC:-${MAILCC_dfv}}" -[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" -export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" -export 
COMINgefs="${COMINgefs:-$(compath.py ${envir}/gefs/${gefs_ver})}" -export COMOUTwmo="${COMOUTwmo:-${COMOUT}/wmo}" +export dot_ensmem= -export FIXaqmconfig="${FIXaqmbio:-${HOMEaqm}/fix/aqm/epa/data}" -export FIXaqmbio="${FIXaqmbio:-${HOMEaqm}/fix/bio}" -export FIXaqmdust="${FIXaqmdust:-${HOMEaqm}/fix/dust}" -export FIXaqmcanopy="${FIXaqmcanopy:-${HOMEaqm}/fix/canopy}" -export FIXaqmchem_lbcs="${FIXaqmchem_lbcs:-${HOMEaqm}/fix/chem_lbcs}" -export FIXaqmnexus="${FIXaqmnexus:-${HOMEaqm}/fix/nexus}" -export FIXaqmnexus_gfs_sfc="${FIXaqmnexus:-${HOMEaqm}/fix/gfs}" -export FIXaqmfire="${FIXaqmfire:-${HOMEaqm}/fix/fire}" -export COMINgefs="${COMINgefs:-${COMROOT}/gefs/${gefs_ver})}" -export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" -export COMINemispt="${COMINemispt:-${COMIN}/emission/pt}" -export COMINemis="${COMINemis:-${COMIN}/emission}" -export DCOMINfire="${DCOMINfire:-${DCOMROOT}/${PDY}/rave}" -export DCOMINairnow="${DCOMINairnow:-${DCOMROOT}}" - -# -#----------------------------------------------------------------------- -# -# Change YES/NO (NCO standards; job card) to TRUE/FALSE (workflow standards) -# for NCO environment variables -# -#----------------------------------------------------------------------- -# -export KEEPDATA=$(boolify "${KEEPDATA}") -export SENDCOM=$(boolify "${SENDCOM}") -export SENDDBN=$(boolify "${SENDDBN}") -export SENDDBN_NTC=$(boolify "${SENDDBN_NTC}") -export SENDECF=$(boolify "${SENDECF}") -export SENDWEB=$(boolify "${SENDWEB}") - -# -#----------------------------------------------------------------------- -# -# Set cycle and ensemble member names in file/diectory names -# -#----------------------------------------------------------------------- -# -if [ $subcyc -eq 0 ]; then - export cycle="t${cyc}z" -else - export cycle="t${cyc}${subcyc}z" -fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! 
-z $ENSMEM_INDX ]; then - export dot_ensmem=".mem${ENSMEM_INDX}" -else - export dot_ensmem= -fi -# -#----------------------------------------------------------------------- -# -# Create a temp working directory (DATA) and cd into it. -# #----------------------------------------------------------------------- -# -export DATA= -export DATA=${DATAROOT}/${jobid} -mkdir -p $DATA -cd $DATA -# -#----------------------------------------------------------------------- -# -# Run setpdy to initialize PDYm and PDYp variables -# -#----------------------------------------------------------------------- -# -if [ ! -z $(command -v setpdy.sh) ]; then - COMROOT=$COMROOT setpdy.sh - . ./PDY -fi -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" -export CDATE=${PDY}${cyc} -# -#----------------------------------------------------------------------- -# # Set pgmout and pgmerr files -# #----------------------------------------------------------------------- -# export pgmout="${DATA}/OUTPUT.$$" export pgmerr="${DATA}/errfile" export REDIRECT_OUT_ERR=">>${pgmout} 2>${pgmerr}" export pgmout_lines=1 export pgmerr_lines=1 -function PREP_STEP() { - export pgm="$(basename ${0})" - if [ ! -z $(command -v prep_step) ]; then - . prep_step - else - # Append header - if [ -n "$pgm" ] && [ -n "$pgmout" ]; then - echo "$pgm" >> $pgmout - fi - # Remove error file - if [ -f $pgmerr ]; then - rm $pgmerr - fi - fi -} -function POST_STEP() { - if [ -f $pgmout ]; then - tail -n +${pgmout_lines} $pgmout - pgmout_line=$( wc -l $pgmout ) - pgmout_lines=$((pgmout_lines + 1)) - fi - if [ -f $pgmerr ]; then - tail -n +${pgmerr_lines} $pgmerr - pgmerr_line=$( wc -l $pgmerr ) - pgmerr_lines=$((pgmerr_lines + 1)) - fi -} -export -f PREP_STEP -export -f POST_STEP # #----------------------------------------------------------------------- -# # Create symlinks to log files in the experiment directory. 
Makes viewing # log files easier in NCO mode, as well as make CIs work -# #----------------------------------------------------------------------- # if [ "${WORKFLOW_MANAGER}" != "ecflow" ]; then @@ -180,35 +56,3 @@ if [ "${WORKFLOW_MANAGER}" != "ecflow" ]; then ln -sf $i ${__EXPTLOG}/${__LOGB}.log done fi -# -#----------------------------------------------------------------------- -# -# Add a postamble function -# -#----------------------------------------------------------------------- -# -function job_postamble() { - - # Remove temp directory - if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - # Remove current data directory - if [ $# -eq 0 ]; then - rm -rf $DATA - # Remove all current and shared data directories - elif [ "$1" = "TRUE" ]; then - rm -rf $DATA - share_pid="${WORKFLOW_ID}_${PDY}${cyc}" - rm -rf *${share_pid} - fi - fi - - # Print exit message - print_info_msg " -======================================================================== -Exiting script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" -========================================================================" -} - - From 87704a91c7e5d923fe0befac7c72b19ec5fa2320 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Sat, 18 Nov 2023 00:59:15 +0000 Subject: [PATCH 03/24] modiy ecf and J-job scirpts to meet EE2 requirements for AQMv7 --- ecf/scripts/forecast/jaqm_forecast.ecf | 4 ---- .../forecast/jaqm_forecast_data_cleanup.ecf | 6 ------ .../nexus/jaqm_nexus_emission_master.ecf | 14 +++++++------- ecf/scripts/nexus/jaqm_nexus_post_split.ecf | 12 ++++++------ ecf/scripts/post/jaqm_post_master.ecf | 6 ------ ecf/scripts/prep/jaqm_ics.ecf | 13 ------------- ecf/scripts/prep/jaqm_lbcs.ecf | 8 -------- ecf/scripts/prep/jaqm_make_ics.ecf | 17 ++++++----------- ecf/scripts/prep/jaqm_make_lbcs.ecf | 10 ---------- .../product/jaqm_bias_correction_o3.ecf | 7 ------- .../product/jaqm_bias_correction_pm25.ecf | 7 ------- ecf/scripts/product/jaqm_post_stat_o3.ecf | 7 ------- 
ecf/scripts/product/jaqm_post_stat_pm25.ecf | 7 ------- ecf/scripts/product/jaqm_pre_post_stat.ecf | 7 ------- .../pts_fire_emis/jaqm_fire_emission.ecf | 12 ++++++------ .../pts_fire_emis/jaqm_point_source.ecf | 12 ++++++------ jobs/JAQM_BIAS_CORRECTION_O3 | 6 ++++++ jobs/JAQM_BIAS_CORRECTION_PM25 | 6 ++++++ jobs/JAQM_DATA_CLEANUP | 9 ++++++++- jobs/JAQM_FIRE_EMISSION | 6 ++++++ jobs/JAQM_FORECAST | 4 ++++ jobs/JAQM_ICS | 11 ++++++++++- jobs/JAQM_LBCS | 11 ++++++++++- jobs/JAQM_MAKE_ICS | 14 +++++++++++--- jobs/JAQM_MAKE_LBCS | 9 +++++++++ jobs/JAQM_NEXUS_EMISSION | 9 +++++++++ jobs/JAQM_NEXUS_POST_SPLIT | 8 ++++++++ jobs/JAQM_POINT_SOURCE | 6 ++++++ jobs/JAQM_POST | 6 ++++++ jobs/JAQM_POST_STAT_O3 | 7 +++++++ jobs/JAQM_POST_STAT_PM25 | 6 ++++++ jobs/JAQM_PRE_POST_STAT | 7 +++++++ scripts/exaqm_post.sh | 18 +----------------- 33 files changed, 151 insertions(+), 141 deletions(-) diff --git a/ecf/scripts/forecast/jaqm_forecast.ecf b/ecf/scripts/forecast/jaqm_forecast.ecf index 7b75cb272e..49f03d3ef1 100644 --- a/ecf/scripts/forecast/jaqm_forecast.ecf +++ b/ecf/scripts/forecast/jaqm_forecast.ecf @@ -42,10 +42,6 @@ module load prod_util/${prod_util_ver} module list -export subcyc=0 -. ${HOMEaqm}/parm/config/var_defns.sh -. ${HOMEaqm}/ush/source_util_funcs.sh - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/forecast/jaqm_forecast_data_cleanup.ecf b/ecf/scripts/forecast/jaqm_forecast_data_cleanup.ecf index b018e43fcd..3427f9caee 100644 --- a/ecf/scripts/forecast/jaqm_forecast_data_cleanup.ecf +++ b/ecf/scripts/forecast/jaqm_forecast_data_cleanup.ecf @@ -18,12 +18,6 @@ model=aqm module list export cyc=%CYC% -export cycle=t${cyc}z -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. ${HOMEaqm}/parm/config/var_defns.sh -. 
${HOMEaqm}/ush/source_util_funcs.sh ############################################################ # CALL executable job script here diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf index 12f45868e7..8afb0ae998 100644 --- a/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf +++ b/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf @@ -33,13 +33,13 @@ module load nco/${nco_ver} module list export nspt="%NSPT%" -export subcyc=0 -export SLASH_ENSMEM_SUBDIR='' -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. ${HOMEaqm}/parm/config/var_defns.sh -. ${HOMEaqm}/ush/source_util_funcs.sh +#export subcyc=0 +#export SLASH_ENSMEM_SUBDIR='' +#export pid=${pid:-$$} +#export outid=${outid:-"LL$job"} +#export DATA=${DATA:-${DATAROOT}/${jobid:?}} +#. ${HOMEaqm}/parm/config/var_defns.sh +#. ${HOMEaqm}/ush/source_util_funcs.sh ############################################################ # CALL executable job script here diff --git a/ecf/scripts/nexus/jaqm_nexus_post_split.ecf b/ecf/scripts/nexus/jaqm_nexus_post_split.ecf index acd9503ce4..efa28a8f5b 100644 --- a/ecf/scripts/nexus/jaqm_nexus_post_split.ecf +++ b/ecf/scripts/nexus/jaqm_nexus_post_split.ecf @@ -30,12 +30,12 @@ module load nco/${nco_ver} module list -export subcyc=0 -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. ${HOMEaqm}/parm/config/var_defns.sh -. ${HOMEaqm}/ush/source_util_funcs.sh +#export subcyc=0 +#export pid=${pid:-$$} +#export outid=${outid:-"LL$job"} +#export DATA=${DATA:-${DATAROOT}/${jobid:?}} +#. ${HOMEaqm}/parm/config/var_defns.sh +#. 
${HOMEaqm}/ush/source_util_funcs.sh ############################################################ # CALL executable job script here diff --git a/ecf/scripts/post/jaqm_post_master.ecf b/ecf/scripts/post/jaqm_post_master.ecf index 70333e1552..091080ebe4 100644 --- a/ecf/scripts/post/jaqm_post_master.ecf +++ b/ecf/scripts/post/jaqm_post_master.ecf @@ -29,13 +29,7 @@ module load prod_util/${prod_util_ver} module list -export subcyc=0 export fhr="%FHR%" -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. ${HOMEaqm}/parm/config/var_defns.sh -. ${HOMEaqm}/ush/source_util_funcs.sh ############################################################ # CALL executable job script here diff --git a/ecf/scripts/prep/jaqm_ics.ecf b/ecf/scripts/prep/jaqm_ics.ecf index e96fd91928..c2bc96cda2 100644 --- a/ecf/scripts/prep/jaqm_ics.ecf +++ b/ecf/scripts/prep/jaqm_ics.ecf @@ -30,19 +30,6 @@ module load nco/${nco_ver} module list -export subcyc=0 -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. ${HOMEaqm}/parm/config/var_defns.sh -. 
${HOMEaqm}/ush/source_util_funcs.sh - -#SDATE=$($NDATE -6 ${PDY}${cyc}) -#PDYS_P1=$(echo $SDATE | cut -c1-8) -#cycs_p1=$(echo $SDATE | cut -c9-10) -#export PREV_CYCLE_DIR=$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYS_P1}/${cycs_p1}) -#export GLOBAL_VAR_DEFNS_FP="$HOMEaqm/parm/config/var_defns.sh" - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/prep/jaqm_lbcs.ecf b/ecf/scripts/prep/jaqm_lbcs.ecf index 9b4f40426f..f621e21e41 100644 --- a/ecf/scripts/prep/jaqm_lbcs.ecf +++ b/ecf/scripts/prep/jaqm_lbcs.ecf @@ -33,7 +33,6 @@ module load nemsiogfs/${nemsiogfs_ver} module load sigio/${sigio_ver} module load wgrib2/${wgrib2_ver} module load w3emc/${w3emc_ver} -module load w3nco/${w3nco_ver} module load udunits/${udunits_ver} module load gsl/${gsl_ver} module load nco/${nco_ver} @@ -42,13 +41,6 @@ module load prod_util/${prod_util_ver} module list -export subcyc=0 -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. ${HOMEaqm}/parm/config/var_defns.sh -. 
${HOMEaqm}/ush/source_util_funcs.sh - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/prep/jaqm_make_ics.ecf b/ecf/scripts/prep/jaqm_make_ics.ecf index 0a9bce8210..be816ca700 100644 --- a/ecf/scripts/prep/jaqm_make_ics.ecf +++ b/ecf/scripts/prep/jaqm_make_ics.ecf @@ -27,7 +27,6 @@ module load netcdf/${netcdf_ver} module load bacio/${bacio_ver} module load nemsio/${nemsio_ver} module load w3emc/${w3emc_ver} -module load w3nco/${w3nco_ver} module load udunits/${udunits_ver} module load gsl/${gsl_ver} module load nco/${nco_ver} @@ -36,21 +35,17 @@ module load prod_util/${prod_util_ver} module list -export subcyc='00' -export SLASH_ENSMEM_SUBDIR='' -export ENSMEM_INDX='##' - -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. ${HOMEaqm}/parm/config/var_defns.sh -. ${HOMEaqm}/ush/source_util_funcs.sh - ############################################################ # CALL executable job script here ############################################################ ${HOMEaqm}/jobs/JAQM_MAKE_ICS +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + %include %manual diff --git a/ecf/scripts/prep/jaqm_make_lbcs.ecf b/ecf/scripts/prep/jaqm_make_lbcs.ecf index c54f505d23..34a0f95c81 100644 --- a/ecf/scripts/prep/jaqm_make_lbcs.ecf +++ b/ecf/scripts/prep/jaqm_make_lbcs.ecf @@ -27,7 +27,6 @@ module load netcdf/${netcdf_ver} module load bacio/${bacio_ver} module load nemsio/${nemsio_ver} module load w3emc/${w3emc_ver} -module load w3nco/${w3nco_ver} module load udunits/${udunits_ver} module load gsl/${gsl_ver} module load nco/${nco_ver} @@ -36,15 +35,6 @@ module load prod_util/${prod_util_ver} module list -export subcyc='00' -export SLASH_ENSMEM_SUBDIR='' -export ENSMEM_INDX='##' -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. ${HOMEaqm}/parm/config/var_defns.sh -. ${HOMEaqm}/ush/source_util_funcs.sh - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/product/jaqm_bias_correction_o3.ecf b/ecf/scripts/product/jaqm_bias_correction_o3.ecf index e5fbda60d6..0eed947420 100644 --- a/ecf/scripts/product/jaqm_bias_correction_o3.ecf +++ b/ecf/scripts/product/jaqm_bias_correction_o3.ecf @@ -31,13 +31,6 @@ module load prod_util/${prod_util_ver} module list -export subcyc=0 -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. ${HOMEaqm}/parm/config/var_defns.sh -. 
${HOMEaqm}/ush/source_util_funcs.sh - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/product/jaqm_bias_correction_pm25.ecf b/ecf/scripts/product/jaqm_bias_correction_pm25.ecf index c11d2e1645..1e9d394012 100644 --- a/ecf/scripts/product/jaqm_bias_correction_pm25.ecf +++ b/ecf/scripts/product/jaqm_bias_correction_pm25.ecf @@ -31,13 +31,6 @@ module load prod_util/${prod_util_ver} module list -export subcyc=0 -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. ${HOMEaqm}/parm/config/var_defns.sh -. ${HOMEaqm}/ush/source_util_funcs.sh - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/product/jaqm_post_stat_o3.ecf b/ecf/scripts/product/jaqm_post_stat_o3.ecf index 98bf21b590..79267cb44d 100644 --- a/ecf/scripts/product/jaqm_post_stat_o3.ecf +++ b/ecf/scripts/product/jaqm_post_stat_o3.ecf @@ -31,13 +31,6 @@ module load prod_util/${prod_util_ver} module list -export subcyc=0 -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. ${HOMEaqm}/parm/config/var_defns.sh -. ${HOMEaqm}/ush/source_util_funcs.sh - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/product/jaqm_post_stat_pm25.ecf b/ecf/scripts/product/jaqm_post_stat_pm25.ecf index 75b8e4d3ad..5719711691 100644 --- a/ecf/scripts/product/jaqm_post_stat_pm25.ecf +++ b/ecf/scripts/product/jaqm_post_stat_pm25.ecf @@ -31,13 +31,6 @@ module load prod_util/${prod_util_ver} module list -export subcyc=0 -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. 
${HOMEaqm}/parm/config/var_defns.sh -. ${HOMEaqm}/ush/source_util_funcs.sh - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/product/jaqm_pre_post_stat.ecf b/ecf/scripts/product/jaqm_pre_post_stat.ecf index e1bfdbd957..c2c1825a31 100644 --- a/ecf/scripts/product/jaqm_pre_post_stat.ecf +++ b/ecf/scripts/product/jaqm_pre_post_stat.ecf @@ -30,13 +30,6 @@ module load prod_util/${prod_util_ver} module list -export subcyc=0 -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. ${HOMEaqm}/parm/config/var_defns.sh -. ${HOMEaqm}/ush/source_util_funcs.sh - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf b/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf index 247af702d0..5db12a18fc 100644 --- a/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf +++ b/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf @@ -30,12 +30,12 @@ module load prod_util/${prod_util_ver} module list -export subcyc=0 -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. ${HOMEaqm}/parm/config/var_defns.sh -. ${HOMEaqm}/ush/source_util_funcs.sh +#export subcyc=0 +#export pid=${pid:-$$} +#export outid=${outid:-"LL$job"} +#export DATA=${DATA:-${DATAROOT}/${jobid:?}} +#. ${HOMEaqm}/parm/config/var_defns.sh +#. 
${HOMEaqm}/ush/source_util_funcs.sh ############################################################ # CALL executable job script here diff --git a/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf b/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf index 2d7a94a6cd..745e804217 100644 --- a/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf +++ b/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf @@ -25,12 +25,12 @@ module load prod_util/${prod_util_ver} module list -export subcyc=0 -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -. ${HOMEaqm}/parm/config/var_defns.sh -. ${HOMEaqm}/ush/source_util_funcs.sh +#export subcyc=0 +#export pid=${pid:-$$} +#export outid=${outid:-"LL$job"} +#export DATA=${DATA:-${DATAROOT}/${jobid:?}} +#. ${HOMEaqm}/parm/config/var_defns.sh +#. ${HOMEaqm}/ush/source_util_funcs.sh ############################################################ # CALL executable job script here diff --git a/jobs/JAQM_BIAS_CORRECTION_O3 b/jobs/JAQM_BIAS_CORRECTION_O3 index 80b6691264..05fedcd473 100755 --- a/jobs/JAQM_BIAS_CORRECTION_O3 +++ b/jobs/JAQM_BIAS_CORRECTION_O3 @@ -8,7 +8,13 @@ set -xue # Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc=0 +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +. ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP} diff --git a/jobs/JAQM_BIAS_CORRECTION_PM25 b/jobs/JAQM_BIAS_CORRECTION_PM25 index cdbcddc7bd..f8a06f0f61 100755 --- a/jobs/JAQM_BIAS_CORRECTION_PM25 +++ b/jobs/JAQM_BIAS_CORRECTION_PM25 @@ -8,7 +8,13 @@ set -xue # Source the variable definitions file and the bash utility functions. 
#----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc=0 +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +. ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP} diff --git a/jobs/JAQM_DATA_CLEANUP b/jobs/JAQM_DATA_CLEANUP index 89abf0bbc7..5505559b0d 100755 --- a/jobs/JAQM_DATA_CLEANUP +++ b/jobs/JAQM_DATA_CLEANUP @@ -7,10 +7,17 @@ set -xue ############################################## # Obtain unique process id (pid) and make temp directory ############################################## +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} +export USHaqm=${USHaqm:-${HOMEaqm}/ush} + +export cycle=t${cyc}z export pid=${pid:-$$} export outid=${outid:-"LL$job"} - export DATA=${DATA:-${DATAROOT}/${jobid:?}} + +. ${PARMaqm}/config/var_defns.sh +. ${USHaqm}/source_util_funcs.sh + mkdir -p $DATA cd $DATA diff --git a/jobs/JAQM_FIRE_EMISSION b/jobs/JAQM_FIRE_EMISSION index dd61c6484f..ff042b24dc 100755 --- a/jobs/JAQM_FIRE_EMISSION +++ b/jobs/JAQM_FIRE_EMISSION @@ -8,7 +8,13 @@ set -xue # Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc=0 +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +. ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} diff --git a/jobs/JAQM_FORECAST b/jobs/JAQM_FORECAST index 4f31710d21..4d4724da4f 100755 --- a/jobs/JAQM_FORECAST +++ b/jobs/JAQM_FORECAST @@ -8,7 +8,11 @@ set -xue # Source the variable definitions file and the bash utility functions. 
#----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc=0 +. ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh source_config_for_task "task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} diff --git a/jobs/JAQM_ICS b/jobs/JAQM_ICS index 4bb2b18ae3..f008bf4f10 100755 --- a/jobs/JAQM_ICS +++ b/jobs/JAQM_ICS @@ -7,8 +7,17 @@ set -xue #----------------------------------------------------------------------- # Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- -export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export USHaqm=${EXECaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc='00' +export SLASH_ENSMEM_SUBDIR='' +export ENSMEM_INDX='##' + +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +. ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} diff --git a/jobs/JAQM_LBCS b/jobs/JAQM_LBCS index cfc5248f16..51c25068b3 100755 --- a/jobs/JAQM_LBCS +++ b/jobs/JAQM_LBCS @@ -7,8 +7,17 @@ set -xue #----------------------------------------------------------------------- # Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- -export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export USHaqm=${EXECaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc='00' +export SLASH_ENSMEM_SUBDIR='' +export ENSMEM_INDX='##' + +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +. ${PARMaqm}/config/var_defns.sh . 
$USHaqm/source_util_funcs.sh source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP} diff --git a/jobs/JAQM_MAKE_ICS b/jobs/JAQM_MAKE_ICS index 8e8dedfae2..2c93a17252 100755 --- a/jobs/JAQM_MAKE_ICS +++ b/jobs/JAQM_MAKE_ICS @@ -7,9 +7,18 @@ set -xue #----------------------------------------------------------------------- # Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- -export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export USHaqm=${EXECaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc='00' +export SLASH_ENSMEM_SUBDIR='' +export ENSMEM_INDX='##' -. $USHaqm/source_util_funcs.sh +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +. ${PARMaqm}/config/var_defns.sh +. ${USHaqm}/source_util_funcs.sh source_config_for_task "task_make_ics|task_get_extrn_ics" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" @@ -28,7 +37,6 @@ export CDATE=${PDY}${cyc} . $USHaqm/job_preamble.sh export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} -export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} diff --git a/jobs/JAQM_MAKE_LBCS b/jobs/JAQM_MAKE_LBCS index e2f6f327d3..296c40a736 100755 --- a/jobs/JAQM_MAKE_LBCS +++ b/jobs/JAQM_MAKE_LBCS @@ -9,7 +9,16 @@ set -xue # Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc='00' +export SLASH_ENSMEM_SUBDIR='' +export ENSMEM_INDX='##' + +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +. ${PARMaqm}/config/var_defns.sh . 
$USHaqm/source_util_funcs.sh source_config_for_task "task_make_lbcs|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} diff --git a/jobs/JAQM_NEXUS_EMISSION b/jobs/JAQM_NEXUS_EMISSION index 8ff6a0022a..5c97b8c999 100755 --- a/jobs/JAQM_NEXUS_EMISSION +++ b/jobs/JAQM_NEXUS_EMISSION @@ -8,8 +8,17 @@ set -xue # Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc=0 +export SLASH_ENSMEM_SUBDIR='' +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +. ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh + source_config_for_task "task_run_fcst|cpl_aqm_parm|task_nexus_emission|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" diff --git a/jobs/JAQM_NEXUS_POST_SPLIT b/jobs/JAQM_NEXUS_POST_SPLIT index 4715f3bf7b..a2a6eb5be4 100755 --- a/jobs/JAQM_NEXUS_POST_SPLIT +++ b/jobs/JAQM_NEXUS_POST_SPLIT @@ -8,6 +8,14 @@ set -xue # Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc=0 +export SLASH_ENSMEM_SUBDIR='' +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +. ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP} diff --git a/jobs/JAQM_POINT_SOURCE b/jobs/JAQM_POINT_SOURCE index 032f6d9a73..3035be820e 100755 --- a/jobs/JAQM_POINT_SOURCE +++ b/jobs/JAQM_POINT_SOURCE @@ -8,7 +8,13 @@ set -xue # Source the variable definitions file and the bash utility functions. 
#----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc=0 +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +. ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP} diff --git a/jobs/JAQM_POST b/jobs/JAQM_POST index 8424139b8d..12c8048827 100755 --- a/jobs/JAQM_POST +++ b/jobs/JAQM_POST @@ -10,7 +10,13 @@ set -xue # #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc=0 +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +. ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh source_config_for_task "task_run_post|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} diff --git a/jobs/JAQM_POST_STAT_O3 b/jobs/JAQM_POST_STAT_O3 index 8819203b0a..656bc38500 100755 --- a/jobs/JAQM_POST_STAT_O3 +++ b/jobs/JAQM_POST_STAT_O3 @@ -5,6 +5,13 @@ export PS4=' $SECONDS + ' set -xue export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc=0 +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +. ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP} diff --git a/jobs/JAQM_POST_STAT_PM25 b/jobs/JAQM_POST_STAT_PM25 index 817a151e6b..b382313f39 100755 --- a/jobs/JAQM_POST_STAT_PM25 +++ b/jobs/JAQM_POST_STAT_PM25 @@ -8,7 +8,13 @@ set -xue # Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc=0 +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +. ${PARMaqm}/config/var_defns.sh . 
$USHaqm/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP} diff --git a/jobs/JAQM_PRE_POST_STAT b/jobs/JAQM_PRE_POST_STAT index a36e09bbc3..4ad738e22a 100755 --- a/jobs/JAQM_PRE_POST_STAT +++ b/jobs/JAQM_PRE_POST_STAT @@ -8,6 +8,13 @@ set -xue # Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} +export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} + +export subcyc=0 +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +. ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh source_config_for_task "task_pre_post_stat" ${GLOBAL_VAR_DEFNS_FP} diff --git a/scripts/exaqm_post.sh b/scripts/exaqm_post.sh index b43b0a2b01..ffda82a371 100755 --- a/scripts/exaqm_post.sh +++ b/scripts/exaqm_post.sh @@ -169,26 +169,10 @@ if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then mnts_secs_str=":${fmn}:00" fi fi -# -# Set the names of the forecast model's write-component output files. -# -if [ "${WORKFLOW_MANAGER}" = "ecflow" ]; then - DATAFCST=$DATAROOT/${RUN}_forecast${dot_ensmem/./_}_${cyc}.${share_pid} - if [ ! -d ${DATAFCST} ]; then - echo "Fatal error DATAFCST not found in production mode" - exit 7 - fi -else - DATAFCST=$DATAROOT/run_fcst${dot_ensmem/./_}.${share_pid} -fi -if [ "${CPL_AQM}" = "TRUE" ]; then dyn_file="${COMIN}/${cyc}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}${mnts_secs_str}.nc" phy_file="${COMIN}/${cyc}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}${mnts_secs_str}.nc" -else - dyn_file="${DATAFCST}/dynf${fhr}${mnts_secs_str}.nc" - phy_file="${DATAFCST}/phyf${fhr}${mnts_secs_str}.nc" -fi + # # Set parameters that specify the actual time (not forecast time) of the # output. 
From 6a09a21bbd454ba76f3b29e1062d6400fc7e2110 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Sun, 19 Nov 2023 15:47:40 +0000 Subject: [PATCH 04/24] revised ecflow scripts and update J-job and ex-scripts and delete unreleated ufs files --- ecf/scripts/nexus/jaqm_nexus_emission_00.ecf | 1 + ecf/scripts/nexus/jaqm_nexus_emission_01.ecf | 1 + ecf/scripts/nexus/jaqm_nexus_emission_02.ecf | 1 + ecf/scripts/nexus/jaqm_nexus_emission_03.ecf | 1 + ecf/scripts/nexus/jaqm_nexus_emission_04.ecf | 1 + ecf/scripts/nexus/jaqm_nexus_emission_05.ecf | 1 + .../nexus/jaqm_nexus_emission_master.ecf | 12 - ecf/scripts/nexus/jaqm_nexus_post_split.ecf | 11 - ecf/scripts/post/jaqm_post_master.ecf | 1 - ecf/scripts/prep/jaqm_ics.ecf | 1 - ecf/scripts/prep/jaqm_lbcs.ecf | 11 +- ecf/scripts/prep/jaqm_make_ics.ecf | 9 +- ecf/scripts/prep/jaqm_make_lbcs.ecf | 7 - .../product/jaqm_bias_correction_o3.ecf | 2 - .../product/jaqm_bias_correction_pm25.ecf | 2 - ecf/scripts/product/jaqm_post_stat_o3.ecf | 2 - ecf/scripts/product/jaqm_post_stat_pm25.ecf | 2 - ecf/scripts/product/jaqm_pre_post_stat.ecf | 1 - .../pts_fire_emis/jaqm_fire_emission.ecf | 1 - .../pts_fire_emis/jaqm_point_source.ecf | 8 - jobs/JAQM_BIAS_CORRECTION_O3 | 1 - jobs/JAQM_BIAS_CORRECTION_PM25 | 1 - jobs/JAQM_FIRE_EMISSION | 1 - jobs/JAQM_FORECAST | 1 - jobs/JAQM_ICS | 1 - jobs/JAQM_LBCS | 1 - jobs/JAQM_MAKE_ICS | 1 - jobs/JAQM_MAKE_LBCS | 1 - jobs/JAQM_NEXUS_EMISSION | 2 - jobs/JAQM_NEXUS_POST_SPLIT | 1 - jobs/JAQM_POINT_SOURCE | 1 - jobs/JAQM_POST | 1 - jobs/JAQM_POST_STAT_O3 | 1 - jobs/JAQM_POST_STAT_PM25 | 1 - jobs/JAQM_PRE_POST_STAT | 2 - modulefiles/tasks/.gitignore | 6 - modulefiles/tasks/wcoss2/aqm_ics.local.lua | 6 - modulefiles/tasks/wcoss2/aqm_lbcs.local.lua | 27 - .../tasks/wcoss2/aqm_lbcs.local.lua_jh | 27 - .../tasks/wcoss2/aqm_manager.local.lua | 1 - .../tasks/wcoss2/bias_correction_o3.local.lua | 17 - .../wcoss2/bias_correction_pm25.local.lua | 17 - .../tasks/wcoss2/data_cleanup.local.lua | 1 - 
.../tasks/wcoss2/fire_emission.local.lua | 6 - .../tasks/wcoss2/get_extrn_ics.local.lua | 1 - .../tasks/wcoss2/get_extrn_lbcs.local.lua | 1 - modulefiles/tasks/wcoss2/make_grid.local.lua | 26 - modulefiles/tasks/wcoss2/make_ics.local.lua | 17 - modulefiles/tasks/wcoss2/make_lbcs.local.lua | 17 - modulefiles/tasks/wcoss2/make_orog.local.lua | 27 - .../tasks/wcoss2/make_sfc_climo.local.lua | 26 - .../tasks/wcoss2/nexus_emission.local.lua | 17 - .../tasks/wcoss2/nexus_gfs_sfc.local.lua | 1 - .../tasks/wcoss2/nexus_post_split.local.lua | 8 - .../tasks/wcoss2/point_source.local.lua | 1 - .../tasks/wcoss2/post_stat_o3.local.lua | 17 - .../tasks/wcoss2/post_stat_pm25.local.lua | 17 - .../tasks/wcoss2/pre_post_stat.local.lua | 6 - .../tasks/wcoss2/python_regional_workflow.lua | 5 - modulefiles/tasks/wcoss2/run_fcst.local.lua | 27 - modulefiles/tasks/wcoss2/run_post.local.lua | 14 - parm/diag_table_aqm.FV3_GFS_v15p2 | 511 --- parm/field_table_aqm.FV3_GFS_v15p2 | 979 ------ sorc/auto_build_exec.sh | 5 + ush/UFS_plot_domains.py | 203 -- ush/calculate_cost.py | 107 - ush/check_python_version.py | 57 - ush/check_ruc_lsm.py | 41 - ush/cmp_expt_to_baseline.sh | 411 --- ush/cmp_rundirs_ncfiles.sh | 100 - ush/config.aqm.nco.realtime.yaml | 117 - ush/config_defaults.yaml | 3053 ----------------- ush/constants.yaml | 71 - ush/generate_FV3LAM_wflow.py | 963 ------ ush/get_crontab_contents.py | 240 -- ush/get_layout.sh | 147 - ush/launch_FV3LAM_wflow.sh | 404 --- ush/link_fix.py | 458 --- ush/load_modules_wflow.sh | 73 - ush/make_grid_mosaic_file.sh | 189 - ush/mrms_pull_topofhour.py | 169 - ush/predef_grid_params.yaml | 942 ----- ush/run_srw_tests.py | 62 - ush/set_FV3nml_ens_stoch_seeds.py | 202 -- ush/set_FV3nml_sfc_climo_filenames.py | 171 - ush/set_gridparams_ESGgrid.py | 116 - ush/set_gridparams_GFDLgrid.py | 480 --- ush/set_ozone_param.py | 165 - ush/set_predef_grid_params.py | 75 - ush/set_vx_fhr_list.sh | 282 -- ush/set_vx_params.sh | 276 -- ush/setup.py | 1399 -------- 
ush/update_input_nml.py | 162 - ush/valid_param_vals.yaml | 108 - 94 files changed, 13 insertions(+), 13154 deletions(-) create mode 120000 ecf/scripts/nexus/jaqm_nexus_emission_00.ecf create mode 120000 ecf/scripts/nexus/jaqm_nexus_emission_01.ecf create mode 120000 ecf/scripts/nexus/jaqm_nexus_emission_02.ecf create mode 120000 ecf/scripts/nexus/jaqm_nexus_emission_03.ecf create mode 120000 ecf/scripts/nexus/jaqm_nexus_emission_04.ecf create mode 120000 ecf/scripts/nexus/jaqm_nexus_emission_05.ecf delete mode 100644 modulefiles/tasks/.gitignore delete mode 100644 modulefiles/tasks/wcoss2/aqm_ics.local.lua delete mode 100644 modulefiles/tasks/wcoss2/aqm_lbcs.local.lua delete mode 100644 modulefiles/tasks/wcoss2/aqm_lbcs.local.lua_jh delete mode 100644 modulefiles/tasks/wcoss2/aqm_manager.local.lua delete mode 100644 modulefiles/tasks/wcoss2/bias_correction_o3.local.lua delete mode 100644 modulefiles/tasks/wcoss2/bias_correction_pm25.local.lua delete mode 100644 modulefiles/tasks/wcoss2/data_cleanup.local.lua delete mode 100644 modulefiles/tasks/wcoss2/fire_emission.local.lua delete mode 100644 modulefiles/tasks/wcoss2/get_extrn_ics.local.lua delete mode 100644 modulefiles/tasks/wcoss2/get_extrn_lbcs.local.lua delete mode 100644 modulefiles/tasks/wcoss2/make_grid.local.lua delete mode 100644 modulefiles/tasks/wcoss2/make_ics.local.lua delete mode 100644 modulefiles/tasks/wcoss2/make_lbcs.local.lua delete mode 100644 modulefiles/tasks/wcoss2/make_orog.local.lua delete mode 100644 modulefiles/tasks/wcoss2/make_sfc_climo.local.lua delete mode 100644 modulefiles/tasks/wcoss2/nexus_emission.local.lua delete mode 100644 modulefiles/tasks/wcoss2/nexus_gfs_sfc.local.lua delete mode 100644 modulefiles/tasks/wcoss2/nexus_post_split.local.lua delete mode 100644 modulefiles/tasks/wcoss2/point_source.local.lua delete mode 100644 modulefiles/tasks/wcoss2/post_stat_o3.local.lua delete mode 100644 modulefiles/tasks/wcoss2/post_stat_pm25.local.lua delete mode 100644 
modulefiles/tasks/wcoss2/pre_post_stat.local.lua delete mode 100644 modulefiles/tasks/wcoss2/python_regional_workflow.lua delete mode 100644 modulefiles/tasks/wcoss2/run_fcst.local.lua delete mode 100644 modulefiles/tasks/wcoss2/run_post.local.lua delete mode 100644 parm/diag_table_aqm.FV3_GFS_v15p2 delete mode 100644 parm/field_table_aqm.FV3_GFS_v15p2 create mode 100755 sorc/auto_build_exec.sh delete mode 100755 ush/UFS_plot_domains.py delete mode 100755 ush/calculate_cost.py delete mode 100755 ush/check_python_version.py delete mode 100755 ush/check_ruc_lsm.py delete mode 100755 ush/cmp_expt_to_baseline.sh delete mode 100755 ush/cmp_rundirs_ncfiles.sh delete mode 100755 ush/config.aqm.nco.realtime.yaml delete mode 100755 ush/config_defaults.yaml delete mode 100755 ush/constants.yaml delete mode 100755 ush/generate_FV3LAM_wflow.py delete mode 100755 ush/get_crontab_contents.py delete mode 100755 ush/get_layout.sh delete mode 100755 ush/launch_FV3LAM_wflow.sh delete mode 100755 ush/link_fix.py delete mode 100755 ush/load_modules_wflow.sh delete mode 100755 ush/make_grid_mosaic_file.sh delete mode 100755 ush/mrms_pull_topofhour.py delete mode 100755 ush/predef_grid_params.yaml delete mode 100755 ush/run_srw_tests.py delete mode 100755 ush/set_FV3nml_ens_stoch_seeds.py delete mode 100755 ush/set_FV3nml_sfc_climo_filenames.py delete mode 100755 ush/set_gridparams_ESGgrid.py delete mode 100755 ush/set_gridparams_GFDLgrid.py delete mode 100755 ush/set_ozone_param.py delete mode 100755 ush/set_predef_grid_params.py delete mode 100755 ush/set_vx_fhr_list.sh delete mode 100755 ush/set_vx_params.sh delete mode 100755 ush/setup.py delete mode 100755 ush/update_input_nml.py delete mode 100755 ush/valid_param_vals.yaml diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_00.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_00.ecf new file mode 120000 index 0000000000..8868b7f417 --- /dev/null +++ b/ecf/scripts/nexus/jaqm_nexus_emission_00.ecf @@ -0,0 +1 @@ 
+jaqm_nexus_emission_master.ecf \ No newline at end of file diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_01.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_01.ecf new file mode 120000 index 0000000000..8868b7f417 --- /dev/null +++ b/ecf/scripts/nexus/jaqm_nexus_emission_01.ecf @@ -0,0 +1 @@ +jaqm_nexus_emission_master.ecf \ No newline at end of file diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_02.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_02.ecf new file mode 120000 index 0000000000..8868b7f417 --- /dev/null +++ b/ecf/scripts/nexus/jaqm_nexus_emission_02.ecf @@ -0,0 +1 @@ +jaqm_nexus_emission_master.ecf \ No newline at end of file diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_03.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_03.ecf new file mode 120000 index 0000000000..8868b7f417 --- /dev/null +++ b/ecf/scripts/nexus/jaqm_nexus_emission_03.ecf @@ -0,0 +1 @@ +jaqm_nexus_emission_master.ecf \ No newline at end of file diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_04.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_04.ecf new file mode 120000 index 0000000000..8868b7f417 --- /dev/null +++ b/ecf/scripts/nexus/jaqm_nexus_emission_04.ecf @@ -0,0 +1 @@ +jaqm_nexus_emission_master.ecf \ No newline at end of file diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_05.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_05.ecf new file mode 120000 index 0000000000..8868b7f417 --- /dev/null +++ b/ecf/scripts/nexus/jaqm_nexus_emission_05.ecf @@ -0,0 +1 @@ +jaqm_nexus_emission_master.ecf \ No newline at end of file diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf index 8afb0ae998..c241c9c2e3 100644 --- a/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf +++ b/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf @@ -23,23 +23,11 @@ module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} -module load esmf/${esmf_ver} module load 
python/${python_ver} -module load prod_util/${prod_util_ver} -module load udunits/${udunits_ver} -module load gsl/${gsl_ver} -module load nco/${nco_ver} module list export nspt="%NSPT%" -#export subcyc=0 -#export SLASH_ENSMEM_SUBDIR='' -#export pid=${pid:-$$} -#export outid=${outid:-"LL$job"} -#export DATA=${DATA:-${DATAROOT}/${jobid:?}} -#. ${HOMEaqm}/parm/config/var_defns.sh -#. ${HOMEaqm}/ush/source_util_funcs.sh ############################################################ # CALL executable job script here diff --git a/ecf/scripts/nexus/jaqm_nexus_post_split.ecf b/ecf/scripts/nexus/jaqm_nexus_post_split.ecf index efa28a8f5b..b34477c7da 100644 --- a/ecf/scripts/nexus/jaqm_nexus_post_split.ecf +++ b/ecf/scripts/nexus/jaqm_nexus_post_split.ecf @@ -23,20 +23,9 @@ module load cray-pals/${cray_pals_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load python/${python_ver} -module load prod_util/${prod_util_ver} -module load udunits/${udunits_ver} -module load gsl/${gsl_ver} -module load nco/${nco_ver} module list -#export subcyc=0 -#export pid=${pid:-$$} -#export outid=${outid:-"LL$job"} -#export DATA=${DATA:-${DATAROOT}/${jobid:?}} -#. ${HOMEaqm}/parm/config/var_defns.sh -#. 
${HOMEaqm}/ush/source_util_funcs.sh - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/post/jaqm_post_master.ecf b/ecf/scripts/post/jaqm_post_master.ecf index 091080ebe4..65c7796cc8 100644 --- a/ecf/scripts/post/jaqm_post_master.ecf +++ b/ecf/scripts/post/jaqm_post_master.ecf @@ -25,7 +25,6 @@ module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load libjpeg/${libjpeg_ver} module load python/${python_ver} -module load prod_util/${prod_util_ver} module list diff --git a/ecf/scripts/prep/jaqm_ics.ecf b/ecf/scripts/prep/jaqm_ics.ecf index c2bc96cda2..cdcac8c50a 100644 --- a/ecf/scripts/prep/jaqm_ics.ecf +++ b/ecf/scripts/prep/jaqm_ics.ecf @@ -23,7 +23,6 @@ module load cray-pals/${cray_pals_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load python/${python_ver} -module load prod_util/${prod_util_ver} module load udunits/${udunits_ver} module load gsl/${gsl_ver} module load nco/${nco_ver} diff --git a/ecf/scripts/prep/jaqm_lbcs.ecf b/ecf/scripts/prep/jaqm_lbcs.ecf index f621e21e41..df4d7ed97c 100644 --- a/ecf/scripts/prep/jaqm_lbcs.ecf +++ b/ecf/scripts/prep/jaqm_lbcs.ecf @@ -21,23 +21,14 @@ module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} module load jasper/${jasper_ver} -module load zlib/${zlib_ver} -module load libpng/${libpng_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} -module load bacio/${bacio_ver} -module load bufr/${bufr_ver} -module load g2/${g2_ver} module load nemsio/${nemsio_ver} -module load nemsiogfs/${nemsiogfs_ver} -module load sigio/${sigio_ver} -module load wgrib2/${wgrib2_ver} -module load w3emc/${w3emc_ver} +#module load nemsiogfs/${nemsiogfs_ver} module load udunits/${udunits_ver} module load gsl/${gsl_ver} module load nco/${nco_ver} module load python/${python_ver} -module load 
prod_util/${prod_util_ver} module list diff --git a/ecf/scripts/prep/jaqm_make_ics.ecf b/ecf/scripts/prep/jaqm_make_ics.ecf index be816ca700..36d6472522 100644 --- a/ecf/scripts/prep/jaqm_make_ics.ecf +++ b/ecf/scripts/prep/jaqm_make_ics.ecf @@ -4,7 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:30:00 -#PBS -l select=4:mpiprocs=12:ncpus=12 +#PBS -l select=1:mpiprocs=128:ncpus=128 #PBS -l place=vscatter:exclhost #PBS -l debug=true @@ -24,14 +24,7 @@ module load cray-pals/${cray_pals_ver} module load libjpeg/${libjpeg_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} -module load bacio/${bacio_ver} -module load nemsio/${nemsio_ver} -module load w3emc/${w3emc_ver} -module load udunits/${udunits_ver} -module load gsl/${gsl_ver} -module load nco/${nco_ver} module load python/${python_ver} -module load prod_util/${prod_util_ver} module list diff --git a/ecf/scripts/prep/jaqm_make_lbcs.ecf b/ecf/scripts/prep/jaqm_make_lbcs.ecf index 34a0f95c81..2eb2f0c7c9 100644 --- a/ecf/scripts/prep/jaqm_make_lbcs.ecf +++ b/ecf/scripts/prep/jaqm_make_lbcs.ecf @@ -24,14 +24,7 @@ module load cray-pals/${cray_pals_ver} module load libjpeg/${libjpeg_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} -module load bacio/${bacio_ver} -module load nemsio/${nemsio_ver} -module load w3emc/${w3emc_ver} -module load udunits/${udunits_ver} -module load gsl/${gsl_ver} -module load nco/${nco_ver} module load python/${python_ver} -module load prod_util/${prod_util_ver} module list diff --git a/ecf/scripts/product/jaqm_bias_correction_o3.ecf b/ecf/scripts/product/jaqm_bias_correction_o3.ecf index 0eed947420..971839e83c 100644 --- a/ecf/scripts/product/jaqm_bias_correction_o3.ecf +++ b/ecf/scripts/product/jaqm_bias_correction_o3.ecf @@ -24,10 +24,8 @@ module load libjpeg/${libjpeg_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load wgrib2/${wgrib2_ver} -module load bufr/${bufr_ver} module load grib_util/${grib_util_ver} 
module load python/${python_ver} -module load prod_util/${prod_util_ver} module list diff --git a/ecf/scripts/product/jaqm_bias_correction_pm25.ecf b/ecf/scripts/product/jaqm_bias_correction_pm25.ecf index 1e9d394012..249d71646e 100644 --- a/ecf/scripts/product/jaqm_bias_correction_pm25.ecf +++ b/ecf/scripts/product/jaqm_bias_correction_pm25.ecf @@ -24,10 +24,8 @@ module load libjpeg/${libjpeg_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load wgrib2/${wgrib2_ver} -module load bufr/${bufr_ver} module load grib_util/${grib_util_ver} module load python/${python_ver} -module load prod_util/${prod_util_ver} module list diff --git a/ecf/scripts/product/jaqm_post_stat_o3.ecf b/ecf/scripts/product/jaqm_post_stat_o3.ecf index 79267cb44d..7fcfdefb1d 100644 --- a/ecf/scripts/product/jaqm_post_stat_o3.ecf +++ b/ecf/scripts/product/jaqm_post_stat_o3.ecf @@ -24,10 +24,8 @@ module load libjpeg/${libjpeg_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load wgrib2/${wgrib2_ver} -module load bufr/${bufr_ver} module load grib_util/${grib_util_ver} module load python/${python_ver} -module load prod_util/${prod_util_ver} module list diff --git a/ecf/scripts/product/jaqm_post_stat_pm25.ecf b/ecf/scripts/product/jaqm_post_stat_pm25.ecf index 5719711691..6166e62193 100644 --- a/ecf/scripts/product/jaqm_post_stat_pm25.ecf +++ b/ecf/scripts/product/jaqm_post_stat_pm25.ecf @@ -24,10 +24,8 @@ module load libjpeg/${libjpeg_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load wgrib2/${wgrib2_ver} -module load bufr/${bufr_ver} module load grib_util/${grib_util_ver} module load python/${python_ver} -module load prod_util/${prod_util_ver} module list diff --git a/ecf/scripts/product/jaqm_pre_post_stat.ecf b/ecf/scripts/product/jaqm_pre_post_stat.ecf index c2c1825a31..c57be052e6 100644 --- a/ecf/scripts/product/jaqm_pre_post_stat.ecf +++ b/ecf/scripts/product/jaqm_pre_post_stat.ecf @@ -26,7 +26,6 @@ module load 
udunits/${udunits_ver} module load gsl/${gsl_ver} module load nco/${nco_ver} module load python/${python_ver} -module load prod_util/${prod_util_ver} module list diff --git a/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf b/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf index 5db12a18fc..73555c3a23 100644 --- a/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf +++ b/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf @@ -26,7 +26,6 @@ module load udunits/${udunits_ver} module load gsl/${gsl_ver} module load nco/${nco_ver} module load python/${python_ver} -module load prod_util/${prod_util_ver} module list diff --git a/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf b/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf index 745e804217..0886281956 100644 --- a/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf +++ b/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf @@ -21,17 +21,9 @@ module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} module load python/${python_ver} -module load prod_util/${prod_util_ver} module list -#export subcyc=0 -#export pid=${pid:-$$} -#export outid=${outid:-"LL$job"} -#export DATA=${DATA:-${DATAROOT}/${jobid:?}} -#. ${HOMEaqm}/parm/config/var_defns.sh -#. ${HOMEaqm}/ush/source_util_funcs.sh - ############################################################ # CALL executable job script here ############################################################ diff --git a/jobs/JAQM_BIAS_CORRECTION_O3 b/jobs/JAQM_BIAS_CORRECTION_O3 index 05fedcd473..8d9a45d646 100755 --- a/jobs/JAQM_BIAS_CORRECTION_O3 +++ b/jobs/JAQM_BIAS_CORRECTION_O3 @@ -16,7 +16,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . 
$USHaqm/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_BIAS_CORRECTION_PM25 b/jobs/JAQM_BIAS_CORRECTION_PM25 index f8a06f0f61..d98f56fa2b 100755 --- a/jobs/JAQM_BIAS_CORRECTION_PM25 +++ b/jobs/JAQM_BIAS_CORRECTION_PM25 @@ -16,7 +16,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_FIRE_EMISSION b/jobs/JAQM_FIRE_EMISSION index ff042b24dc..1251670476 100755 --- a/jobs/JAQM_FIRE_EMISSION +++ b/jobs/JAQM_FIRE_EMISSION @@ -16,7 +16,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT=${DATAROOT:-${DATAROOT_dfv}} export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_FORECAST b/jobs/JAQM_FORECAST index 4d4724da4f..e17b13c26c 100755 --- a/jobs/JAQM_FORECAST +++ b/jobs/JAQM_FORECAST @@ -14,7 +14,6 @@ export subcyc=0 . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh -source_config_for_task "task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_ICS b/jobs/JAQM_ICS index f008bf4f10..1cb54e7b9c 100755 --- a/jobs/JAQM_ICS +++ b/jobs/JAQM_ICS @@ -19,7 +19,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . 
$USHaqm/source_util_funcs.sh -source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_LBCS b/jobs/JAQM_LBCS index 51c25068b3..8e5515a4aa 100755 --- a/jobs/JAQM_LBCS +++ b/jobs/JAQM_LBCS @@ -19,7 +19,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh -source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT=${DATAROOT:-${DATAROOT_dfv}} export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_MAKE_ICS b/jobs/JAQM_MAKE_ICS index 2c93a17252..df8cf5a99c 100755 --- a/jobs/JAQM_MAKE_ICS +++ b/jobs/JAQM_MAKE_ICS @@ -19,7 +19,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . ${USHaqm}/source_util_funcs.sh -source_config_for_task "task_make_ics|task_get_extrn_ics" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_MAKE_LBCS b/jobs/JAQM_MAKE_LBCS index 296c40a736..566296b26d 100755 --- a/jobs/JAQM_MAKE_LBCS +++ b/jobs/JAQM_MAKE_LBCS @@ -20,7 +20,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh -source_config_for_task "task_make_lbcs|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_NEXUS_EMISSION b/jobs/JAQM_NEXUS_EMISSION index 5c97b8c999..d9b4e4e5f6 100755 --- a/jobs/JAQM_NEXUS_EMISSION +++ b/jobs/JAQM_NEXUS_EMISSION @@ -19,8 +19,6 @@ export outid=${outid:-"LL$job"} . 
$USHaqm/source_util_funcs.sh -source_config_for_task "task_run_fcst|cpl_aqm_parm|task_nexus_emission|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP} - export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_NEXUS_POST_SPLIT b/jobs/JAQM_NEXUS_POST_SPLIT index a2a6eb5be4..e48efea9ac 100755 --- a/jobs/JAQM_NEXUS_POST_SPLIT +++ b/jobs/JAQM_NEXUS_POST_SPLIT @@ -18,7 +18,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_POINT_SOURCE b/jobs/JAQM_POINT_SOURCE index 3035be820e..d205fee77a 100755 --- a/jobs/JAQM_POINT_SOURCE +++ b/jobs/JAQM_POINT_SOURCE @@ -16,7 +16,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh -source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_POST b/jobs/JAQM_POST index 12c8048827..a7f3e15e5c 100755 --- a/jobs/JAQM_POST +++ b/jobs/JAQM_POST @@ -18,7 +18,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh -source_config_for_task "task_run_post|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_POST_STAT_O3 b/jobs/JAQM_POST_STAT_O3 index 656bc38500..5074f3ac2e 100755 --- a/jobs/JAQM_POST_STAT_O3 +++ b/jobs/JAQM_POST_STAT_O3 @@ -14,7 +14,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . 
$USHaqm/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_POST_STAT_PM25 b/jobs/JAQM_POST_STAT_PM25 index b382313f39..bdbdbdd2dd 100755 --- a/jobs/JAQM_POST_STAT_PM25 +++ b/jobs/JAQM_POST_STAT_PM25 @@ -16,7 +16,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_PRE_POST_STAT b/jobs/JAQM_PRE_POST_STAT index 4ad738e22a..2b3b03585b 100755 --- a/jobs/JAQM_PRE_POST_STAT +++ b/jobs/JAQM_PRE_POST_STAT @@ -15,9 +15,7 @@ export pid=${pid:-$$} export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh - . $USHaqm/source_util_funcs.sh -source_config_for_task "task_pre_post_stat" ${GLOBAL_VAR_DEFNS_FP} export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/modulefiles/tasks/.gitignore b/modulefiles/tasks/.gitignore deleted file mode 100644 index ba3584b344..0000000000 --- a/modulefiles/tasks/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -make_orog -make_ics -make_lbcs -make_sfc_climo -run_fcst -run_post diff --git a/modulefiles/tasks/wcoss2/aqm_ics.local.lua b/modulefiles/tasks/wcoss2/aqm_ics.local.lua deleted file mode 100644 index bbff3a76ba..0000000000 --- a/modulefiles/tasks/wcoss2/aqm_ics.local.lua +++ /dev/null @@ -1,6 +0,0 @@ -load("python_regional_workflow") - -load(pathJoin("udunits", os.getenv("udunits_ver"))) -load(pathJoin("gsl", os.getenv("gsl_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("nco", os.getenv("nco_ver"))) diff --git a/modulefiles/tasks/wcoss2/aqm_lbcs.local.lua b/modulefiles/tasks/wcoss2/aqm_lbcs.local.lua deleted file mode 100644 index 
ea12b4c318..0000000000 --- a/modulefiles/tasks/wcoss2/aqm_lbcs.local.lua +++ /dev/null @@ -1,27 +0,0 @@ -load("python_regional_workflow") - -load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) -load(pathJoin("cmake", os.getenv("cmake_ver"))) - -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("bacio", os.getenv("bacio_ver"))) -load(pathJoin("bufr", os.getenv("bufr_ver"))) -load(pathJoin("g2", os.getenv("g2_ver"))) -load(pathJoin("libpng", os.getenv("libpng_ver"))) -load(pathJoin("nemsio", os.getenv("nemsio_ver"))) -load(pathJoin("nemsiogfs", os.getenv("nemsiogfs_ver"))) -load(pathJoin("sigio", os.getenv("sigio_ver"))) -load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) -load(pathJoin("zlib", os.getenv("zlib_ver"))) -load(pathJoin("jasper", os.getenv("jasper_ver"))) -load(pathJoin("w3nco", os.getenv("w3nco_ver"))) -load(pathJoin("w3emc", os.getenv("w3emc_ver"))) - -load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) - -load(pathJoin("udunits", os.getenv("udunits_ver"))) -load(pathJoin("gsl", os.getenv("gsl_ver"))) -load(pathJoin("nco", os.getenv("nco_ver"))) diff --git a/modulefiles/tasks/wcoss2/aqm_lbcs.local.lua_jh b/modulefiles/tasks/wcoss2/aqm_lbcs.local.lua_jh deleted file mode 100644 index e0e08a6aa5..0000000000 --- a/modulefiles/tasks/wcoss2/aqm_lbcs.local.lua_jh +++ /dev/null @@ -1,27 +0,0 @@ -load("python_regional_workflow") - -load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) -load(pathJoin("cmake", os.getenv("cmake_ver"))) - -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("bacio", os.getenv("bacio_ver"))) -load(pathJoin("bufr", os.getenv("bufr_ver"))) -load(pathJoin("g2", 
os.getenv("g2_ver"))) -load(pathJoin("libpng", os.getenv("libpng_ver"))) -load(pathJoin("w3emc", os.getenv("w3emc_ver"))) -load(pathJoin("nemsio", os.getenv("nemsio_ver"))) -load(pathJoin("nemsiogfs", os.getenv("nemsiogfs_ver"))) -load(pathJoin("sigio", os.getenv("sigio_ver"))) -load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) -load(pathJoin("zlib", os.getenv("zlib_ver"))) -load(pathJoin("jasper", os.getenv("jasper_ver"))) - -load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) - -load(pathJoin("udunits", os.getenv("udunits_ver"))) -load(pathJoin("gsl", os.getenv("gsl_ver"))) -load(pathJoin("nco", os.getenv("nco_ver"))) -load(pathJoin("w3nco", os.getenv("w3nco_ver"))) diff --git a/modulefiles/tasks/wcoss2/aqm_manager.local.lua b/modulefiles/tasks/wcoss2/aqm_manager.local.lua deleted file mode 100644 index 3370fa018b..0000000000 --- a/modulefiles/tasks/wcoss2/aqm_manager.local.lua +++ /dev/null @@ -1 +0,0 @@ -load("python_regional_workflow") diff --git a/modulefiles/tasks/wcoss2/bias_correction_o3.local.lua b/modulefiles/tasks/wcoss2/bias_correction_o3.local.lua deleted file mode 100644 index 579886582b..0000000000 --- a/modulefiles/tasks/wcoss2/bias_correction_o3.local.lua +++ /dev/null @@ -1,17 +0,0 @@ -load("python_regional_workflow") - -load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) -load(pathJoin("cmake", os.getenv("cmake_ver"))) - -load(pathJoin("hdf5", os.getenv("hdf5_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("libjpeg", os.getenv("libjpeg_ver"))) -load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) -load(pathJoin("bufr", os.getenv("bufr_ver"))) - -load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) -load(pathJoin("grib_util", os.getenv("grib_util_ver"))) -load(pathJoin("prod_util", os.getenv("prod_util_ver"))) diff --git 
a/modulefiles/tasks/wcoss2/bias_correction_pm25.local.lua b/modulefiles/tasks/wcoss2/bias_correction_pm25.local.lua deleted file mode 100644 index 579886582b..0000000000 --- a/modulefiles/tasks/wcoss2/bias_correction_pm25.local.lua +++ /dev/null @@ -1,17 +0,0 @@ -load("python_regional_workflow") - -load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) -load(pathJoin("cmake", os.getenv("cmake_ver"))) - -load(pathJoin("hdf5", os.getenv("hdf5_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("libjpeg", os.getenv("libjpeg_ver"))) -load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) -load(pathJoin("bufr", os.getenv("bufr_ver"))) - -load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) -load(pathJoin("grib_util", os.getenv("grib_util_ver"))) -load(pathJoin("prod_util", os.getenv("prod_util_ver"))) diff --git a/modulefiles/tasks/wcoss2/data_cleanup.local.lua b/modulefiles/tasks/wcoss2/data_cleanup.local.lua deleted file mode 100644 index 3370fa018b..0000000000 --- a/modulefiles/tasks/wcoss2/data_cleanup.local.lua +++ /dev/null @@ -1 +0,0 @@ -load("python_regional_workflow") diff --git a/modulefiles/tasks/wcoss2/fire_emission.local.lua b/modulefiles/tasks/wcoss2/fire_emission.local.lua deleted file mode 100644 index bbff3a76ba..0000000000 --- a/modulefiles/tasks/wcoss2/fire_emission.local.lua +++ /dev/null @@ -1,6 +0,0 @@ -load("python_regional_workflow") - -load(pathJoin("udunits", os.getenv("udunits_ver"))) -load(pathJoin("gsl", os.getenv("gsl_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("nco", os.getenv("nco_ver"))) diff --git a/modulefiles/tasks/wcoss2/get_extrn_ics.local.lua b/modulefiles/tasks/wcoss2/get_extrn_ics.local.lua deleted file mode 100644 index 3370fa018b..0000000000 --- a/modulefiles/tasks/wcoss2/get_extrn_ics.local.lua +++ /dev/null @@ -1 
+0,0 @@ -load("python_regional_workflow") diff --git a/modulefiles/tasks/wcoss2/get_extrn_lbcs.local.lua b/modulefiles/tasks/wcoss2/get_extrn_lbcs.local.lua deleted file mode 100644 index 3370fa018b..0000000000 --- a/modulefiles/tasks/wcoss2/get_extrn_lbcs.local.lua +++ /dev/null @@ -1 +0,0 @@ -load("python_regional_workflow") diff --git a/modulefiles/tasks/wcoss2/make_grid.local.lua b/modulefiles/tasks/wcoss2/make_grid.local.lua deleted file mode 100644 index 29f7604ddf..0000000000 --- a/modulefiles/tasks/wcoss2/make_grid.local.lua +++ /dev/null @@ -1,26 +0,0 @@ -load("python_regional_workflow") - -load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) -load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) -load(pathJoin("cmake", os.getenv("cmake_ver"))) -load(pathJoin("libjpeg", os.getenv("libjpeg_ver"))) -load(pathJoin("zlib", os.getenv("zlib_ver"))) -load(pathJoin("libpng", os.getenv("libpng_ver"))) -load(pathJoin("hdf5", os.getenv("hdf5_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("bacio", os.getenv("bacio_ver"))) -load(pathJoin("sfcio", os.getenv("sfcio_ver"))) -load(pathJoin("w3emc", os.getenv("w3emc_ver"))) -load(pathJoin("nemsio", os.getenv("nemsio_ver"))) -load(pathJoin("sigio", os.getenv("sigio_ver"))) -load(pathJoin("sp", os.getenv("sp_ver"))) -load(pathJoin("ip", os.getenv("ip_ver"))) -load(pathJoin("g2", os.getenv("g2_ver"))) -load(pathJoin("esmf", os.getenv("esmf_ver"))) - -load(pathJoin("udunits", os.getenv("udunits_ver"))) -load(pathJoin("gsl", os.getenv("gsl_ver"))) -load(pathJoin("nco", os.getenv("nco_ver"))) diff --git a/modulefiles/tasks/wcoss2/make_ics.local.lua b/modulefiles/tasks/wcoss2/make_ics.local.lua deleted file mode 100644 index c1ebb8d74c..0000000000 --- a/modulefiles/tasks/wcoss2/make_ics.local.lua +++ /dev/null @@ -1,17 +0,0 @@ 
-load("python_regional_workflow") - -load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) -load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) - -load(pathJoin("libjpeg", os.getenv("libjpeg_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("bacio", os.getenv("bacio_ver"))) -load(pathJoin("w3emc", os.getenv("w3emc_ver"))) -load(pathJoin("w3nco", os.getenv("w3nco_ver"))) -load(pathJoin("nemsio", os.getenv("nemsio_ver"))) -load(pathJoin("udunits", os.getenv("udunits_ver"))) -load(pathJoin("gsl", os.getenv("gsl_ver"))) -load(pathJoin("nco", os.getenv("nco_ver"))) diff --git a/modulefiles/tasks/wcoss2/make_lbcs.local.lua b/modulefiles/tasks/wcoss2/make_lbcs.local.lua deleted file mode 100644 index c1ebb8d74c..0000000000 --- a/modulefiles/tasks/wcoss2/make_lbcs.local.lua +++ /dev/null @@ -1,17 +0,0 @@ -load("python_regional_workflow") - -load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) -load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) - -load(pathJoin("libjpeg", os.getenv("libjpeg_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("bacio", os.getenv("bacio_ver"))) -load(pathJoin("w3emc", os.getenv("w3emc_ver"))) -load(pathJoin("w3nco", os.getenv("w3nco_ver"))) -load(pathJoin("nemsio", os.getenv("nemsio_ver"))) -load(pathJoin("udunits", os.getenv("udunits_ver"))) -load(pathJoin("gsl", os.getenv("gsl_ver"))) -load(pathJoin("nco", os.getenv("nco_ver"))) diff --git a/modulefiles/tasks/wcoss2/make_orog.local.lua b/modulefiles/tasks/wcoss2/make_orog.local.lua deleted file mode 100644 index bec0587dfb..0000000000 --- a/modulefiles/tasks/wcoss2/make_orog.local.lua +++ /dev/null @@ -1,27 
+0,0 @@ -load("python_regional_workflow") - -load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) -load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) -load(pathJoin("cmake", os.getenv("cmake_ver"))) -load(pathJoin("libjpeg", os.getenv("libjpeg_ver"))) -load(pathJoin("zlib", os.getenv("zlib_ver"))) -load(pathJoin("libpng", os.getenv("libpng_ver"))) -load(pathJoin("hdf5", os.getenv("hdf5_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("bacio", os.getenv("bacio_ver"))) -load(pathJoin("sfcio", os.getenv("sfcio_ver"))) -load(pathJoin("w3emc", os.getenv("w3emc_ver"))) -load(pathJoin("nemsio", os.getenv("nemsio_ver"))) -load(pathJoin("sigio", os.getenv("sigio_ver"))) -load(pathJoin("sp", os.getenv("sp_ver"))) -load(pathJoin("ip", os.getenv("ip_ver"))) -load(pathJoin("g2", os.getenv("g2_ver"))) -load(pathJoin("esmf", os.getenv("esmf_ver"))) - -load(pathJoin("udunits", os.getenv("udunits_ver"))) -load(pathJoin("gsl", os.getenv("gsl_ver"))) -load(pathJoin("nco", os.getenv("nco_ver"))) - diff --git a/modulefiles/tasks/wcoss2/make_sfc_climo.local.lua b/modulefiles/tasks/wcoss2/make_sfc_climo.local.lua deleted file mode 100644 index 29f7604ddf..0000000000 --- a/modulefiles/tasks/wcoss2/make_sfc_climo.local.lua +++ /dev/null @@ -1,26 +0,0 @@ -load("python_regional_workflow") - -load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) -load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) -load(pathJoin("cmake", os.getenv("cmake_ver"))) -load(pathJoin("libjpeg", os.getenv("libjpeg_ver"))) -load(pathJoin("zlib", os.getenv("zlib_ver"))) -load(pathJoin("libpng", os.getenv("libpng_ver"))) -load(pathJoin("hdf5", 
os.getenv("hdf5_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("bacio", os.getenv("bacio_ver"))) -load(pathJoin("sfcio", os.getenv("sfcio_ver"))) -load(pathJoin("w3emc", os.getenv("w3emc_ver"))) -load(pathJoin("nemsio", os.getenv("nemsio_ver"))) -load(pathJoin("sigio", os.getenv("sigio_ver"))) -load(pathJoin("sp", os.getenv("sp_ver"))) -load(pathJoin("ip", os.getenv("ip_ver"))) -load(pathJoin("g2", os.getenv("g2_ver"))) -load(pathJoin("esmf", os.getenv("esmf_ver"))) - -load(pathJoin("udunits", os.getenv("udunits_ver"))) -load(pathJoin("gsl", os.getenv("gsl_ver"))) -load(pathJoin("nco", os.getenv("nco_ver"))) diff --git a/modulefiles/tasks/wcoss2/nexus_emission.local.lua b/modulefiles/tasks/wcoss2/nexus_emission.local.lua deleted file mode 100644 index 84e930a7c4..0000000000 --- a/modulefiles/tasks/wcoss2/nexus_emission.local.lua +++ /dev/null @@ -1,17 +0,0 @@ -load("python_regional_workflow") - -load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) -load(pathJoin("cmake", os.getenv("cmake_ver"))) - -load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) - -load(pathJoin("hdf5", os.getenv("hdf5_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("esmf", os.getenv("esmf_ver"))) - -load(pathJoin("udunits", os.getenv("udunits_ver"))) -load(pathJoin("gsl", os.getenv("gsl_ver"))) -load(pathJoin("nco", os.getenv("nco_ver"))) diff --git a/modulefiles/tasks/wcoss2/nexus_gfs_sfc.local.lua b/modulefiles/tasks/wcoss2/nexus_gfs_sfc.local.lua deleted file mode 100644 index 3370fa018b..0000000000 --- a/modulefiles/tasks/wcoss2/nexus_gfs_sfc.local.lua +++ /dev/null @@ -1 +0,0 @@ -load("python_regional_workflow") diff --git a/modulefiles/tasks/wcoss2/nexus_post_split.local.lua b/modulefiles/tasks/wcoss2/nexus_post_split.local.lua deleted file mode 100644 index 
e0846a5115..0000000000 --- a/modulefiles/tasks/wcoss2/nexus_post_split.local.lua +++ /dev/null @@ -1,8 +0,0 @@ -load("python_regional_workflow") - -load(pathJoin("hdf5", os.getenv("hdf5_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) - -load(pathJoin("udunits", os.getenv("udunits_ver"))) -load(pathJoin("gsl", os.getenv("gsl_ver"))) -load(pathJoin("nco", os.getenv("nco_ver"))) diff --git a/modulefiles/tasks/wcoss2/point_source.local.lua b/modulefiles/tasks/wcoss2/point_source.local.lua deleted file mode 100644 index 3370fa018b..0000000000 --- a/modulefiles/tasks/wcoss2/point_source.local.lua +++ /dev/null @@ -1 +0,0 @@ -load("python_regional_workflow") diff --git a/modulefiles/tasks/wcoss2/post_stat_o3.local.lua b/modulefiles/tasks/wcoss2/post_stat_o3.local.lua deleted file mode 100644 index 579886582b..0000000000 --- a/modulefiles/tasks/wcoss2/post_stat_o3.local.lua +++ /dev/null @@ -1,17 +0,0 @@ -load("python_regional_workflow") - -load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) -load(pathJoin("cmake", os.getenv("cmake_ver"))) - -load(pathJoin("hdf5", os.getenv("hdf5_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("libjpeg", os.getenv("libjpeg_ver"))) -load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) -load(pathJoin("bufr", os.getenv("bufr_ver"))) - -load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) -load(pathJoin("grib_util", os.getenv("grib_util_ver"))) -load(pathJoin("prod_util", os.getenv("prod_util_ver"))) diff --git a/modulefiles/tasks/wcoss2/post_stat_pm25.local.lua b/modulefiles/tasks/wcoss2/post_stat_pm25.local.lua deleted file mode 100644 index 579886582b..0000000000 --- a/modulefiles/tasks/wcoss2/post_stat_pm25.local.lua +++ /dev/null @@ -1,17 +0,0 @@ -load("python_regional_workflow") - -load(pathJoin("PrgEnv-intel", 
os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) -load(pathJoin("cmake", os.getenv("cmake_ver"))) - -load(pathJoin("hdf5", os.getenv("hdf5_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("libjpeg", os.getenv("libjpeg_ver"))) -load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) -load(pathJoin("bufr", os.getenv("bufr_ver"))) - -load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) -load(pathJoin("grib_util", os.getenv("grib_util_ver"))) -load(pathJoin("prod_util", os.getenv("prod_util_ver"))) diff --git a/modulefiles/tasks/wcoss2/pre_post_stat.local.lua b/modulefiles/tasks/wcoss2/pre_post_stat.local.lua deleted file mode 100644 index bbff3a76ba..0000000000 --- a/modulefiles/tasks/wcoss2/pre_post_stat.local.lua +++ /dev/null @@ -1,6 +0,0 @@ -load("python_regional_workflow") - -load(pathJoin("udunits", os.getenv("udunits_ver"))) -load(pathJoin("gsl", os.getenv("gsl_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("nco", os.getenv("nco_ver"))) diff --git a/modulefiles/tasks/wcoss2/python_regional_workflow.lua b/modulefiles/tasks/wcoss2/python_regional_workflow.lua deleted file mode 100644 index 0a3bc3207b..0000000000 --- a/modulefiles/tasks/wcoss2/python_regional_workflow.lua +++ /dev/null @@ -1,5 +0,0 @@ -load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("python", os.getenv("python_ver"))) -load(pathJoin("prod_util", os.getenv("prod_util_ver"))) diff --git a/modulefiles/tasks/wcoss2/run_fcst.local.lua b/modulefiles/tasks/wcoss2/run_fcst.local.lua deleted file mode 100644 index 180f3063ed..0000000000 --- a/modulefiles/tasks/wcoss2/run_fcst.local.lua +++ /dev/null @@ -1,27 +0,0 @@ -load("python_regional_workflow") - -unload("cray_mpich") -unload("netcdf") - 
-load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) -load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) -load(pathJoin("cmake", os.getenv("cmake_ver"))) -load(pathJoin("jasper", os.getenv("jasper_ver"))) -load(pathJoin("zlib", os.getenv("zlib_ver"))) -load(pathJoin("libpng", os.getenv("libpng_ver"))) -load(pathJoin("hdf5", os.getenv("hdf5_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("fms", os.getenv("fms_ver"))) -load(pathJoin("bacio", os.getenv("bacio_ver"))) -load(pathJoin("crtm", os.getenv("crtm_ver"))) -load(pathJoin("g2", os.getenv("g2_ver"))) -load(pathJoin("g2tmpl", os.getenv("g2tmpl_ver"))) -load(pathJoin("ip", os.getenv("ip_ver"))) -load(pathJoin("sp", os.getenv("sp_ver"))) -load(pathJoin("w3emc", os.getenv("w3emc_ver"))) -load(pathJoin("pio", os.getenv("pio_ver"))) -load(pathJoin("esmf", os.getenv("esmf_ver"))) -load(pathJoin("libjpeg", os.getenv("libjpeg_ver"))) diff --git a/modulefiles/tasks/wcoss2/run_post.local.lua b/modulefiles/tasks/wcoss2/run_post.local.lua deleted file mode 100644 index 7dbf44d15a..0000000000 --- a/modulefiles/tasks/wcoss2/run_post.local.lua +++ /dev/null @@ -1,14 +0,0 @@ -load("python_regional_workflow") - -unload("PrgEnv-intel") -unload("netcdf") - -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) -load(pathJoin("craype", os.getenv("craype_ver"))) -load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) -load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) -load(pathJoin("hdf5", os.getenv("hdf5_ver"))) -load(pathJoin("netcdf", os.getenv("netcdf_ver"))) -load(pathJoin("libjpeg", os.getenv("libjpeg_ver"))) -load(pathJoin("prod_util", os.getenv("prod_util_ver"))) diff --git a/parm/diag_table_aqm.FV3_GFS_v15p2 b/parm/diag_table_aqm.FV3_GFS_v15p2 deleted file mode 
100644 index feff256c3f..0000000000 --- a/parm/diag_table_aqm.FV3_GFS_v15p2 +++ /dev/null @@ -1,511 +0,0 @@ -{{ starttime.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional -{{ starttime.strftime("%Y %m %d %H %M %S") }} - -"grid_spec", -1, "months", 1, "days", "time" -#"atmos_4xdaily", 6, "hours", 1, "days", "time" -"atmos_static", -1, "hours", 1, "hours", "time" -"fv3_history", 1, "years", 1, "hours", "time" -"fv3_history2d", 1, "years", 1, "hours", "time" - -# -#======================= -# ATMOSPHERE DIAGNOSTICS -#======================= -### -# grid_spec -### - "dynamics", "grid_lon", "grid_lon", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_lat", "grid_lat", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_lont", "grid_lont", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_latt", "grid_latt", "grid_spec", "all", .false., "none", 2, - "dynamics", "area", "area", "grid_spec", "all", .false., "none", 2, -### -# 4x daily output -### -# "dynamics", "slp", "slp", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "vort850", "vort850", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "vort200", "vort200", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "us", "us", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "u1000", "u1000", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "u850", "u850", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "u700", "u700", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "u500", "u500", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "u200", "u200", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "u100", "u100", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "u50", "u50", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "u10", "u10", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "vs", "vs", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "v1000", "v1000", 
"atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "v850", "v850", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "v700", "v700", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "v500", "v500", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "v200", "v200", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "v100", "v100", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "v50", "v50", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "v10", "v10", "atmos_4xdaily", "all", .false., "none", 2 -#### -# "dynamics", "tm", "tm", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "t1000", "t1000", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "t850", "t850", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "t700", "t700", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "t500", "t500", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "t200", "t200", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "t100", "t100", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "t50", "t50", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "t10", "t10", "atmos_4xdaily", "all", .false., "none", 2 -#### -# "dynamics", "h1000", "h1000", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "h850", "h850", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "h700", "h700", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "h500", "h500", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "h200", "h200", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "h100", "h100", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "h50", "h50", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "h10", "h10", "atmos_4xdaily", "all", .false., "none", 2 -#### -#"dynamics", "w1000", "w1000", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "w850", "w850", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "w700", 
"w700", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "w500", "w500", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "w200", "w200", "atmos_4xdaily", "all", .false., "none", 2 -#### -# "dynamics", "q1000", "q1000", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "q850", "q850", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "q700", "q700", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "q500", "q500", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "q200", "q200", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "q100", "q100", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "q50", "q50", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "q10", "q10", "atmos_4xdaily", "all", .false., "none", 2 -#### -# "dynamics", "rh1000", "rh1000", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "rh850", "rh850", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "rh700", "rh700", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "rh500", "rh500", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "rh200", "rh200", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "omg1000", "omg1000", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "omg850", "omg850", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "omg700", "omg700", "atmos_4xdaily", "all", .false., "none", 2 -## "dynamics", "omg500", "omg500", "atmos_4xdaily", "all", .false., "none", 2 -## "dynamics", "omg200", "omg200", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "omg100", "omg100", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "omg50", "omg50", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "omg10", "omg10", "atmos_4xdaily", "all", .false., "none", 2 -### -# gfs static data -### - "dynamics", "pk", "pk", "atmos_static", "all", .false., "none", 2 - "dynamics", "bk", "bk", "atmos_static", "all", .false., "none", 2 - "dynamics", "hyam", 
"hyam", "atmos_static", "all", .false., "none", 2 - "dynamics", "hybm", "hybm", "atmos_static", "all", .false., "none", 2 - "dynamics", "zsurf", "zsurf", "atmos_static", "all", .false., "none", 2 -### -# FV3 variabls needed for NGGPS evaluation -### -"gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "sphum", "spfh", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "temp", "tmp", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "liq_wat", "clwmr", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "o3mr", "o3mr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "delp", "dpres", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "delz", "delz", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ice_wat", "icmr", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "rainwat", "rwmr", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "snowwat", "snmr", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "graupel", "grle", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ice_nc", "nicp", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "rain_nc", "ntrnc", "fv3_history", "all", .false., "none", 2 - -"gfs_phys", "ALBDO_ave", "albdo_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cnvprcp_ave", "cprat_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cnvprcpb_ave", "cpratb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "totprcp_ave", "prate_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "totprcpb_ave", "prateb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DLWRF", "dlwrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DLWRFI", "dlwrf", 
"fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ULWRF", "ulwrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ULWRFI", "ulwrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRF", "dswrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRFI", "dswrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRF", "uswrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRFI", "uswrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRFtoa", "dswrf_avetoa","fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRFtoa", "uswrf_avetoa","fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ULWRFtoa", "ulwrf_avetoa","fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "gflux_ave", "gflux_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "hpbl", "hpbl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "lhtfl_ave", "lhtfl_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "shtfl_ave", "shtfl_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pwat", "pwat", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "soilm", "soilm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_aveclm", "tcdc_aveclm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avebndcl", "tcdc_avebndcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avehcl", "tcdc_avehcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avelcl", "tcdc_avelcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avemcl", "tcdc_avemcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDCcnvcl", "tcdccnvcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PREScnvclt", "prescnvclt", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PREScnvclb", "prescnvclb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avehct", "pres_avehct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", 
"PRES_avehcb", "pres_avehcb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avehct", "tmp_avehct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avemct", "pres_avemct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avemcb", "pres_avemcb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avemct", "tmp_avemct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avelct", "pres_avelct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avelcb", "pres_avelcb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avelct", "tmp_avelct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "u-gwd_ave", "u-gwd_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "v-gwd_ave", "v-gwd_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "dusfc", "uflx_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "dvsfc", "vflx_ave", "fv3_history2d", "all", .false., "none", 2 -#"gfs_phys", "cnvw", "cnvcldwat", "fv3_history2d", "all", .false., "none", 2 - -"gfs_phys", "psurf", "pressfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "u10m", "ugrd10m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "v10m", "vgrd10m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "crain", "crain", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tprcp", "tprcp", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "hgtsfc", "orog", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "weasd", "weasd", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "f10m", "f10m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "q2m", "spfh2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "t2m", "tmp2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tsfc", "tmpsfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "vtype", "vtype", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "stype", "sotyp", "fv3_history2d", 
"all", .false., "none", 2 -"gfs_sfc", "slmsksfc", "land", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "vfracsfc", "veg", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "zorlsfc", "sfcr", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "uustar", "fricv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt1", "soilt1" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt2", "soilt2" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt3", "soilt3" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt4", "soilt4" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw1", "soilw1" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw2", "soilw2" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw3", "soilw3" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw4", "soilw4" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_1", "soill1", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_2", "soill2", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_3", "soill3", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_4", "soill4", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slope", "sltyp", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alnsf", "alnsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alnwf", "alnwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alvsf", "alvsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alvwf", "alvwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "canopy", "cnwat", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "facsf", "facsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "facwf", "facwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "ffhh", "ffhh", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "ffmm", "ffmm", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "fice", "icec", "fv3_history2d", "all", .false., 
"none", 2 -"gfs_sfc", "hice", "icetk", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "snoalb", "snoalb", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "shdmax", "shdmax", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "shdmin", "shdmin", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "snowd", "snod", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tg3", "tg3", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tisfc", "tisfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tref", "tref", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "z_c", "zc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "c_0", "c0", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "c_d", "cd", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "w_0", "w0", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "w_d", "wd", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xt", "xt", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xz", "xz", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "dt_cool", "dtcool", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xs", "xs", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xu", "xu", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xv", "xv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xtts", "xtts", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xzts", "xzts", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "d_conv", "dconv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "qrain", "qrain", "fv3_history2d", "all", .false., "none", 2 - -"gfs_phys", "acond", "acond", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cduvb_ave", "cduvb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cpofp", "cpofp", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "duvb_ave", "duvb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csdlf_ave", "csdlf", "fv3_history2d", 
"all", .false., "none", 2 -"gfs_phys", "csusf_ave", "csusf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csusf_avetoa", "csusftoa", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csdsf_ave", "csdsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csulf_ave", "csulf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csulf_avetoa", "csulftoa", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cwork_ave", "cwork_aveclm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "evbs_ave", "evbs_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "evcw_ave", "evcw_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "fldcp", "fldcp", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "hgt_hyblev1", "hgt_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfh_hyblev1", "spfh_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ugrd_hyblev1", "ugrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vgrd_hyblev1", "vgrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "tmp_hyblev1", "tmp_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "gfluxi", "gflux", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "lhtfl", "lhtfl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "shtfl", "shtfl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pevpr", "pevpr", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pevpr_ave", "pevpr_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sbsno_ave", "sbsno_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sfexc", "sfexc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "snohf", "snohf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "snowc_ave", "snowc_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfhmax2m", "spfhmax_max2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfhmin2m", 
"spfhmin_min2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "tmpmax2m", "tmax_max2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "tmpmin2m", "tmin_min2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ssrun_acc", "ssrun_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sunsd_acc", "sunsd_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "watr_acc", "watr_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "wilt", "wilt", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vbdsf_ave", "vbdsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vddsf_ave", "vddsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "nbdsf_ave", "nbdsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "nddsf_ave", "nddsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "trans_ave", "trans_ave", "fv3_history2d", "all", .false., "none", 2 -# Stochastic physics -"gfs_phys", "sppt_wts", "sppt_wts", "fv3_history", "all", .false., "none", 2 -"gfs_phys", "skebu_wts", "skebu_wts", "fv3_history", "all", .false., "none", 2 -"gfs_phys", "skebv_wts", "skebv_wts", "fv3_history", "all", .false., "none", 2 -"dynamics", "diss_est", "diss_est", "fv3_history", "all", .false., "none", 2 -"gfs_phys", "shum_wts", "shum_wts", "fv3_history", "all", .false., "none", 2 - -### -# chemical tracers advected by FV3 -### -#"gfs_dyn", "no2", "no2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "no", "no", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "o3", "o3", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "no3", "no3", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "h2o2", "h2o2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "n2o5", "n2o5", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "hno3", "hno3", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "hono", "hono", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "pna", "pna", 
"fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "so2", "so2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sulf", "sulf", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "pan", "pan", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "pacd", "pacd", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aacd", "aacd", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ald2", "ald2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "panx", "panx", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "form", "form", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "mepx", "mepx", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "meoh", "meoh", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "rooh", "rooh", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ntr1", "ntr1", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ntr2", "ntr2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "facd", "facd", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "co", "co", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aldx", "aldx", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "glyd", "glyd", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "gly", "gly", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "mgly", "mgly", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "etha", "etha", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "etoh", "etoh", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ket", "ket", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "par", "par", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "acet", "acet", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "prpa", "prpa", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ethy", "ethy", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "eth", "eth", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ole", "ole", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", 
"iole", "iole", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "isop", "isop", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ispd", "ispd", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "intr", "intr", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ispx", "ispx", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "hpld", "hpld", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "opo3", "opo3", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "epox", "epox", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "terp", "terp", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "benzene", "benzene", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "cres", "cres", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "open", "open", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "tol", "tol", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "xopn", "xopn", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "xylmn", "xylmn", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "naph", "naph", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "cat1", "cat1", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "cron", "cron", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "opan", "opan", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ech4", "ech4", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "cl2", "cl2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "hocl", "hocl", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "fmcl", "fmcl", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "hcl", "hcl", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "clno2", "clno2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sesq", "sesq", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "soaalk", "soaalk", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "vlvpo1", "vlvpo1", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "vsvpo1", "vsvpo1", 
"fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "vsvpo2", "vsvpo2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "vsvpo3", "vsvpo3", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "vivpo1", "vivpo1", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "vlvoo1", "vlvoo1", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "vlvoo2", "vlvoo2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "vsvoo1", "vsvoo1", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "vsvoo2", "vsvoo2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "vsvoo3", "vsvoo3", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "pcvoc", "pcvoc", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "form_primary", "form_primary", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ald2_primary", "ald2_primary", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "butadiene13", "butadiene13", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "acrolein", "acrolein", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "acro_primary", "acro_primary", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "tolu", "tolu", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "hg", "hg", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "hgiigas", "hgiigas", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aso4j", "aso4j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aso4i", "aso4i", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "anh4j", "anh4j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "anh4i", "anh4i", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ano3j", "ano3j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ano3i", "ano3i", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aalk1j", "aalk1j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aalk2j", "aalk2j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "axyl1j", "axyl1j", "fv3_history", "all", .false., "none", 2 
-#"gfs_dyn", "axyl2j", "axyl2j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "axyl3j", "axyl3j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "atol1j", "atol1j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "atol2j", "atol2j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "atol3j", "atol3j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "abnz1j", "abnz1j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "abnz2j", "abnz2j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "abnz3j", "abnz3j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "apah1j", "apah1j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "apah2j", "apah2j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "apah3j", "apah3j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "atrp1j", "atrp1j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "atrp2j", "atrp2j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aiso1j", "aiso1j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aiso2j", "aiso2j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "asqtj", "asqtj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aorgcj", "aorgcj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aecj", "aecj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aeci", "aeci", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aothrj", "aothrj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aothri", "aothri", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "afej", "afej", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aalj", "aalj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "asij", "asij", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "atij", "atij", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "acaj", "acaj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "amgj", "amgj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "akj", "akj", 
"fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "amnj", "amnj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "acors", "acors", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "asoil", "asoil", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "numatkn", "numatkn", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "numacc", "numacc", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "numcor", "numcor", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "srfatkn", "srfatkn", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "srfacc", "srfacc", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "srfcor", "srfcor", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ah2oj", "ah2oj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ah2oi", "ah2oi", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ah3opj", "ah3opj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ah3opi", "ah3opi", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "anaj", "anaj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "anai", "anai", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aclj", "aclj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "acli", "acli", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aseacat", "aseacat", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aclk", "aclk", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aso4k", "aso4k", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "anh4k", "anh4k", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ano3k", "ano3k", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ah2ok", "ah2ok", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ah3opk", "ah3opk", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aiso3j", "aiso3j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aolgaj", "aolgaj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aolgbj", "aolgbj", "fv3_history", "all", .false., 
"none", 2 -#"gfs_dyn", "aglyj", "aglyj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "apcsoj", "apcsoj", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "alvpo1i", "alvpo1i", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "asvpo1i", "asvpo1i", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "asvpo2i", "asvpo2i", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "alvpo1j", "alvpo1j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "asvpo1j", "asvpo1j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "asvpo2j", "asvpo2j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "asvpo3j", "asvpo3j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "aivpo1j", "aivpo1j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "alvoo1i", "alvoo1i", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "alvoo2i", "alvoo2i", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "asvoo1i", "asvoo1i", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "asvoo2i", "asvoo2i", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "alvoo1j", "alvoo1j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "alvoo2j", "alvoo2j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "asvoo1j", "asvoo1j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "asvoo2j", "asvoo2j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "asvoo3j", "asvoo3j", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "nh3", "nh3", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_alk1", "sv_alk1", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_alk2", "sv_alk2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_xyl1", "sv_xyl1", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_xyl2", "sv_xyl2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_tol1", "sv_tol1", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_tol2", "sv_tol2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_bnz1", 
"sv_bnz1", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_bnz2", "sv_bnz2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_pah1", "sv_pah1", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_pah2", "sv_pah2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_trp1", "sv_trp1", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_trp2", "sv_trp2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_iso1", "sv_iso1", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_iso2", "sv_iso2", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "sv_sqt", "sv_sqt", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "lv_pcsog", "lv_pcsog", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "o3_ave", "o3_ave", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "no_ave", "no_ave", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "no2_ave", "no2_ave", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "pm25_ave", "pm25_ave", "fv3_history", "all", .false., "none", 2 - -### -# aerosol diagnostics -### -#"gfs_dyn", "pm25at", "pm25at", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "pm25ac", "pm25ac", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "pm25co", "pm25co", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "pm25_tot", "pm25_tot", "fv3_history", "all", .false., "none", 2 -"gfs_phys", "aod", "aod", "fv3_history2d", "all", .false., "none", 2 -#============================================================================================= -# -#====> This file can be used with diag_manager/v2.0a (or higher) <==== -# -# -# FORMATS FOR FILE ENTRIES (not all input values are used) -# ------------------------ -# -#"file_name", output_freq, "output_units", format, "time_units", "long_name", -# -# -#output_freq: > 0 output frequency in "output_units" -# = 0 output frequency every time step -# =-1 output frequency at end of run -# -#output_units = units used for output frequency -# (years, months, 
days, minutes, hours, seconds) -# -#time_units = units used to label the time axis -# (days, minutes, hours, seconds) -# -# -# FORMAT FOR FIELD ENTRIES (not all input values are used) -# ------------------------ -# -#"module_name", "field_name", "output_name", "file_name" "time_sampling", time_avg, "other_opts", packing -# -#time_avg = .true. or .false. -# -#packing = 1 double precision -# = 2 float -# = 4 packed 16-bit integers -# = 8 packed 1-byte (not tested?) diff --git a/parm/field_table_aqm.FV3_GFS_v15p2 b/parm/field_table_aqm.FV3_GFS_v15p2 deleted file mode 100644 index b8b0bee266..0000000000 --- a/parm/field_table_aqm.FV3_GFS_v15p2 +++ /dev/null @@ -1,979 +0,0 @@ -# added by FRE: sphum must be present in atmos -# specific humidity for moist runs - "TRACER", "atmos_mod", "sphum" - "longname", "specific humidity" - "units", "kg/kg" - "profile_type", "fixed", "surface_value=1.e30" / -# prognostic cloud water mixing ratio - "TRACER", "atmos_mod", "liq_wat" - "longname", "cloud water mixing ratio" - "units", "kg/kg" - "profile_type", "fixed", "surface_value=1.e30" / - "TRACER", "atmos_mod", "rainwat" - "longname", "rain mixing ratio" - "units", "kg/kg" - "profile_type", "fixed", "surface_value=1.e30" / - "TRACER", "atmos_mod", "ice_wat" - "longname", "cloud ice mixing ratio" - "units", "kg/kg" - "profile_type", "fixed", "surface_value=1.e30" / - "TRACER", "atmos_mod", "snowwat" - "longname", "snow mixing ratio" - "units", "kg/kg" - "profile_type", "fixed", "surface_value=1.e30" / - "TRACER", "atmos_mod", "graupel" - "longname", "graupel mixing ratio" - "units", "kg/kg" - "profile_type", "fixed", "surface_value=1.e30" / -# prognostic ozone mixing ratio tracer - "TRACER", "atmos_mod", "o3mr" - "longname", "ozone mixing ratio" - "units", "kg/kg" - "profile_type", "fixed", "surface_value=1.e30" / -# prognostic subgrid scale turbulent kinetic energy - "TRACER", "atmos_mod", "sgs_tke" - "longname", "subgrid scale turbulent kinetic energy" - "units", "m2/s2" - 
"profile_type", "fixed", "surface_value=0.0" / -# prognostic air quality tracers - "TRACER", "atmos_mod", "NO2" - "longname", "NO2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "NO" - "longname", "NO" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "O3" - "longname", "O3" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "NO3" - "longname", "NO3" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "H2O2" - "longname", "H2O2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "N2O5" - "longname", "N2O5" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "HNO3" - "longname", "HNO3" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "HONO" - "longname", "HONO" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "PNA" - "longname", "PNA" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SO2" - "longname", "SO2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SULF" - "longname", "SULF" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "PAN" - "longname", "PAN" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "PACD" - "longname", "PACD" - "units", "ppmV" - "tracer_usage", "chemistry" - 
"profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AACD" - "longname", "AACD" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ALD2" - "longname", "ALD2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "PANX" - "longname", "PANX" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "FORM" - "longname", "FORM" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "MEPX" - "longname", "MEPX" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "MEOH" - "longname", "MEOH" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ROOH" - "longname", "ROOH" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "NTR1" - "longname", "NTR1" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "NTR2" - "longname", "NTR2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "FACD" - "longname", "FACD" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "CO" - "longname", "CO" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ALDX" - "longname", "ALDX" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "GLYD" - "longname", "GLYD" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", 
"surface_value=1.e-7" / - "TRACER", "atmos_mod", "GLY" - "longname", "GLY" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "MGLY" - "longname", "MGLY" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ETHA" - "longname", "ETHA" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ETOH" - "longname", "ETOH" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "KET" - "longname", "KET" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "PAR" - "longname", "PAR" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ACET" - "longname", "ACET" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "PRPA" - "longname", "PRPA" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ETHY" - "longname", "ETHY" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ETH" - "longname", "ETH" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "OLE" - "longname", "OLE" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "IOLE" - "longname", "IOLE" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ISOP" - "longname", "ISOP" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - 
"TRACER", "atmos_mod", "ISPD" - "longname", "ISPD" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "INTR" - "longname", "INTR" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ISPX" - "longname", "ISPX" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "HPLD" - "longname", "HPLD" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "OPO3" - "longname", "OPO3" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "EPOX" - "longname", "EPOX" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "TERP" - "longname", "TERP" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "BENZENE" - "longname", "BENZENE" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "CRES" - "longname", "CRES" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "OPEN" - "longname", "OPEN" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "TOL" - "longname", "TOL" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "XOPN" - "longname", "XOPN" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "XYLMN" - "longname", "XYLMN" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", 
"atmos_mod", "NAPH" - "longname", "NAPH" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "CAT1" - "longname", "CAT1" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "CRON" - "longname", "CRON" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "OPAN" - "longname", "OPAN" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ECH4" - "longname", "ECH4" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "CL2" - "longname", "CL2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "HOCL" - "longname", "HOCL" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "FMCL" - "longname", "FMCL" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "HCL" - "longname", "HCL" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "CLNO2" - "longname", "CLNO2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SESQ" - "longname", "SESQ" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SOAALK" - "longname", "SOAALK" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "VLVPO1" - "longname", "VLVPO1" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", 
"VSVPO1" - "longname", "VSVPO1" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "VSVPO2" - "longname", "VSVPO2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "VSVPO3" - "longname", "VSVPO3" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "VIVPO1" - "longname", "VIVPO1" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "VLVOO1" - "longname", "VLVOO1" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "VLVOO2" - "longname", "VLVOO2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "VSVOO1" - "longname", "VSVOO1" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "VSVOO2" - "longname", "VSVOO2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "VSVOO3" - "longname", "VSVOO3" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "PCVOC" - "longname", "PCVOC" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "FORM_PRIMARY" - "longname", "FORM_PRIMARY" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ALD2_PRIMARY" - "longname", "ALD2_PRIMARY" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "BUTADIENE13" - "longname", "BUTADIENE13" - "units", "ppmV" - "tracer_usage", "chemistry" - 
"profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ACROLEIN" - "longname", "ACROLEIN" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ACRO_PRIMARY" - "longname", "ACRO_PRIMARY" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "TOLU" - "longname", "TOLU" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "HG" - "longname", "HG" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "HGIIGAS" - "longname", "HGIIGAS" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASO4J" - "longname", "ASO4J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASO4I" - "longname", "ASO4I" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ANH4J" - "longname", "ANH4J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ANH4I" - "longname", "ANH4I" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ANO3J" - "longname", "ANO3J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ANO3I" - "longname", "ANO3I" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AALK1J" - "longname", "AALK1J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AALK2J" - "longname", "AALK2J" - "units", "ug/kg" - 
"tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AXYL1J" - "longname", "AXYL1J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AXYL2J" - "longname", "AXYL2J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AXYL3J" - "longname", "AXYL3J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ATOL1J" - "longname", "ATOL1J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ATOL2J" - "longname", "ATOL2J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ATOL3J" - "longname", "ATOL3J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ABNZ1J" - "longname", "ABNZ1J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ABNZ2J" - "longname", "ABNZ2J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ABNZ3J" - "longname", "ABNZ3J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "APAH1J" - "longname", "APAH1J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "APAH2J" - "longname", "APAH2J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "APAH3J" - "longname", "APAH3J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ATRP1J" - 
"longname", "ATRP1J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ATRP2J" - "longname", "ATRP2J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AISO1J" - "longname", "AISO1J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AISO2J" - "longname", "AISO2J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASQTJ" - "longname", "ASQTJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AORGCJ" - "longname", "AORGCJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AECJ" - "longname", "AECJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AECI" - "longname", "AECI" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AOTHRJ" - "longname", "AOTHRJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AOTHRI" - "longname", "AOTHRI" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AFEJ" - "longname", "AFEJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AALJ" - "longname", "AALJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASIJ" - "longname", "ASIJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", 
"atmos_mod", "ATIJ" - "longname", "ATIJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ACAJ" - "longname", "ACAJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AMGJ" - "longname", "AMGJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AKJ" - "longname", "AKJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AMNJ" - "longname", "AMNJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ACORS" - "longname", "ACORS" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASOIL" - "longname", "ASOIL" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "NUMATKN" - "longname", "NUMATKN" - "units", "num/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "NUMACC" - "longname", "NUMACC" - "units", "num/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "NUMCOR" - "longname", "NUMCOR" - "units", "num/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SRFATKN" - "longname", "SRFATKN" - "units", "m2/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SRFACC" - "longname", "SRFACC" - "units", "m2/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SRFCOR" - "longname", "SRFCOR" - "units", "m2/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", 
"surface_value=1.e-7" / - "TRACER", "atmos_mod", "AH2OJ" - "longname", "AH2OJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AH2OI" - "longname", "AH2OI" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AH3OPJ" - "longname", "AH3OPJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AH3OPI" - "longname", "AH3OPI" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ANAJ" - "longname", "ANAJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ANAI" - "longname", "ANAI" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ACLJ" - "longname", "ACLJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ACLI" - "longname", "ACLI" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASEACAT" - "longname", "ASEACAT" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ACLK" - "longname", "ACLK" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASO4K" - "longname", "ASO4K" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ANH4K" - "longname", "ANH4K" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ANO3K" - "longname", "ANO3K" - "units", "ug/kg" - "tracer_usage", "chemistry" - 
"profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AH2OK" - "longname", "AH2OK" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AH3OPK" - "longname", "AH3OPK" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AISO3J" - "longname", "AISO3J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AOLGAJ" - "longname", "AOLGAJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AOLGBJ" - "longname", "AOLGBJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AGLYJ" - "longname", "AGLYJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "APCSOJ" - "longname", "APCSOJ" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ALVPO1I" - "longname", "ALVPO1I" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASVPO1I" - "longname", "ASVPO1I" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASVPO2I" - "longname", "ASVPO2I" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ALVPO1J" - "longname", "ALVPO1J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASVPO1J" - "longname", "ASVPO1J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASVPO2J" - "longname", "ASVPO2J" - 
"units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASVPO3J" - "longname", "ASVPO3J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "AIVPO1J" - "longname", "AIVPO1J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ALVOO1I" - "longname", "ALVOO1I" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ALVOO2I" - "longname", "ALVOO2I" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASVOO1I" - "longname", "ASVOO1I" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASVOO2I" - "longname", "ASVOO2I" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ALVOO1J" - "longname", "ALVOO1J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ALVOO2J" - "longname", "ALVOO2J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASVOO1J" - "longname", "ASVOO1J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASVOO2J" - "longname", "ASVOO2J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "ASVOO3J" - "longname", "ASVOO3J" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "NH3" - "longname", "NH3" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - 
"TRACER", "atmos_mod", "SV_ALK1" - "longname", "SV_ALK1" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SV_ALK2" - "longname", "SV_ALK2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SV_XYL1" - "longname", "SV_XYL1" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SV_XYL2" - "longname", "SV_XYL2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SV_TOL1" - "longname", "SV_TOL1" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SV_TOL2" - "longname", "SV_TOL2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SV_BNZ1" - "longname", "SV_BNZ1" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SV_BNZ2" - "longname", "SV_BNZ2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SV_PAH1" - "longname", "SV_PAH1" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SV_PAH2" - "longname", "SV_PAH2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SV_TRP1" - "longname", "SV_TRP1" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SV_TRP2" - "longname", "SV_TRP2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SV_ISO1" - "longname", "SV_ISO1" - "units", "ppmV" - "tracer_usage", 
"chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SV_ISO2" - "longname", "SV_ISO2" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "SV_SQT" - "longname", "SV_SQT" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / - "TRACER", "atmos_mod", "LV_PCSOG" - "longname", "LV_PCSOG" - "units", "ppmV" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=1.e-7" / -# diagnostic air quality tracers - "TRACER", "atmos_mod", "PM25AT" - "longname", "PM2.5 fraction of Aitken mode" - "units", "1" - "tracer_usage", "chemistry", "type=diagnostic" - "profile_type", "fixed", "surface_value=0.0" / - "TRACER", "atmos_mod", "PM25AC" - "longname", "PM2.5 fraction of accumulation mode" - "units", "1" - "tracer_usage", "chemistry", "type=diagnostic" - "profile_type", "fixed", "surface_value=0.0" / - "TRACER", "atmos_mod", "PM25CO" - "longname", "PM2.5 fraction of coarse mode" - "units", "1" - "tracer_usage", "chemistry", "type=diagnostic" - "profile_type", "fixed", "surface_value=0.0" / -"TRACER", "atmos_mod", "PM25_TOT" - "longname", "PM 2.5 from AQM model" - "units", "ug/m3" - "tracer_usage", "chemistry", "type=diagnostic" - "profile_type", "fixed", "surface_value=0.0" / -# non-prognostic cloud amount - "TRACER", "atmos_mod", "cld_amt" - "longname", "cloud amount" - "units", "1" - "profile_type", "fixed", "surface_value=1.e30" / diff --git a/sorc/auto_build_exec.sh b/sorc/auto_build_exec.sh new file mode 100755 index 0000000000..e64d726a8a --- /dev/null +++ b/sorc/auto_build_exec.sh @@ -0,0 +1,5 @@ +#./app_build.sh -p=wcoss2 --clean + +./app_build.sh -p=wcoss2 -a=ATMAQ --extrn |& tee buildup.log + +#./app_build.sh -p=wcoss2 -a=ATMAQ --build-type=DEBUG |& tee build_debug.log diff --git a/ush/UFS_plot_domains.py b/ush/UFS_plot_domains.py deleted file mode 100755 index 847cd96725..0000000000 --- 
a/ush/UFS_plot_domains.py +++ /dev/null @@ -1,203 +0,0 @@ -#!/usr/bin/env python - -import matplotlib.pyplot as plt -from mpl_toolkits.basemap import Basemap -from matplotlib.path import Path -import matplotlib.patches as patches -import numpy as np - -#### User-defined variables - - -# Computational grid definitions -ESGgrid_LON_CTR = -153.0 -ESGgrid_LAT_CTR = 61.0 -ESGgrid_DELX = 3000.0 -ESGgrid_DELY = 3000.0 -ESGgrid_NX = 1344 -ESGgrid_NY = 1152 - -# Write component grid definitions - -WRTCMP_nx = 1340 -WRTCMP_ny = 1132 -WRTCMP_lon_lwr_left = 151.5 -WRTCMP_lat_lwr_left = 42.360 -WRTCMP_dx = ESGgrid_DELX -WRTCMP_dy = ESGgrid_DELY - -# Plot-specific definitions - -plot_res = "i" # background map resolution - -# Note: Resolution can be 'c' (crude), 'l' (low), 'i' (intermediate), 'h' (high), or 'f' (full) -# To plot maps with higher resolution than low, -# you will need to download and install the basemap-data-hires package - - -#### END User-defined variables - - -ESGgrid_width = ESGgrid_NX * ESGgrid_DELX -ESGgrid_height = ESGgrid_NY * ESGgrid_DELY - -big_grid_width = np.ceil(ESGgrid_width * 1.25) -big_grid_height = np.ceil(ESGgrid_height * 1.25) - -WRTCMP_width = WRTCMP_nx * WRTCMP_dx -WRTCMP_height = WRTCMP_ny * WRTCMP_dy - -fig = plt.figure() - -# ax1 = plt.axes -ax1 = plt.subplot2grid((1, 1), (0, 0)) - -map1 = Basemap( - projection="gnom", - resolution=plot_res, - lon_0=ESGgrid_LON_CTR, - lat_0=ESGgrid_LAT_CTR, - width=big_grid_width, - height=big_grid_height, -) - -map1.drawmapboundary(fill_color="#9999FF") -map1.fillcontinents(color="#ddaa66", lake_color="#9999FF") -map1.drawcoastlines() - -map2 = Basemap( - projection="gnom", - lon_0=ESGgrid_LON_CTR, - lat_0=ESGgrid_LAT_CTR, - width=ESGgrid_width, - height=ESGgrid_height, -) - -# map2.drawmapboundary(fill_color='#9999FF') -# map2.fillcontinents(color='#ddaa66',lake_color='#9999FF') -# map2.drawcoastlines() - - -map3 = Basemap( - llcrnrlon=WRTCMP_lon_lwr_left, - llcrnrlat=WRTCMP_lat_lwr_left, - 
width=WRTCMP_width, - height=WRTCMP_height, - resolution=plot_res, - projection="lcc", - lat_0=ESGgrid_LAT_CTR, - lon_0=ESGgrid_LON_CTR, -) - -# map3.drawmapboundary(fill_color='#9999FF') -# map3.fillcontinents(color='#ddaa66',lake_color='#9999FF',alpha=0.5) -# map3.drawcoastlines() - - -# Draw gnomonic compute grid rectangle: - -lbx1, lby1 = map1(*map2(map2.xmin, map2.ymin, inverse=True)) -ltx1, lty1 = map1(*map2(map2.xmin, map2.ymax, inverse=True)) -rtx1, rty1 = map1(*map2(map2.xmax, map2.ymax, inverse=True)) -rbx1, rby1 = map1(*map2(map2.xmax, map2.ymin, inverse=True)) - -verts1 = [ - (lbx1, lby1), # left, bottom - (ltx1, lty1), # left, top - (rtx1, rty1), # right, top - (rbx1, rby1), # right, bottom - (lbx1, lby1), # ignored -] - -codes2 = [ - Path.MOVETO, - Path.LINETO, - Path.LINETO, - Path.LINETO, - Path.CLOSEPOLY, -] - -path = Path(verts1, codes2) -patch = patches.PathPatch(path, facecolor="r", lw=2, alpha=0.5) -ax1.add_patch(patch) - - -# Draw lambert write grid rectangle: - -# Define a function to get the lambert points in the gnomonic space - - -def get_lambert_points(gnomonic_map, lambert_map, pps): - - # This function takes the lambert domain we have defined, lambert_map, as well as - # pps (the number of points to interpolate and draw for each side of the lambert "rectangle"), - # and returns an array of two lists: one a list of tuples of the 4*ppf + 4 vertices mapping the approximate shape - # of the lambert domain on the gnomonic map, the other a list of "draw" instructions to be used by - # the PathPatch function - - # pps is recommended 10 or less due to time of calculation - - # Start array with bottom left point, "MOVETO" instruction - vertices = [ - gnomonic_map(*lambert_map(lambert_map.xmin, lambert_map.ymin, inverse=True)) - ] - instructions = [Path.MOVETO] - - # Next generate the rest of the left side - lefty = np.linspace(lambert_map.ymin, lambert_map.ymax, num=pps + 1, endpoint=False) - - for y in lefty[1:]: - vertices.append( - 
tuple(gnomonic_map(*lambert_map(lambert_map.xmin, y, inverse=True))) - ) - instructions.append(Path.LINETO) - - # Next generate the top of the domain - topx = np.linspace(lambert_map.xmin, lambert_map.xmax, num=pps + 1, endpoint=False) - - for x in topx: - vertices.append( - tuple(gnomonic_map(*lambert_map(x, lambert_map.ymax, inverse=True))) - ) - instructions.append(Path.LINETO) - - # Next generate the right side of the domain - righty = np.linspace( - lambert_map.ymax, lambert_map.ymin, num=pps + 1, endpoint=False - ) - - for y in righty: - vertices.append( - tuple(gnomonic_map(*lambert_map(lambert_map.xmax, y, inverse=True))) - ) - instructions.append(Path.LINETO) - - # Finally generate the bottom of the domain - bottomx = np.linspace( - lambert_map.xmax, lambert_map.xmin, num=pps + 1, endpoint=False - ) - - for x in bottomx: - vertices.append( - tuple(gnomonic_map(*lambert_map(x, lambert_map.ymin, inverse=True))) - ) - instructions.append(Path.LINETO) - - # Need to replace final instruction with Path.CLOSEPOLY - instructions[-1] = Path.CLOSEPOLY - - return vertices, instructions - - -# Call the function we just defined to generate a polygon roughly approximating the lambert "rectangle" in gnomonic space - -verts3, codes3 = get_lambert_points(map1, map3, 10) - -# Now draw! 
- -path = Path(verts3, codes3) -patch = patches.PathPatch(path, facecolor="w", lw=2, alpha=0.5) -ax1.add_patch(patch) - - -plt.show() diff --git a/ush/calculate_cost.py b/ush/calculate_cost.py deleted file mode 100755 index c4647fad88..0000000000 --- a/ush/calculate_cost.py +++ /dev/null @@ -1,107 +0,0 @@ -#!/usr/bin/env python3 - -import os -import unittest -import argparse - -from python_utils import ( - set_env_var, - load_config_file, - flatten_dict, -) - -from set_predef_grid_params import set_predef_grid_params -from set_gridparams_ESGgrid import set_gridparams_ESGgrid -from set_gridparams_GFDLgrid import set_gridparams_GFDLgrid - - -def calculate_cost(config_fn): - ushdir = os.path.dirname(os.path.abspath(__file__)) - - cfg_u = load_config_file(config_fn) - cfg_u = flatten_dict(cfg_u) - - if 'PREDEF_GRID_NAME' in cfg_u: - params_dict = set_predef_grid_params( - USHdir=ushdir, - grid_name=cfg_u['PREDEF_GRID_NAME'], - quilting=True - ) - - # merge cfg_u with defaults, duplicate keys in cfg_u will overwrite defaults - cfg = {**params_dict, **cfg_u} - else: - cfg = cfg_u - - # number of gridpoints (nx*ny) depends on grid generation method - if cfg['GRID_GEN_METHOD'] == "GFDLgrid": - grid_params = set_gridparams_GFDLgrid( - lon_of_t6_ctr=cfg['GFDLgrid_LON_T6_CTR'], - lat_of_t6_ctr=cfg['GFDLgrid_LAT_T6_CTR'], - res_of_t6g=cfg['GFDLgrid_NUM_CELLS'], - stretch_factor=cfg['GFDLgrid_STRETCH_FAC'], - refine_ratio_t6g_to_t7g=cfg['GFDLgrid_REFINE_RATIO'], - istart_of_t7_on_t6g=cfg['GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G'], - iend_of_t7_on_t6g=cfg['GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G'], - jstart_of_t7_on_t6g=cfg['GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G'], - jend_of_t7_on_t6g=cfg['GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G'], - verbose=False, - nh4=4, - ) - - elif cfg['GRID_GEN_METHOD'] == "ESGgrid": - constants = load_config_file(os.path.join(ushdir, "constants.yaml")) - grid_params = set_gridparams_ESGgrid( - lon_ctr=cfg['ESGgrid_LON_CTR'], - lat_ctr=cfg['ESGgrid_LAT_CTR'], - 
nx=cfg['ESGgrid_NX'], - ny=cfg['ESGgrid_NY'], - pazi=cfg['ESGgrid_PAZI'], - halo_width=cfg['ESGgrid_WIDE_HALO_WIDTH'], - delx=cfg['ESGgrid_DELX'], - dely=cfg['ESGgrid_DELY'], - constants=constants["constants"], - ) - else: - raise ValueError("GRID_GEN_METHOD is set to an invalid value") - - cost = [cfg['DT_ATMOS'], grid_params["NX"] * grid_params["NY"] ] - - # reference grid (6-hour forecast on RRFS_CONUS_25km) - PREDEF_GRID_NAME = "RRFS_CONUS_25km" - - refgrid = set_predef_grid_params( - USHdir=ushdir, - grid_name=PREDEF_GRID_NAME, - quilting=True, - ) - - cost.extend([refgrid['DT_ATMOS'], refgrid['ESGgrid_NX'] * refgrid['ESGgrid_NY']]) - - return cost - - -# interface -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="Calculates parameters needed for calculating cost." - ) - parser.add_argument( - "--cfg", - "-c", - dest="cfg", - required=True, - help="config file containing grid params", - ) - args = parser.parse_args() - - params = calculate_cost(args.cfg) - print(" ".join(map(str, params))) - - -class Testing(unittest.TestCase): - def test_calculate_cost(self): - USHdir = os.path.dirname(os.path.abspath(__file__)) - params = calculate_cost(os.path.join(USHdir, 'config.community.yaml')) - self.assertCountEqual(params, [180, 28689, 180, 28689]) - diff --git a/ush/check_python_version.py b/ush/check_python_version.py deleted file mode 100755 index afc3dac62f..0000000000 --- a/ush/check_python_version.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env python3 - -import sys -import logging -import platform -from textwrap import dedent - - -def check_python_version(): - """Check if python version >= 3.6 and presence of some - non-standard packages currently jinja2, yaml, f90nml""" - - # Check for non-standard python packages - try: - import jinja2 - import yaml - import f90nml - except ImportError as error: - logging.error( - dedent( - """ - Error: Missing python package required by the SRW app - """ - ) - ) - raise - - # check python 
version - major, minor, patch = platform.python_version_tuple() - if int(major) < 3 or int(minor) < 6: - logging.error( - dedent( - f""" - Error: python version must be 3.6 or higher - Your python version is: {major}.{minor}""" - ) - ) - raise Exception("Python version below 3.6") - - -if __name__ == "__main__": - try: - check_python_version() - except: - logging.exception( - dedent( - f""" - ************************************************************************* - FATAL ERROR: - The system does not meet minimum requirements for running the SRW app. - Instructions for setting up python environments can be found on the web: - https://github.com/ufs-community/ufs-srweather-app/wiki/Getting-Started - *************************************************************************\n - """ - ) - ) - sys.exit(1) diff --git a/ush/check_ruc_lsm.py b/ush/check_ruc_lsm.py deleted file mode 100755 index 3afe1f8264..0000000000 --- a/ush/check_ruc_lsm.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python3 - -import os -import unittest - -from python_utils import ( - set_env_var, - print_input_args, - load_xml_file, - has_tag_with_value, -) - - -def check_ruc_lsm(ccpp_phys_suite_fp): - """This file defines a function that checks whether the RUC land surface - model (LSM) parameterization is being called by the selected physics suite. 
- - Args: - ccpp_phys_suite_fp: full path to CCPP physics suite xml file - Returns: - Boolean - """ - - print_input_args(locals()) - - tree = load_xml_file(ccpp_phys_suite_fp) - has_ruc = has_tag_with_value(tree, "scheme", "lsm_ruc") - return has_ruc - - -class Testing(unittest.TestCase): - def test_check_ruc_lsm(self): - USHdir = os.path.dirname(os.path.abspath(__file__)) - self.assertTrue( - check_ruc_lsm( - ccpp_phys_suite_fp=f"{USHdir}{os.sep}test_data{os.sep}suite_FV3_GSD_SAR.xml" - ) - ) - - def setUp(self): - set_env_var("DEBUG", True) diff --git a/ush/cmp_expt_to_baseline.sh b/ush/cmp_expt_to_baseline.sh deleted file mode 100755 index b75dce8517..0000000000 --- a/ush/cmp_expt_to_baseline.sh +++ /dev/null @@ -1,411 +0,0 @@ -#!/bin/sh -l -#----------------------------------------------------------------------- -# Description: Compare experiment to a baseline. Can be run with one -# or two command line arguments. With one argument, it -# assumes this is your experiment directory and creates a -# directory for the baseline based on your experiment's -# setup (by reading in the var_defns.sh file in your ex- -# periment directory). With two arguments, it takes the -# first one to be your experiment directory and the second -# the baseline directory. -# -# Usage: ./cmp_expt_to_baseline.sh ${expt_dir} [${baseline_dir}] -# -# Assumptions: RUNDIR1 and RUNDIR2 have the same subdirectory structure. -# nccmp is available as module load -# Script has only been tested on theia -#----------------------------------------------------------------------- - -# Do these need to be machine specific, e.g. by using modulefiles? -module load intel -module load nccmp -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Source bash utility functions. -# -#----------------------------------------------------------------------- -# -. ${scrfunc_dir}/source_util_funcs.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . ${scrfunc_dir}/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Process arguments. -# -#----------------------------------------------------------------------- -# -if [ $# -eq 0 ] || [ $# -gt 2 ]; then - - printf " -ERROR from script ${scrfunc_fn}: -Only 1 or 2 arguments may be specified. Usage: - - > ${scrfunc_fn} expt_dir [baseline_dir] - -where expt_dir is the experiment directory and baseline_dir is an op- -tional baseline directory. -Exiting with nonzero exit code. -" - exit 1 - -fi -# -#----------------------------------------------------------------------- -# -# Set the experiment directory and make sure that it exists. -# -#----------------------------------------------------------------------- -# -expt_dir="$1" -if [ ! -d "${expt_dir}" ]; then - print_err_msg_exit "\ -The specified experiment directory (expt_dir) does not exist: - expt_dir = \"$expt_dir\" -Exiting script with nonzero return code." -fi -# -#----------------------------------------------------------------------- -# -# Read the variable definitions file in the experiment directory. -# -#----------------------------------------------------------------------- -# -. 
${expt_dir}/var_defns.sh -CDATE="${DATE_FIRST_CYCL}" -# -#----------------------------------------------------------------------- -# -# If two arguments are specified, then take the second one to be the di- -# rectory for the baseline. If only one argument is specified, form a -# baseline directory name from the parameters used in the experiment di- -# rectory. If any other number of arguments is specified, print out an -# error message and exit. -# -#----------------------------------------------------------------------- -# -if [ $# -eq 2 ]; then - - baseline_dir="$2" - -else - - baseline_dir="/scratch2/BMC/det/regional_FV3/regr_baselines" - if [ -n ${PREDEF_GRID_NAME} ]; then - baseline_dir="${baseline_dir}/${PREDEF_GRID_NAME}" - else - printf "\ -The experiment must be run on one of the predefined domains. Thus, -PREDEF_GRID_NAME cannot be empty: - PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" -Exiting script with nonzero return code. -" - exit 1 - fi - baseline_dir="${baseline_dir}/${CCPP_PHYS_SUITE}phys" - baseline_dir="${baseline_dir}/ICs-${EXTRN_MDL_NAME_ICS}_LBCs-${EXTRN_MDL_NAME_LBCS}" - baseline_dir="${baseline_dir}/$CDATE" - -fi -# -# Make sure that the baseline directory exists. -# -if [ ! -d "${baseline_dir}" ]; then - printf "\n -A baseline directory corresponding to the configuration used in the ex- -periment directory (expt_dir) does not exist: - expt_dir = \"$expt_dir\" - baseline_dir (missing) = \"$baseline_dir\" -Exiting script with nonzero return code." - exit 1 -fi -# -#----------------------------------------------------------------------- -# -# Print out the experiment and baseline directories. 
-# -#----------------------------------------------------------------------- -# -print_info_msg " -The experiment and baseline directories are: - expt_dir = \"$expt_dir\" - baseline_dir = \"$baseline_dir\"" -# -#----------------------------------------------------------------------- -# -# Set the array containing the names of the subdirectories that will be -# compared. -# -#----------------------------------------------------------------------- -# -# This list should also include $CDATE/postprd since that contains the -# post-processed grib files, but those files' names don't end in a -# standard file extension, e.g. .grb, etc. Must look into this more. -# "grid" \ -# "orog" \ -# "sfc_climo" \ -subdirs=( "." \ - "fix_lam" \ - "$CDATE/${EXTRN_MDL_NAME_ICS}/ICS" \ - "$CDATE/${EXTRN_MDL_NAME_LBCS}/LBCS" \ - "$CDATE/INPUT" \ - "$CDATE/RESTART" \ - "$CDATE" \ - ) -# -#----------------------------------------------------------------------- -# -# Set the array that defines the file extensions to compare in each sub- -# directory. -# -#----------------------------------------------------------------------- -# -#declare -a file_extensions=( "nc" "nemsio" "grb" ) -declare -a file_extensions=( "nc" "grb" ) -#declare -a file_extensions=( "nc" ) -# -#----------------------------------------------------------------------- -# -# Initialize file counts to 0. These are defined as follows: -# -# nfiles_total: -# The number of files in the experiment directory that we attempted to -# compare to the corresponding file in the baseline directory. -# -# nfiles_missing: -# The number of files (out of nfiles_total) that are missing from the -# baseline directory. -# -# nfiles_different: -# The number of files that exist in both the experiment and baseline di- -# rectories and are different. 
-# -#----------------------------------------------------------------------- -# -nfiles_total=0 -nfiles_missing=0 -nfiles_different=0 -# -#----------------------------------------------------------------------- -# -# Loop over the specified subdirectories. For each subdirectory, com- -# pare files having the specified extensions for the experiment and the -# baseline. -# -#----------------------------------------------------------------------- -# -for subdir in "${subdirs[@]}"; do - - msg="Comparing files in subdirectory \"$subdir\" ..." - msglen=${#msg} - printf "\n%s\n" "$msg" - printf "%0.s=" $(seq 1 $msglen) - printf "\n" - - for file_ext in "${file_extensions[@]}"; do - - msg="Comparing files with extension \"${file_ext}\" ..." - msglen=${#msg} - printf "\n%s\n" " $msg" - printf " " - printf "%0.s~" $(seq 1 $msglen) - printf "\n" - -# cmp_files_btwn_dirs "$expt_dir/$subdir" "${baseline_dir}/$subdir" "${ext}" || { \ -# printf " -#Call to file comparison function failed. Exiting with nonzero exit code. -#"; -# exit 1; } -# -#----------------------------------------------------------------------- -# -# -#----------------------------------------------------------------------- -# - if [ "$file_ext" = "nemsio" ] || [ "$file_ext" = "grb" ]; then - compare_tool="cmp" - elif [ "$file_ext" = "nc" ]; then - compare_tool="nccmp -d" - else - printf "\ -The file comparison tool to use for this file extension has not been -specified: - file_ext = \"${file_ext}\" -Please specify the compare tool and rerun. -Exiting script with nonzero exit code. 
-" - fi -# -#----------------------------------------------------------------------- -# -# -# -#----------------------------------------------------------------------- -# - cd ${expt_dir}/$subdir - num_files=$( ls -1 *.${file_ext} 2>/dev/null | wc -l ) -# num_files=$( count_files *.${file_ext} 2>/dev/null | wc -l ) - printf " - Number of files with extension \"${file_ext}\" in subdirectory \"$subdir\" - of the experiment directory is: ${num_files} -" - - if [ "${num_files}" -eq "0" ]; then - printf "\ - Skipping comparison of files with extension \"${file_ext}\" in this subdirectory. -" - else - - fn_len_max=0 - for fn in *.${file_ext}; do - fn_len=${#fn} - if [ ${fn_len} -gt ${fn_len_max} ]; then - fn_len_max=${fn_len} - fi - done - compare_msg_pre=" Comparing file " - msg_len_max=$(( fn_len_max + ${#compare_msg_pre} )) - - for fn in *.${file_ext}; do - - nfiles_total=$(( $nfiles_total + 1 )) - - fn1="$fn" - fn2="${baseline_dir}/$subdir/$fn" - if [ ! -e "$fn2" ]; then # Check if file exists in baseline directory. - - printf " - File specified by fn exists in subdirectory \"$subdir\" of the - experiment directory but not in that of the the baseline directory: - fn = \"$fn\" - subdir = \"$subdir\" - Incrementing missing file count and moving to next file or sub- - directory.\n" - nfiles_missing=$(( nfiles_missing + 1 )) - - else - - msg="${compare_msg_pre}\"$fn\"" - msg_len="${#msg}" - num_dots=$(( msg_len_max - msg_len + 7 )) - dots_str=$( printf "%0.s." $(seq 1 ${num_dots} ) ) - msg="${msg} ${dots_str}" - - printf "$msg" - eval_output=$( eval ${compare_tool} $fn1 $fn2 2>&1 ) - - if [ $? -eq 0 ]; then - printf " Files are identical.\n" - else - printf " FILES ARE DIFFERENT!!!\n" - printf "\ - Error message from \"${compare_tool}\" command is: -${eval_output} -" - nfiles_different=$(( $nfiles_different + 1 )) - fi - - fi - - done # Loop over files of the same extension. - - fi # Number of files > 0 - - done # Loop over file extensions. 
- -done # Loop over subdirectories. -# -#----------------------------------------------------------------------- -# -# Print out final results. -# -#----------------------------------------------------------------------- -# -msg="Summary of regression test:" -msglen=${#msg} -msg="$msg" -printf "\n%s\n" "$msg" -printf "%0.s=" $(seq 1 $msglen) -printf "\n" - -file_extensions_str=$(printf "\"%s\" " "${file_extensions[@]}"); -file_extensions_str="( ${file_extensions_str})" - -printf " - expt_dir = \"$expt_dir\" - baseline_dir = \"$baseline_dir\" - - file_extensions = ${file_extensions_str} - nfiles_total = ${nfiles_total} - nfiles_missing = ${nfiles_missing} - nfiles_different = ${nfiles_different} - -where - - file_extensions: - Array containing the file extensions considered when comparing files. - Only files ending with one of these extensions are compared. - - nfiles_total: - The number of files in the experiment directory that we attempted to - compare to the corresponding file in the baseline directory. - - nfiles_missing: - The number of files (out of nfiles_total) that are missing from the - baseline directory. - - nfiles_different: - The number of files that exist in both the experiment and baseline di- - rectories and are different. - -" - -if [ ${nfiles_missing} -eq 0 ] && [ ${nfiles_different} -eq 0 ]; then - result_str="PASS :)" - exit_code=0 -else - - exit_code=1 - if [ ${nfiles_missing} -ne 0 ] && [ ${nfiles_different} -eq 0 ]; then - result_str="FAIL (due to missing files)" - elif [ ${nfiles_missing} -eq 0 ] && [ ${nfiles_different} -ne 0 ]; then - result_str="FAIL (due to differing files)" - elif [ ${nfiles_missing} -ne 0 ] && [ ${nfiles_different} -ne 0 ]; then - result_str="FAIL (due to missing and differing files)" - fi - -fi - -printf "Final result of regression test: ${result_str}\n" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. 
-# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - -exit ${exit_code} - diff --git a/ush/cmp_rundirs_ncfiles.sh b/ush/cmp_rundirs_ncfiles.sh deleted file mode 100755 index c650454877..0000000000 --- a/ush/cmp_rundirs_ncfiles.sh +++ /dev/null @@ -1,100 +0,0 @@ -#!/bin/sh -l - -module load nccmp -# -#----------------------------------------------------------------------- -# -# Define generic function to compare NetCDF files in two directories. -# -#----------------------------------------------------------------------- -# -function cmp_ncfiles_one_dir() { - - local dir1="$1" - local dir2="$2" - local subdir="$3" - local fileext="$4" - - local fn="" - local msg="" - - cd $dir1/$subdir - - for fn in *.$fileext; do - - fn1="$fn" - if [ -f "$fn1" ] && [ ! -L "$fn1" ]; then # Check if regular file and not a symlink. - - fn2="$dir2/$subdir/$fn" - if [ -e "$fn2" ]; then # Check if file exists. - - if [ -f "$fn2" ] && [ ! -L "$fn2" ]; then # Check if regular file and not a symlink. - - printf "\nComparing file \"$fn\" in subdirectory \"$subdir\" ...\n" - nccmp -d $fn1 $fn2 -# nccmp -dS $fn1 $fn2 -# nccmp -d -t 1e-3 $fn1 $fn2 -# nccmp -d --precision='%g10.5' $fn1 $fn2 - - if [ $? = 0 ]; then - msg=$( printf "%s" "Files are identical." ) - elif [ $? = 1 ]; then - msg=$( printf "%s" "===>>> FILES ARE DIFFERENT!!!" ) - else - msg=$( printf "%s" "FATAL ERROR. Exiting script." ) - exit 1 - fi - - printf "%s\n" "$msg" - - else - printf "\n%s\n" "File \"$fn\" in \"$dir2/$subdir\" is a symbolic link. Skipping." - fi - - else - printf "\n%s\n" "File \"$fn\" does not exist in \"$dir2/$subdir\"." - printf "\n%s\n" "Exiting script." - exit 1 - fi - - else - printf "\n%s\n" "File \"$fn\" in \"$dir1/$subdir\" is a symbolic link. Skipping." - fi - - done - -} -# -#----------------------------------------------------------------------- -# -# Get the two run directories to compare from command-line arguments. 
-# Then compare NetCDF files in the run directories as well as in their -# INPUT subdirectories. -# -#----------------------------------------------------------------------- -# -#set -x - -rundir1="$( readlink -f $1 )" -rundir2="$( readlink -f $2 )" - -printf "\n" -printf "%s\n" "rundir1 = \"$rundir1\"" -printf "%s\n" "rundir2 = \"$rundir2\"" - -subdirs=("INPUT" ".") - -for subdir in "${subdirs[@]}"; do - - msg=$( printf "%s" "Comparing files in subdirectory \"$subdir\" ..." ) - msglen=${#msg} - printf "\n%s\n" "$msg" - printf "%0.s=" $(seq 1 $msglen) - printf "\n" - - cmp_ncfiles_one_dir "$rundir1" "$rundir2" "$subdir" "nc" - -done - - - diff --git a/ush/config.aqm.nco.realtime.yaml b/ush/config.aqm.nco.realtime.yaml deleted file mode 100755 index cc491d25b4..0000000000 --- a/ush/config.aqm.nco.realtime.yaml +++ /dev/null @@ -1,117 +0,0 @@ -metadata: - description: config for Online-CMAQ, AQM_NA_13km, real-time, NCO mode on WCOSS2 -user: - MACHINE: wcoss2 - ACCOUNT: AQM-DEV -platform: - WORKFLOW_MANAGER: rocoto -workflow: - USE_CRON_TO_RELAUNCH: true - CRON_RELAUNCH_INTVL_MNTS: 3 - EXPT_SUBDIR: aqm_nco_aqmna13km - PREDEF_GRID_NAME: AQM_NA_13km - CCPP_PHYS_SUITE: FV3_GFS_v16 - DATE_FIRST_CYCL: '2023102600' - DATE_LAST_CYCL: '2023102618' - INCR_CYCL_FREQ: 6 - FCST_LEN_HRS: -1 - FCST_LEN_CYCL: - - 6 - - 72 - - 72 - - 6 - PREEXISTING_DIR_METHOD: rename - VERBOSE: true - DEBUG: true - COMPILER: intel - DIAG_TABLE_TMPL_FN: diag_table_aqm.FV3_GFS_v16 - FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16 - DO_REAL_TIME: true - COLDSTART: true -# WARMSTART_CYCLE_DIR: /path/to/restart/dir -nco: - envir_dfv: prod - NET_dfv: aqm - model_ver_dfv: v7.0 - RUN_dfv: aqm -# OPSROOT_dfv: /path/to/custom/opsroot - KEEPDATA_dfv: false -workflow_switches: - RUN_TASK_MAKE_GRID: false - RUN_TASK_MAKE_OROG: false - RUN_TASK_MAKE_SFC_CLIMO: false - RUN_TASK_GET_EXTRN_ICS: false - RUN_TASK_GET_EXTRN_LBCS: false - RUN_TASK_NEXUS_GFS_SFC: false - RUN_TASK_RUN_POST: true - RUN_TASK_AQM_ICS: true 
- RUN_TASK_AQM_LBCS: true - RUN_TASK_NEXUS_EMISSION: true - RUN_TASK_FIRE_EMISSION: true - RUN_TASK_POINT_SOURCE: true - RUN_TASK_PRE_POST_STAT: true - RUN_TASK_POST_STAT_O3: true - RUN_TASK_POST_STAT_PM25: true - RUN_TASK_BIAS_CORRECTION_O3: true - RUN_TASK_BIAS_CORRECTION_PM25: true -task_make_grid: - GRID_DIR: /lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/fix/aqm/DOMAIN_DATA/AQM_NA_13km -task_make_orog: - OROG_DIR: /lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/fix/aqm/DOMAIN_DATA/AQM_NA_13km -task_make_sfc_climo: - SFC_CLIMO_DIR: /lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/fix/aqm/DOMAIN_DATA/AQM_NA_13km -task_get_extrn_ics: - EXTRN_MDL_NAME_ICS: FV3GFS - FV3GFS_FILE_FMT_ICS: netcdf - EXTRN_MDL_ICS_OFFSET_HRS: 6 -task_get_extrn_lbcs: - EXTRN_MDL_NAME_LBCS: FV3GFS - LBC_SPEC_INTVL_HRS: 6 - FV3GFS_FILE_FMT_LBCS: netcdf - EXTRN_MDL_LBCS_OFFSET_HRS: 6 - WTIME_GET_EXTRN_LBCS: 02:00:00 -task_make_lbcs: - NNODES_MAKE_LBCS: 1 - PPN_MAKE_LBCS: 128 -task_run_fcst: - DT_ATMOS: 180 - LAYOUT_X: 50 - LAYOUT_Y: 34 - BLOCKSIZE: 16 - RESTART_INTERVAL: 6 24 42 60 - WTIME_RUN_FCST: 04:00:00 - QUILTING: true - PRINT_ESMF: false - DO_FCST_RESTART: true -task_run_post: - POST_OUTPUT_DOMAIN_NAME: 793 - USE_CUSTOM_POST_CONFIG_FILE: false -task_aqm_lbcs: - WTIME_AQM_LBCS: 01:00:00 -task_nexus_gfs_sfc: - NEXUS_GFS_SFC_OFFSET_HRS: 6 -global: - DO_ENSEMBLE: false - NUM_ENS_MEMBERS: 2 - HALO_BLEND: 0 -cpl_aqm_parm: - CPL_AQM: true - DO_AQM_CHEM_LBCS: true - DO_AQM_GEFS_LBCS: true - DO_AQM_DUST: true - DO_AQM_CANOPY: false - DO_AQM_PRODUCT: true - DO_AQM_SAVE_AIRNOW_HIST: false - DO_AQM_SAVE_FIRE: false - AQM_BIO_FILE: BEIS_RRFScmaq_C775.ncf - AQM_DUST_FILE_PREFIX: FENGSHA_p8_10km_inputs - AQM_DUST_FILE_SUFFIX: .nc - AQM_CANOPY_FILE_PREFIX: gfs.t12z.geo - AQM_CANOPY_FILE_SUFFIX: .canopy_regrid.nc - AQM_FIRE_FILE_PREFIX: Hourly_Emissions_regrid_NA_13km - AQM_FIRE_FILE_SUFFIX: _h72.nc - AQM_RC_FIRE_FREQUENCY: hourly - AQM_LBCS_FILES: am4_bndy.c793.2019.v1.nc - NEXUS_GRID_FN: 
grid_spec_793.nc - NUM_SPLIT_NEXUS: 6 - diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml deleted file mode 100755 index 1250bfe595..0000000000 --- a/ush/config_defaults.yaml +++ /dev/null @@ -1,3053 +0,0 @@ -#---------------------------- -# Test description -#---------------------------- -metadata: - description: >- - Default configuration for an experiment. The valid values for most of the - parameters are specified in valid_param_vals.yaml - version: !!str '1.0' -#---------------------------- -# USER config parameters -#---------------------------- -user: - # - #----------------------------------------------------------------------- - # - # WCOSS Implementation Standards document: - # - # NCEP Central Operations - # WCOSS Implementation Standards - # April 19, 2022 - # Version 11.0.0 - # - #---------------------------------------------------------------------- - # - #----------------------------------------------------------------------- - # - # Set machine and queue parameters. Definitions: - # - # MACHINE: - # Machine on which the workflow will run. If you are NOT on a named, - # supported platform, and you want to use the Rocoto workflow manager, - # you will need set MACHINE: "linux" and WORKFLOW_MANAGER: "rocoto". This - # combination will assume a Slurm batch manager when generating the XML. - # Please see ush/valid_param_vals.yaml for a full list of supported - # platforms. - # - # ACCOUNT: - # The account under which to submit jobs to the queue. 
- # - #----------------------------------------------------------------------- - MACHINE: "BIG_COMPUTER" - ACCOUNT: "" - - HOMEaqm: '{{ user.HOMEaqm }}' - USHdir: '{{ user.USHdir }}' - SCRIPTSdir: '{{ [HOMEaqm, "scripts"]|path_join }}' - JOBSdir: '{{ [HOMEaqm, "jobs"]|path_join }}' - SORCdir: '{{ [HOMEaqm, "sorc"]|path_join }}' - PARMdir: '{{ [HOMEaqm, "parm"]|path_join }}' - FIXdir: '{{ [HOMEaqm, "fix"]|path_join }}' - FIXaqm: '{{ [HOMEaqm, "fix"]|path_join }}' - MODULESdir: '{{ [HOMEaqm, "modulefiles"]|path_join }}' - EXECdir: '{{ [HOMEaqm, workflow.EXEC_SUBDIR]|path_join }}' - VX_CONFIG_DIR: '{{ [HOMEaqm, "parm"]|path_join }}' - METPLUS_CONF: '{{ [PARMdir, "metplus"]|path_join }}' - MET_CONFIG: '{{ [PARMdir, "met"]|path_join }}' - UFS_WTHR_MDL_DIR: '{{ user.UFS_WTHR_MDL_DIR }}' - ARL_NEXUS_DIR: '{{ [SORCdir, "arl_nexus"]|path_join }}' - -#---------------------------- -# PLATFORM config parameters -#----------------------------- -platform: - # - #----------------------------------------------------------------------- - # - # WORKFLOW_MANAGER: - # The workflow manager to use (e.g. rocoto). This is set to "none" by - # default, but if the machine name is set to a platform that supports - # rocoto, this will be overwritten and set to "rocoto". If set - # explicitly to rocoto along with the use of the MACHINE=linux target, - # the configuration layer assumes a Slurm batch manager when generating - # the XML. Valid options: "rocoto" or "none" - # - # NCORES_PER_NODE: - # The number of cores available per node on the compute platform, now - # configurable for all platforms. - # - # TASKTHROTTLE: - # The number of active tasks run simultaneously. For linux/mac setting this - # to 1 makes sense - # - # BUILD_MOD_FN: - # Name of alternative build module file to use if using an - # unsupported platform. Is set automatically for supported machines. - # - # WFLOW_MOD_FN: - # Name of alternative workflow module file to use if using an - # unsupported platform. 
Is set automatically for supported machines. - # - # BUILD_VER_FN: - # File name containing the version of the modules used for building the app. - # Currently, WCOSS2 only uses this file. - # - # RUN_VER_FN: - # File name containing the version of the modules used for running the app. - # Currently, WCOSS2 only uses this file. - # - # SCHED: - # The job scheduler to use (e.g. slurm). Set this to an empty string in - # order for the experiment generation script to set it depending on the - # machine. - # - # PARTITION_DEFAULT: - # If using the slurm job scheduler (i.e. if SCHED is set to "slurm"), - # the default partition to which to submit workflow tasks. If a task - # does not have a specific variable that specifies the partition to which - # it will be submitted (e.g. PARTITION_HPSS, PARTITION_FCST; see below), - # it will be submitted to the partition specified by this variable. If - # this is not set or is set to an empty string, it will be (re)set to a - # machine-dependent value. This is not used if SCHED is not set to - # "slurm". - # - # QUEUE_DEFAULT: - # The default queue or QOS (if using the slurm job scheduler, where QOS - # is Quality of Service) to which workflow tasks are submitted. If a - # task does not have a specific variable that specifies the queue to which - # it will be submitted (e.g. QUEUE_HPSS, QUEUE_FCST; see below), it will - # be submitted to the queue specified by this variable. If this is not - # set or is set to an empty string, it will be (re)set to a machine- - # dependent value. - # - # PARTITION_HPSS: - # If using the slurm job scheduler (i.e. if SCHED is set to "slurm"), - # the partition to which the tasks that get or create links to external - # model files [which are needed to generate initial conditions (ICs) and - # lateral boundary conditions (LBCs)] are submitted. If this is not set - # or is set to an empty string, it will be (re)set to a machine-dependent - # value. This is not used if SCHED is not set to "slurm". 
- # - # QUEUE_HPSS: - # The queue or QOS to which the tasks that get or create links to external - # model files [which are needed to generate initial conditions (ICs) and - # lateral boundary conditions (LBCs)] are submitted. If this is not set - # or is set to an empty string, it will be (re)set to a machine-dependent - # value. - # - # PARTITION_FCST: - # If using the slurm job scheduler (i.e. if SCHED is set to "slurm"), - # the partition to which the task that runs forecasts is submitted. If - # this is not set or set to an empty string, it will be (re)set to a - # machine-dependent value. This is not used if SCHED is not set to - # "slurm". - # - # QUEUE_FCST: - # The queue or QOS to which the task that runs a forecast is submitted. - # If this is not set or set to an empty string, it will be (re)set to a - # machine-dependent value. - # - #----------------------------------------------------------------------- - # - WORKFLOW_MANAGER: "" - NCORES_PER_NODE: "" - TASKTHROTTLE: 1000 - BUILD_MOD_FN: 'build_{{ user.MACHINE|lower() }}_{{ workflow.COMPILER }}' - WFLOW_MOD_FN: 'wflow_{{ user.MACHINE|lower() }}' - BUILD_VER_FN: 'build.ver' - RUN_VER_FN: 'run.ver' - SCHED: "" - PARTITION_DEFAULT: "" - QUEUE_DEFAULT: "" - PARTITION_HPSS: "" - QUEUE_HPSS: "" - PARTITION_FCST: "" - QUEUE_FCST: "" - # - #----------------------------------------------------------------------- - # - # Set run commands for platforms without a workflow manager. These values - # will be ignored unless WORKFLOW_MANAGER: "none". Definitions: - # - # RUN_CMD_UTILS: - # The run command for pre-processing utilities (shave, orog, sfc_climo_gen, - # etc.) Can be left blank for smaller domains, in which case the executables - # will run without MPI. - # - # RUN_CMD_FCST: - # The run command for the model forecast step. - # - # RUN_CMD_POST: - # The run command for post-processing (UPP). Can be left blank for smaller - # domains, in which case UPP will run without MPI. 
- # - # RUN_CMD_PRDGEN: - # The run command for the product generation job. - # - # RUN_CMD_SERIAL: - # The run command for some serial jobs - # - # RUN_CMD_AQM: - # The run command for some AQM tasks. - # - # RUN_CMD_AQMLBC: - # The run command for the AQM_LBCS task. - # - #----------------------------------------------------------------------- - # - RUN_CMD_SERIAL: "" - RUN_CMD_UTILS: "" - RUN_CMD_FCST: "" - RUN_CMD_POST: "" - RUN_CMD_PRDGEN: "" - RUN_CMD_AQM: "" - RUN_CMD_AQMLBC: "" - - # - #----------------------------------------------------------------------- - # - # Allows an extra parameter to be passed to SCHEDULER (SLURM/PBSPRO) via - # XML Native command - # - SCHED_NATIVE_CMD: "" - - # - #----------------------------------------------------------------------- - # - # Set METplus parameters. Definitions: - # - # MET_INSTALL_DIR: - # Location to top-level directory of MET installation. - # - # MET_BIN_EXEC: - # Subdirectory containing MET binaries e.g. "bin" - # - # METPLUS_PATH: - # Location to top-level directory of METplus installation. - # - # MET_BIN_EXEC - # Name of subdirectory where METplus executables are installed. - # - # CCPA_OBS_DIR: - # User-specified location of top-level directory where CCPA hourly - # precipitation files used by METplus are located. This parameter needs - # to be set for both user-provided observations and for observations - # that are retrieved from the NOAA HPSS (if the user has access) via - # the TN_GET_OBS_CCPA task (activated in workflow by setting - # RUN_TASK_GET_OBS_CCPA=true). In the case of pulling observations - # directly from NOAA HPSS, the data retrieved will be placed in this - # directory. Please note, this path must be defind as - # /full-path-to-obs/ccpa/proc. METplus is configured to verify 01-, - # 03-, 06-, and 24-h accumulated precipitation using hourly CCPA files. - # METplus configuration files require the use of predetermined directory - # structure and file names. 
Therefore, if the CCPA files are user - # provided, they need to follow the anticipated naming structure: - # {YYYYMMDD}/ccpa.t{HH}z.01h.hrap.conus.gb2, where YYYY is the 4-digit - # valid year, MM the 2-digit valid month, DD the 2-digit valid day of - # the month, and HH the 2-digit valid hour of the day. In addition, a - # caveat is noted for using hourly CCPA data. There is a problem with - # the valid time in the metadata for files valid from 19 - 00 UTC (or - # files under the '00' directory). The script to pull the CCPA data - # from the NOAA HPSS has an example of how to account for this as well - # as organizing the data into a more intuitive format: - # scripts/exregional_get_ccpa_files.sh. When a fix is provided, it will - # be accounted for in the exregional_get_ccpa_files.sh script. - # - # MRMS_OBS_DIR: - # User-specified location of top-level directory where MRMS composite - # reflectivity files used by METplus are located. This parameter needs - # to be set for both user-provided observations and for observations - # that are retrieved from the NOAA HPSS (if the user has access) via the - # TN_GET_OBS_MRMS task (activated in workflow by setting - # RUN_TASK_GET_OBS_MRMS=true). In the case of pulling observations - # directly from NOAA HPSS, the data retrieved will be placed in this - # directory. Please note, this path must be defind as - # /full-path-to-obs/mrms/proc. METplus configuration files require the - # use of predetermined directory structure and file names. Therefore, if - # the MRMS files are user provided, they need to follow the anticipated - # naming structure: - # {YYYYMMDD}/MergedReflectivityQCComposite_00.50_{YYYYMMDD}-{HH}{mm}{SS}.grib2, - # where YYYY is the 4-digit valid year, MM the 2-digit valid month, DD - # the 2-digit valid day of the month, HH the 2-digit valid hour of the - # day, mm the 2-digit valid minutes of the hour, and SS is the two-digit - # valid seconds of the hour. 
In addition, METplus is configured to look - # for a MRMS composite reflectivity file for the valid time of the - # forecast being verified; since MRMS composite reflectivity files do - # not always exactly match the valid time, a script, within the main - # script to retrieve MRMS data from the NOAA HPSS, is used to identify - # and rename the MRMS composite reflectivity file to match the valid - # time of the forecast. The script to pull the MRMS data from the NOAA - # HPSS has an example of the expected file naming structure: - # scripts/exregional_get_mrms_files.sh. This script calls the script - # used to identify the MRMS file closest to the valid time: - # ush/mrms_pull_topofhour.py. - # - # NDAS_OBS_DIR: - # User-specified location of top-level directory where NDAS prepbufr - # files used by METplus are located. This parameter needs to be set for - # both user-provided observations and for observations that are - # retrieved from the NOAA HPSS (if the user has access) via the - # TN_GET_OBS_NDAS task (activated in workflow by setting  - # RUN_TASK_GET_OBS_NDAS=true). In the case of pulling observations - # directly from NOAA HPSS, the data retrieved will be placed in this - # directory. Please note, this path must be defind as - # /full-path-to-obs/ndas/proc. METplus is configured to verify - # near-surface variables hourly and upper-air variables at times valid - # at 00 and 12 UTC with NDAS prepbufr files. METplus configuration files - # require the use of predetermined file names. Therefore, if the NDAS - # files are user provided, they need to follow the anticipated naming - # structure: prepbufr.ndas.{YYYYMMDDHH}, where YYYY is the 4-digit valid - # year, MM the 2-digit valid month, DD the 2-digit valid day of the - # month, and HH the 2-digit valid hour of the day. 
The script to pull - # the NDAS data from the NOAA HPSS has an example of how to rename the - # NDAS data into a more intuitive format with the valid time listed in - # the file name: scripts/exregional_get_ndas_files.sh - # - #----------------------------------------------------------------------- - # - MET_INSTALL_DIR: "" - MET_BIN_EXEC: "" - METPLUS_PATH: "" - CCPA_OBS_DIR: "" - MRMS_OBS_DIR: "" - NDAS_OBS_DIR: "" - FIXaqm_sav: "" - # - #----------------------------------------------------------------------- - # - # DOMAIN_PREGEN_BASEDIR: - # The base directory containing pregenerated grid, orography, and surface - # climatology files. This is an alternative for setting GRID_DIR, - # OROG_DIR, and SFC_CLIMO_DIR individually - # - # For the pregenerated grid specified by PREDEF_GRID_NAME, - # these "fixed" files are located in: - # - # ${DOMAIN_PREGEN_BASEDIR}/${PREDEF_GRID_NAME} - # - # The workflow scripts will create a symlink in the experiment directory - # that will point to a subdirectory (having the name of the grid being - # used) under this directory. This variable should be set to a null - # string in this file, but it can be specified in the user-specified - # workflow configuration file (EXPT_CONFIG_FN). 
- # - #----------------------------------------------------------------------- - # - DOMAIN_PREGEN_BASEDIR: "" - # - #----------------------------------------------------------------------- - # Pre task commands such as "ulimit" needed by tasks - #----------------------------------------------------------------------- - # - PRE_TASK_CMDS: "" - # - #----------------------------------------------------------------------- - # Test directories used in run_WE2E script - #----------------------------------------------------------------------- - # - TEST_EXTRN_MDL_SOURCE_BASEDIR: "" - TEST_PREGEN_BASEDIR: "" - TEST_ALT_EXTRN_MDL_SYSBASEDIR_ICS: "" - TEST_ALT_EXTRN_MDL_SYSBASEDIR_LBCS: "" - TEST_VX_FCST_INPUT_BASEDIR: "" - # - #----------------------------------------------------------------------- - # - # Set parameters associated with the fixed (i.e. static) files. Definitions: - # - # FIXgsm: - # System directory in which the majority of fixed (i.e. time-independent) - # files that are needed to run the FV3-LAM model are located - # - # FIXaer: - # System directory where MERRA2 aerosol climatology files are located - # - # FIXlut: - # System directory where the lookup tables for optics properties are located - # - # FIXorg: - # System directory where orography data is located - # - # FIXsfc: - # System directory where surface climatology data is located - # - #----------------------------------------------------------------------- - # - FIXgsm: "" - FIXaer: "" - FIXlut: "" - FIXorg: "" - FIXsfc: "" - FIXshp: "" - # - #----------------------------------------------------------------------- - # - # EXTRN_MDL_DATA_STORES: - # A list of data stores where the scripts should look for external model - # data. The list is in priority order. If disk information is provided - # via USE_USER_STAGED_EXTRN_FILES or a known location on the platform, - # the disk location will be highest priority. Options are disk, hpss, - # aws, and nomads. 
- # - #----------------------------------------------------------------------- - # - EXTRN_MDL_DATA_STORES: "" - -#----------------------------- -# WORKFLOW config parameters -#----------------------------- -workflow: - # - #----------------------------------------------------------------------- - # - # Unique ID for workflow run that will be set in setup.py - # - #----------------------------------------------------------------------- - # - WORKFLOW_ID: "" - # - #----------------------------------------------------------------------- - # - # How to make links. Relative links by default. Empty string for - # absolute paths in links. - # - #----------------------------------------------------------------------- - # - RELATIVE_LINK_FLAG: "--relative" - # - #----------------------------------------------------------------------- - # - # Set cron-associated parameters. Definitions: - # - # USE_CRON_TO_RELAUNCH: - # Flag that determines whether or not to add a line to the user's cron - # table to call the experiment launch script every CRON_RELAUNCH_INTVL_MNTS - # minutes. - # - # CRON_RELAUNCH_INTVL_MNTS: - # The interval (in minutes) between successive calls of the experiment - # launch script by a cron job to (re)launch the experiment (so that the - # workflow for the experiment kicks off where it left off). - # - #----------------------------------------------------------------------- - # - USE_CRON_TO_RELAUNCH: false - CRON_RELAUNCH_INTVL_MNTS: 3 - CRONTAB_LINE: "" - LOAD_MODULES_RUN_TASK_FP: '{{ [user.USHdir, "load_modules_run_task.sh"]|path_join }}' - - # - #----------------------------------------------------------------------- - # - # Set directories. Definitions: - # - # EXPT_BASEDIR: - # The base directory in which the experiment directory will be created. - # If this is not specified or if it is set to an empty string, it will - # default to ${HOMEaqm}/../expt_dirs. 
If set to a relative path, the - # path will be appended to the default value ${HOMEaqm}/../expt_dirs - # - # EXPT_SUBDIR: - # The name that the experiment directory (without the full path) will - # have. The full path to the experiment directory, which will be contained - # in the variable EXPTDIR, will be: - # - # EXPTDIR: "${EXPT_BASEDIR}/${EXPT_SUBDIR}" - # - # This cannot be empty. If set to a null string here, it must be set to - # a (non-empty) value in the user-defined experiment configuration file. - # - # EXEC_SUBDIR: - # The name of the subdirectory of ufs-srweather-app where executables are - # installed. - #----------------------------------------------------------------------- - # - EXPT_BASEDIR: '' # This will be set in setup.py prior to extend_yaml() being called - EXPT_SUBDIR: '{{ EXPT_SUBDIR }}' - EXEC_SUBDIR: "exec" - EXPTDIR: '{{ [workflow.EXPT_BASEDIR, workflow.EXPT_SUBDIR]|path_join }}' - # - #----------------------------------------------------------------------- - # - # Set the separator character(s) to use in the names of the grid, mosaic, - # and orography fixed files. - # - # Ideally, the same separator should be used in the names of these fixed - # files as the surface climatology fixed files (which always use a "." - # as the separator), i.e. ideally, DOT_OR_USCORE should be set to "." - # - #----------------------------------------------------------------------- - # - DOT_OR_USCORE: "_" - # - #----------------------------------------------------------------------- - # - # Set file names. Definitions: - # - # EXPT_CONFIG_FN: - # Name of the user-specified configuration file for the forecast experiment. - # - # CONSTANTS_FN: - # Name of the file containing definitions of various mathematical, physical, - # and SRW App contants. - # - # RGNL_GRID_NML_FN: - # Name of file containing the namelist settings for the code that generates - # a "ESGgrid" type of regional grid. 
- # - # FV3_NML_BASE_SUITE_FN: - # Name of Fortran namelist file containing the forecast model's base suite - # namelist, i.e. the portion of the namelist that is common to all physics - # suites. - # - # FV3_NML_YAML_CONFIG_FN: - # Name of YAML configuration file containing the forecast model's namelist - # settings for various physics suites. - # - # FV3_NML_BASE_ENS_FN: - # Name of Fortran namelist file containing the forecast model's base - # ensemble namelist, i.e. the the namelist file that is the starting point - # from which the namelist files for each of the enesemble members are - # generated. - # - # FV3_EXEC_FN: - # Name to use for the forecast model executable when it is copied from - # the directory in which it is created in the build step to the executables - # directory (EXECDIR; this is set during experiment generation). - # - # DIAG_TABLE_TMPL_FN: - # Name of a template file that specifies the output fields of the - # forecast model (ufs-weather-model: diag_table) followed by the name - # of the ccpp_phys_suite. Its default value is the name of the file - # that the ufs weather model - # expects to read in. - # - # FIELD_TABLE_TMPL_FN: - # Name of a template file that specifies the tracers in IC/LBC files of the - # forecast model (ufs-weather-mode: field_table) followed by [dot_ccpp_phys_suite]. - # Its default value is the name of the file that the ufs weather model expects - # to read in. - # - # MODEL_CONFIG_TMPL_FN: - # Name of a template file that contains settings and configurations for the - # NUOPC/ESMF main component (ufs-weather-model: model_config). Its default - # value is the name of the file that the ufs weather model expects to read in. - # - # NEMS_CONFIG_TMPL_FN: - # Name of a template file that contains information about the various NEMS - # components and their run sequence (ufs-weather-model: nems.configure). - # Its default value is the name of the file that the ufs weather model expects - # to read in. 
- # - # AQM_RC_TMPL_FN: - # Template file name of resource file for NOAA Air Quality Model (AQM) - # - # FCST_MODEL: - # Name of forecast model (default=ufs-weather-model) - # - # WFLOW_XML_FN: - # Name of the rocoto workflow XML file that the experiment generation - # script creates and that defines the workflow for the experiment. - # - # GLOBAL_VAR_DEFNS_FN: - # Name of file (a shell script) containing the defintions of the primary - # experiment variables (parameters) defined in this default configuration - # script and in the user-specified configuration as well as secondary - # experiment variables generated by the experiment generation script. - # This file is sourced by many scripts (e.g. the J-job scripts corresponding - # to each workflow task) in order to make all the experiment variables - # available in those scripts. - # - # EXTRN_MDL_VAR_DEFNS_FN: - # Name of file (a shell script) containing the defintions of variables - # associated with the external model from which ICs or LBCs are generated. This - # file is created by the TN_GET_EXTRN_* task because the values of the variables - # it contains are not known before this task runs. The file is then sourced by - # the TN_MAKE_ICS and TN_MAKE_LBCS tasks. - # - # WFLOW_LAUNCH_SCRIPT_FN: - # Name of the script that can be used to (re)launch the experiment's rocoto - # workflow. - # - # WFLOW_LAUNCH_LOG_FN: - # Name of the log file that contains the output from successive calls to - # the workflow launch script (WFLOW_LAUNCH_SCRIPT_FN). 
- # - #----------------------------------------------------------------------- - # - EXPT_CONFIG_FN: "config.yaml" - CONSTANTS_FN: "constants.yaml" - - RGNL_GRID_NML_FN: "regional_grid.nml" - - FV3_NML_BASE_SUITE_FN: "input.nml.FV3" - FV3_NML_YAML_CONFIG_FN: "FV3.input.yml" - FV3_NML_BASE_ENS_FN: "input.nml.base_ens" - FV3_NML_FN: "input.nml" - FV3_EXEC_FN: "ufs_model" - - DATA_TABLE_FN: "data_table" - DIAG_TABLE_FN: "diag_table" - FIELD_TABLE_FN: "field_table" - DIAG_TABLE_TMPL_FN: 'diag_table.{{ CCPP_PHYS_SUITE }}' - FIELD_TABLE_TMPL_FN: 'field_table.{{ CCPP_PHYS_SUITE }}' - MODEL_CONFIG_FN: "model_configure" - NEMS_CONFIG_FN: "nems.configure" - AQM_RC_FN: "aqm.rc" - AQM_RC_TMPL_FN: "aqm.rc" - - FV3_NML_BASE_SUITE_FP: '{{ [user.PARMdir, FV3_NML_BASE_SUITE_FN]|path_join }}' - FV3_NML_YAML_CONFIG_FP: '{{ [user.PARMdir, FV3_NML_YAML_CONFIG_FN]|path_join }}' - FV3_NML_BASE_ENS_FP: '{{ [EXPTDIR, FV3_NML_BASE_ENS_FN]|path_join }}' - DATA_TABLE_TMPL_FP: '{{ [user.PARMdir, DATA_TABLE_FN]|path_join }}' - DIAG_TABLE_TMPL_FP: '{{ [user.PARMdir, DIAG_TABLE_TMPL_FN]|path_join }}' - FIELD_TABLE_TMPL_FP: '{{ [user.PARMdir, FIELD_TABLE_TMPL_FN]|path_join }}' - MODEL_CONFIG_TMPL_FP: '{{ [user.PARMdir, MODEL_CONFIG_FN]|path_join }}' - NEMS_CONFIG_TMPL_FP: '{{ [user.PARMdir, NEMS_CONFIG_FN]|path_join }}' - AQM_RC_TMPL_FP: '{{ [user.PARMdir, AQM_RC_TMPL_FN]|path_join }}' - - # These are staged in the exptdir at configuration time - DATA_TABLE_FP: '{{ [EXPTDIR, DATA_TABLE_FN]|path_join }}' - FIELD_TABLE_FP: '{{ [EXPTDIR, FIELD_TABLE_FN]|path_join }}' - NEMS_CONFIG_FP: '{{ [EXPTDIR, NEMS_CONFIG_FN]|path_join }}' - FV3_NML_FP: '{{ [EXPTDIR, FV3_NML_FN]|path_join }}' - - FCST_MODEL: "ufs-weather-model" - WFLOW_XML_FN: "FV3LAM_wflow.xml" - GLOBAL_VAR_DEFNS_FN: "var_defns.sh" - EXTRN_MDL_VAR_DEFNS_FN: "extrn_mdl_var_defns" - WFLOW_LAUNCH_SCRIPT_FN: "launch_FV3LAM_wflow.sh" - WFLOW_LAUNCH_LOG_FN: "log.launch_FV3LAM_wflow" - - GLOBAL_VAR_DEFNS_FP: '{{ [EXPTDIR, GLOBAL_VAR_DEFNS_FN] 
|path_join }}' - WFLOW_LAUNCH_SCRIPT_FP: '{{ [user.USHdir, WFLOW_LAUNCH_SCRIPT_FN] |path_join }}' - WFLOW_LAUNCH_LOG_FP: '{{ [EXPTDIR, WFLOW_LAUNCH_LOG_FN] |path_join }}' - # - #----------------------------------------------------------------------- - # - # Set the fix file paths - # - # FIXdir: - # Location where fix files will be stored for a given experiment - # - # FIXam: - # Directory containing the fixed files (or symlinks) for various fields on - # global grids (which are usually much coarser than the native FV3-LAM grid). - # - # FIXclim: - # Directory containing the MERRA2 aerosol climatology data file and - # lookup tables for optics properties - # - # FIXlam: - # Directory containing the fixed files (or symlinks) for the grid, - # orography, and surface climatology on the native FV3-LAM grid. - # - # THOMPSON_MP_CLIMO_FN and _FP: - # Name and path of file that contains aerosol climatology data. It can - # be used to generate approximate versions of the aerosol fields - # needed by Thompson microphysics. This file will be used to - # generate such approximate aerosol fields in the ICs and LBCs if - # Thompson MP is included in the physics suite and if the exteranl - # model for ICs or LBCs does not already provide these fields. - # - #----------------------------------------------------------------------- - # - FIXdir: '{{ EXPTDIR if workflow_switches.RUN_TASK_MAKE_GRID else [user.HOMEaqm, "fix"]|path_join }}' - FIXam: '{{ [FIXdir, "fix_am"]|path_join }}' - FIXclim: '{{ [FIXdir, "fix_clim"]|path_join }}' - FIXlam: '{{ [FIXdir, "fix_lam"]|path_join }}' - - THOMPSON_MP_CLIMO_FN: "Thompson_MP_MONTHLY_CLIMO.nc" - THOMPSON_MP_CLIMO_FP: '{{ [FIXam, THOMPSON_MP_CLIMO_FN]|path_join }}' - # - #----------------------------------------------------------------------- - # - # Set CCPP-associated parameters. Definitions: - # - # CCPP_PHYS_SUITE: - # The physics suite that will run using CCPP (Common Community Physics - # Package). 
The choice of physics suite determines the forecast model's - # namelist file, the diagnostics table file, the field table file, and - # the XML physics suite definition file that are staged in the experiment - # directory or the cycle directories under it. - # - # *_FN and *_FP variables set the name and paths to the suite - # definition files used for the experiment - #----------------------------------------------------------------------- - # - CCPP_PHYS_SUITE: "FV3_GFS_v16" - CCPP_PHYS_SUITE_FN: 'suite_{{ CCPP_PHYS_SUITE }}.xml' - CCPP_PHYS_SUITE_IN_CCPP_FP: '{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "suites", CCPP_PHYS_SUITE_FN] |path_join }}' - CCPP_PHYS_SUITE_FP: '{{ [workflow.EXPTDIR, CCPP_PHYS_SUITE_FN]|path_join }}' - # - #----------------------------------------------------------------------- - # - # Set the field dictionary file name and paths. - # - #----------------------------------------------------------------------- - # - FIELD_DICT_FN: "fd_nems.yaml" - FIELD_DICT_IN_UWM_FP: '{{ [user.UFS_WTHR_MDL_DIR, "tests", "parm", FIELD_DICT_FN]|path_join }}' - FIELD_DICT_FP: '{{ [workflow.EXPTDIR, FIELD_DICT_FN]|path_join }}' - # - #----------------------------------------------------------------------- - # - # Set GRID_GEN_METHOD. This variable specifies the method to use to - # generate a regional grid in the horizontal. The values that it can - # take on are: - # - # * "GFDLgrid": - # This setting will generate a regional grid by first generating a - # "parent" global cubed-sphere grid and then taking a portion of tile - # 6 of that global grid -- referred to in the grid generation scripts - # as "tile 7" even though it doesn't correspond to a complete tile -- - # and using it as the regional grid. Note that the forecast is run on - # only on the regional grid (i.e. tile 7, not tiles 1 through 6). - # - # * "ESGgrid": - # This will generate a regional grid using the map projection developed - # by Jim Purser of EMC. 
- # - # Note that: - # - # 1) If the experiment is using one of the predefined grids (i.e. if - # PREDEF_GRID_NAME is set to the name of one of the valid predefined - # grids), then GRID_GEN_METHOD will be reset to the value of - # GRID_GEN_METHOD for that grid. This will happen regardless of - # whether or not GRID_GEN_METHOD is assigned a value in the user- - # specified experiment configuration file, i.e. any value it may be - # assigned in the experiment configuration file will be overwritten. - # - # 2) If the experiment is not using one of the predefined grids (i.e. if - # PREDEF_GRID_NAME is set to a null string), then GRID_GEN_METHOD must - # be set in the experiment configuration file. Otherwise, it will - # remain set to a null string, and the experiment generation will - # fail because the generation scripts check to ensure that it is set - # to a non-empty string before creating the experiment directory. - # - #----------------------------------------------------------------------- - # - GRID_GEN_METHOD: "" - # - #----------------------------------------------------------------------- - # - # Set PREDEF_GRID_NAME. This parameter specifies a predefined regional - # grid, as follows: - # - # * If PREDEF_GRID_NAME is set to a valid predefined grid name, the grid - # generation method GRID_GEN_METHOD, the (native) grid parameters, and - # the write-component grid parameters are set to predefined values for - # the specified grid, overwriting any settings of these parameters in - # the user-specified experiment configuration file. In addition, if - # the time step DT_ATMOS and the computational parameters LAYOUT_X, - # LAYOUT_Y, and BLOCKSIZE are not specified in that configuration file, - # they are also set to predefined values for the specified grid. - # - # * If PREDEF_GRID_NAME is set to an empty string, it implies the user - # is providing the native grid parameters in the user-specified - # experiment configuration file (EXPT_CONFIG_FN). 
In this case, the - # grid generation method GRID_GEN_METHOD, the native grid parameters, - # and the write-component grid parameters as well as the time step - # forecast model's main time step DT_ATMOS and the computational - # parameters LAYOUT_X, LAYOUT_Y, and BLOCKSIZE must be set in that - # configuration file; otherwise, the values of all of these parameters - # in this default experiment configuration file will be used. - # - # Setting PREDEF_GRID_NAME provides a convenient method of specifying a - # commonly used set of grid-dependent parameters. The predefined grid - # parameters are specified in the script - # - # $HOMEaqm/ush/set_predef_grid_params.py - # - #----------------------------------------------------------------------- - # - PREDEF_GRID_NAME: "" - # - #----------------------------------------------------------------------- - # - # Set forecast parameters. Definitions: - # - # DATE_FIRST_CYCL: - # Starting cycle date of the FIRST forecast in the set of forecasts to - # run. Format is "YYYYMMDDHH". Note: This has recently changed to - # include the first cycle hour. - # - # DATE_LAST_CYCL: - # Starting cylce date of the LAST forecast in the set of forecasts to run. - # Format is "YYYYMMDDHH". Note: This has recently changed to include - # the last cycle hour. - # - # INCR_CYCL_FREQ: - # Increment in hours for Rocoto cycle frequency. - # Default is 24, which means cycle_freq=24:00:00 - # - # FCST_LEN_HRS: - # The length of each forecast, in integer hours. - # - # FCST_LEN_CYCL: - # The length of forecast for each cycle date in integer hours. - # This is valid only when FCST_LEN_HRS = -1. - # This pattern is recurred for all cycle dates. 
- # - #----------------------------------------------------------------------- - # - DATE_FIRST_CYCL: "YYYYMMDDHH" - DATE_LAST_CYCL: "YYYYMMDDHH" - INCR_CYCL_FREQ: 24 - FCST_LEN_HRS: 24 - FCST_LEN_CYCL: - - '{{ FCST_LEN_HRS }}' - - # - #----------------------------------------------------------------------- - # - # Set PREEXISTING_DIR_METHOD. This variable determines the method to use - # use to deal with preexisting directories [e.g ones generated by previous - # calls to the experiment generation script using the same experiment name - # (EXPT_SUBDIR) as the current experiment]. This variable must be set to - # one of "delete", "rename", and "quit". The resulting behavior for each - # of these values is as follows: - # - # * "delete": - # The preexisting directory is deleted and a new directory (having the - # same name as the original preexisting directory) is created. - # - # * "rename": - # The preexisting directory is renamed and a new directory (having the - # same name as the original preexisting directory) is created. The new - # name of the preexisting directory consists of its original name and - # the suffix "_oldNNN", where NNN is a 3-digit integer chosen to make - # the new name unique. - # - # * "quit": - # The preexisting directory is left unchanged, but execution of the - # currently running script is terminated. In this case, the preexisting - # directory must be dealt with manually before rerunning the script. - # - #----------------------------------------------------------------------- - # - PREEXISTING_DIR_METHOD: "delete" - # - #----------------------------------------------------------------------- - # - # Set flags for more detailed messages. Defintitions: - # - # VERBOSE: - # This is a flag that determines whether or not the experiment generation - # and workflow task scripts tend to print out more informational messages. 
- # - # DEBUG: - # This is a flag that determines whether or not very detailed debugging - # messages are printed to out. Note that if DEBUG is set to TRUE, then - # VERBOSE will also get reset to TRUE if it isn't already. - # - #----------------------------------------------------------------------- - # - VERBOSE: true - DEBUG: false - # - #----------------------------------------------------------------------- - # - # COMPILER: - # Type of compiler invoked during the build step. Currently, this must - # be set manually; it is not inherited from the build system in the - # ufs-srweather-app directory. - # - # SYMLINK_FIX_FILES: - # Symlink fix files to experiment directory if true; otherwise copy the files. - # - #------------------------------------------------------------------------ - # - COMPILER: "intel" - SYMLINK_FIX_FILES: false - # - #----------------------------------------------------------------------- - # - # DO_REAL_TIME: - # switch for real-time run - # - #----------------------------------------------------------------------- - # - DO_REAL_TIME: false - # - #----------------------------------------------------------------------- - # - # COLDSTART: - # Flag turning on/off warm start of the first cycle - # - # WARMSTART_CYCLE_DIR: - # Path to the directory where RESTART dir is located for warm start - # - #----------------------------------------------------------------------- - # - COLDSTART: true - WARMSTART_CYCLE_DIR: "/path/to/warm/start/cycle/dir" - -#---------------------------- -# NCO specific variables -#----------------------------- -nco: - # - #----------------------------------------------------------------------- - # - # All variables have the suffix _dfv meaning the default value. - # This is bacuase they are used as the default values for the production using ecFlow. 
- # - # Definitions: - # - # envir, NET, model_ver, RUN: - # Standard environment variables defined in the NCEP Central Operations WCOSS - # Implementation Standards document as follows: - # - # envir: - # Set to "test" during the initial testing phase, "para" when running - # in parallel (on a schedule), and "prod" in production. - # - # NET: - # Model name (first level of com directory structure) - # - # model_ver: - # Version number of package in three digits (second level of com directory) - # - # RUN: - # Name of model run (third level of com directory structure). - # In general, same as $NET - # - # OPSROOT: - # The operations root directory in NCO mode. - # - # LOGBASEDIR: - # Directory in which the log files from the workflow tasks will be placed. - # - # For more information on NCO standards - # - # https://www.nco.ncep.noaa.gov/idsb/implementation_standards/ImplementationStandards.v11.0.0.pdf - # - #----------------------------------------------------------------------- - # - envir_dfv: "prod" - NET_dfv: "aqm" - RUN_dfv: "aqm" - model_ver_dfv: "v7.0.0" - OPSROOT_dfv: '{{ workflow.EXPT_BASEDIR }}/../nco_dirs' - COMROOT_dfv: '{{ OPSROOT_dfv }}/com' - COMIN_BASEDIR: '{{ COMROOT_dfv }}/{{ NET_dfv }}/{{ model_ver_dfv }}' - COMOUT_BASEDIR: '{{ COMROOT_dfv }}/{{ NET_dfv }}/{{ model_ver_dfv }}' - - DATAROOT_dfv: '{{ OPSROOT_dfv }}/tmp' - DCOMROOT_dfv: '{{ OPSROOT_dfv }}/dcom' - LOGBASEDIR_dfv: '{{ OPSROOT_dfv }}/output' - - # - #----------------------------------------------------------------------- - # - # The following are also described in the NCO doc above: default values - # - #----------------------------------------------------------------------- - # - DBNROOT_dfv: "" - SENDECF_dfv: false - SENDDBN_dfv: false - SENDDBN_NTC_dfv: false - SENDCOM_dfv: false - SENDWEB_dfv: false - KEEPDATA_dfv: true - MAILTO_dfv: "" - MAILCC_dfv: "" - - -#---------------------------- -# WORKFLOW SWITCHES config parameters -#----------------------------- -workflow_switches: - # 
- #----------------------------------------------------------------------- - # - # Set flags (and related directories) that determine whether various - # workflow tasks should be run. Note that the TN_MAKE_GRID, TN_MAKE_OROG, - # and TN_MAKE_SFC_CLIMO are all cycle-independent tasks, i.e. if they - # are to be run, they do so only once at the beginning of the workflow - # before any cycles are run. Definitions: - # - # RUN_TASK_MAKE_GRID: - # Flag that determines whether the TN_MAKE_GRID task is to be run. If - # this is set to true, the grid generation task is run and new grid - # files are generated. If it is set to false, then the scripts look - # for pregenerated grid files in the directory specified by GRID_DIR - # (see below). - # - # RUN_TASK_MAKE_OROG: - # Same as RUN_TASK_MAKE_GRID but for the TN_MAKE_OROG task. - # - # RUN_TASK_MAKE_SFC_CLIMO: - # Same as RUN_TASK_MAKE_GRID but for the TN_MAKE_SFC_CLIMO task. - # - # RUN_TASK_GET_EXTRN_ICS: - # Flag that determines whether the TN_GET_EXTRN_ICS task is to be run. - # - # RUN_TASK_GET_EXTRN_LBCS: - # Flag that determines whether the TN_GET_EXTRN_LBCS task is to be run. - # - # RUN_TASK_MAKE_ICS: - # Flag that determines whether the TN_MAKE_ICS task is to be run. - # - # RUN_TASK_MAKE_LBCS: - # Flag that determines whether the TN_MAKE_LBCS task is to be run. - # - # RUN_TASK_RUN_FCST: - # Flag that determines whether the TN_RUN_FCST task is to be run. - # - # RUN_TASK_RUN_POST: - # Flag that determines whether the TN_RUN_POST task is to be run. - # - # RUN_TASK_RUN_PRDGEN: - # Flag that determines whether the TN_RUN_PRDGEN task is to be run. - # - # RUN_TASK_GET_OBS_CCPA: - # Flag that determines whether to run the TN_GET_OBS_CCPA task, which - # retrieves the CCPA hourly precipitation files used by METplus from NOAA HPSS. - # - # RUN_TASK_GET_OBS_MRMS: - # Flag that determines whether to run the TN_GET_OBS_MRMS task, which - # retrieves the MRMS composite reflectivity files used by METplus from NOAA HPSS. 
- # - # RUN_TASK_GET_OBS_NDAS: - # Flag that determines whether to run the TN_GET_OBS_NDAS task, which - # retrieves the NDAS PrepBufr files used by METplus from NOAA HPSS. - # - # RUN_TASK_VX_GRIDSTAT: - # Flag that determines whether the grid-stat verification task is to be - # run. - # - # RUN_TASK_VX_POINTSTAT: - # Flag that determines whether the point-stat verification task is to be - # run. - # - # RUN_TASK_VX_ENSGRID: - # Flag that determines whether the ensemble-stat verification for gridded - # data task is to be run. - # - # RUN_TASK_VX_ENSPOINT: - # Flag that determines whether the ensemble point verification task is - # to be run. If this flag is set, both ensemble-stat point verification - # and point verification of ensemble-stat output is computed. - # - # RUN_TASK_PLOT_ALLVARS: - # Flag that determines whether to run python plotting scripts - # - # RUN_TASK_AQM_ICS: - # Flag that determines whether the TN_AQM_ICS task is to be run for air quality modeling. - # - # RUN_TASK_AQM_LBCS: - # Flag that determines whether the TN_AQM_LBCS task is to be run for air quality modeling. - # - # RUN_TASK_NEXUS_GFS_SFC: - # Flag that determines whether the TN_NEXUS_GFS_SFC task is to be run for air quality modeling. - # - # RUN_TASK_NEXUS_EMISSION: - # Flag that determines whether the TN_NEXUS_EMISSION task is to be run for air quality modeling. - # - # RUN_TASK_FIRE_EMISSION: - # Flag that determines whether the TN_FIRE_EMISSION task is to be run for air quality modeling. - # - # RUN_TASK_POINT_SOURCE: - # Flag that determines whether the TN_POINT_SOURCE task is to be run for air quality modeling. - # - # RUN_TASK_PRE_POST_STAT: - # Flag that determines whether the TN_PRE_POST_STAT task is to be run for air quality modeling. - # - # RUN_TASK_POST_STAT_O3: - # Flag that determines whether the TN_POST_STAT_O3 task is to be run for air quality modeling. 
- # - # RUN_TASK_POST_STAT_PM25: - # Flag that determines whether the TN_POST_STAT_PM25 task is to be run for air quality modeling. - # - # RUN_TASK_BIAS_CORRECTION_O3: - # Flag that determines whether the TN_BIAS_CORRECTION_O3 task is to be run for air quality modeling. - # - # RUN_TASK_BIAS_CORRECTION_PM25: - # Flag that determines whether the TN_BIAS_CORRECTION_PM25 task is to be run for air quality modeling. - # - #----------------------------------------------------------------------- - # - RUN_TASK_MAKE_GRID: true - RUN_TASK_MAKE_OROG: true - RUN_TASK_MAKE_SFC_CLIMO: true - - RUN_TASK_GET_EXTRN_ICS: true - RUN_TASK_GET_EXTRN_LBCS: true - RUN_TASK_MAKE_ICS: true - RUN_TASK_MAKE_LBCS: true - RUN_TASK_RUN_FCST: true - RUN_TASK_RUN_POST: true - - RUN_TASK_RUN_PRDGEN: false - - RUN_TASK_GET_OBS_CCPA: false - RUN_TASK_GET_OBS_MRMS: false - RUN_TASK_GET_OBS_NDAS: false - RUN_TASK_VX_GRIDSTAT: false - RUN_TASK_VX_POINTSTAT: false - RUN_TASK_VX_ENSGRID: false - RUN_TASK_VX_ENSPOINT: false - - RUN_TASK_PLOT_ALLVARS: false - - RUN_TASK_AQM_ICS: false - RUN_TASK_AQM_LBCS: false - RUN_TASK_NEXUS_GFS_SFC: false - RUN_TASK_NEXUS_EMISSION: false - RUN_TASK_FIRE_EMISSION: false - RUN_TASK_POINT_SOURCE: false - RUN_TASK_PRE_POST_STAT: false - RUN_TASK_POST_STAT_O3: false - RUN_TASK_POST_STAT_PM25: false - RUN_TASK_BIAS_CORRECTION_O3: false - RUN_TASK_BIAS_CORRECTION_PM25: false - - -#---------------------------- -# MAKE GRID config parameters -#----------------------------- -task_make_grid: - TN_MAKE_GRID: "make_grid" - NNODES_MAKE_GRID: 1 - PPN_MAKE_GRID: 24 - WTIME_MAKE_GRID: 00:20:00 - MAXTRIES_MAKE_GRID: 2 - # - #----------------------------------------------------------------------- - # - # GRID_DIR: - # The directory in which to look for pregenerated grid files if - # RUN_TASK_MAKE_GRID is set to false. 
- # - #----------------------------------------------------------------------- - # - GRID_DIR: '{{ [workflow.EXPTDIR, "grid"]|path_join if workflow_switches.RUN_TASK_MAKE_GRID else "" }}' - # - #----------------------------------------------------------------------- - # - # Set parameters specific to the "ESGgrid" method of generating a regional - # grid (i.e. for GRID_GEN_METHOD set to "ESGgrid"). Definitions: - # - # ESGgrid_LON_CTR: - # The longitude of the center of the grid (in degrees). - # - # ESGgrid_LAT_CTR: - # The latitude of the center of the grid (in degrees). - # - # ESGgrid_DELX: - # The cell size in the zonal direction of the regional grid (in meters). - # - # ESGgrid_DELY: - # The cell size in the meridional direction of the regional grid (in - # meters). - # - # ESGgrid_NX: - # The number of cells in the zonal direction on the regional grid. - # - # ESGgrid_NY: - # The number of cells in the meridional direction on the regional grid. - # - # ESGgrid_WIDE_HALO_WIDTH: - # The width (in units of number of grid cells) of the halo to add around - # the regional grid before shaving the halo down to the width(s) expected - # by the forecast model. - # - # ESGgrid_PAZI: - # The rotational parameter for the ESG grid (in degrees). - # - # In order to generate grid files containing halos that are 3-cell and - # 4-cell wide and orography files with halos that are 0-cell and 3-cell - # wide (all of which are required as inputs to the forecast model), the - # grid and orography tasks first create files with halos around the regional - # domain of width ESGgrid_WIDE_HALO_WIDTH cells. These are first stored - # in files. The files are then read in and "shaved" down to obtain grid - # files with 3-cell-wide and 4-cell-wide halos and orography files with - # 0-cell-wide (i.e. no halo) and 3-cell-wide halos. For this reason, we - # refer to the original halo that then gets shaved down as the "wide" - # halo, i.e. 
because it is wider than the 0-cell-wide, 3-cell-wide, and - # 4-cell-wide halos that we will eventually end up with. Note that the - # grid and orography files with the wide halo are only needed as intermediates - # in generating the files with 0-cell-, 3-cell-, and 4-cell-wide halos; - # they are not needed by the forecast model. - # NOTE: Probably don't need to make ESGgrid_WIDE_HALO_WIDTH a user-specified - # variable. Just set it in the function set_gridparams_ESGgrid.py. - # - # Note that: - # - # 1) If the experiment is using one of the predefined grids (i.e. if - # PREDEF_GRID_NAME is set to the name of one of the valid predefined - # grids), then: - # - # a) If the value of GRID_GEN_METHOD for that grid is "GFDLgrid", then - # these parameters will not be used and thus do not need to be reset - # to non-empty strings. - # - # b) If the value of GRID_GEN_METHOD for that grid is "ESGgrid", then - # these parameters will get reset to the values for that grid. - # This will happen regardless of whether or not they are assigned - # values in the user-specified experiment configuration file, i.e. - # any values they may be assigned in the experiment configuration - # file will be overwritten. - # - # 2) If the experiment is not using one of the predefined grids (i.e. if - # PREDEF_GRID_NAME is set to a null string), then: - # - # a) If GRID_GEN_METHOD is set to "GFDLgrid" in the user-specified - # experiment configuration file, then these parameters will not be - # used and thus do not need to be reset to non-empty strings. - # - # b) If GRID_GEN_METHOD is set to "ESGgrid" in the user-specified - # experiment configuration file, then these parameters must be set - # in that configuration file. 
- # - #----------------------------------------------------------------------- - # - ESGgrid_LON_CTR: "" - ESGgrid_LAT_CTR: "" - ESGgrid_DELX: "" - ESGgrid_DELY: "" - ESGgrid_NX: "" - ESGgrid_NY: "" - ESGgrid_WIDE_HALO_WIDTH: "" - ESGgrid_PAZI: "" - - #----------------------------------------------------------------------- - # - # Set parameters specific to the "GFDLgrid" method of generating a regional - # grid (i.e. for GRID_GEN_METHOD set to "GFDLgrid"). The following - # parameters will be used only if GRID_GEN_METHOD is set to "GFDLgrid". - # In this grid generation method: - # - # * The regional grid is defined with respect to a "parent" global cubed- - # sphere grid. Thus, all the parameters for a global cubed-sphere grid - # must be specified in order to define this parent global grid even - # though the model equations are not integrated on (they are integrated - # only on the regional grid). - # - # * GFDLgrid_NUM_CELLS is the number of grid cells in either one of the two - # horizontal directions x and y on any one of the 6 tiles of the parent - # global cubed-sphere grid. The mapping from GFDLgrid_NUM_CELLS to a nominal - # resolution (grid cell size) for a uniform global grid (i.e. Schmidt - # stretch factor GFDLgrid_STRETCH_FAC set to 1) for several values of - # GFDLgrid_NUM_CELLS is as follows: - # - # GFDLgrid_NUM_CELLS typical cell size - # ------------ ----------------- - # 192 50 km - # 384 25 km - # 768 13 km - # 1152 8.5 km - # 3072 3.2 km - # - # Note that these are only typical cell sizes. The actual cell size on - # the global grid tiles varies somewhat as we move across a tile. - # - # * Tile 6 has arbitrarily been chosen as the tile to use to orient the - # global parent grid on the sphere (Earth). This is done by specifying - # GFDLgrid_LON_T6_CTR and GFDLgrid_LAT_T6_CTR, which are the longitude - # and latitude (in degrees) of the center of tile 6. 
- # - # * Setting the Schmidt stretching factor GFDLgrid_STRETCH_FAC to a value - # greater than 1 shrinks tile 6, while setting it to a value less than - # 1 (but still greater than 0) expands it. The remaining 5 tiles change - # shape as necessary to maintain global coverage of the grid. - # - # * The cell size on a given global tile depends on both GFDLgrid_NUM_CELLS and - # GFDLgrid_STRETCH_FAC (since changing GFDLgrid_NUM_CELLS changes the number - # of cells in the tile, and changing GFDLgrid_STRETCH_FAC modifies the - # shape and size of the tile). - # - # * The regional grid is embedded within tile 6 (i.e. it doesn't extend - # beyond the boundary of tile 6). Its exact location within tile 6 is - # is determined by specifying the starting and ending i and j indices - # of the regional grid on tile 6, where i is the grid index in the x - # direction and j is the grid index in the y direction. These indices - # are stored in the variables - # - # GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G - # GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G - # GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G - # GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G - # - # * In the forecast model code and in the experiment generation and workflow - # scripts, for convenience the regional grid is denoted as "tile 7" even - # though it doesn't map back to one of the 6 faces of the cube from - # which the parent global grid is generated (it maps back to only a - # subregion on face 6 since it is wholly confined within tile 6). Tile - # 6 may be referred to as the "parent" tile of the regional grid. - # - # * GFDLgrid_REFINE_RATIO is the refinement ratio of the regional grid - # (tile 7) with respect to the grid on its parent tile (tile 6), i.e. - # it is the number of grid cells along the boundary of the regional grid - # that abut one cell on tile 6. 
Thus, the cell size on the regional - # grid depends not only on GFDLgrid_NUM_CELLS and GFDLgrid_STRETCH_FAC (because - # the cell size on tile 6 depends on these two parameters) but also on - # GFDLgrid_REFINE_RATIO. Note that as on the tiles of the global grid, - # the cell size on the regional grid is not uniform but varies as we - # move across the grid. - # - # Definitions of parameters that need to be specified when GRID_GEN_METHOD - # is set to "GFDLgrid": - # - # GFDLgrid_LON_T6_CTR: - # Longitude of the center of tile 6 (in degrees). - # - # GFDLgrid_LAT_T6_CTR: - # Latitude of the center of tile 6 (in degrees). - # - # GFDLgrid_NUM_CELLS: - # Number of points in each of the two horizontal directions (x and y) on - # each tile of the parent global grid. Note that the name of this parameter - # is really a misnomer because although it has the string "RES" (for - # "resolution") in its name, it specifies number of grid cells, not grid - # size (in say meters or kilometers). However, we keep this name in order - # to remain consistent with the usage of the word "resolution" in the - # global forecast model and other auxiliary codes. - # - # GFDLgrid_STRETCH_FAC: - # Stretching factor used in the Schmidt transformation applied to the - # parent cubed-sphere grid. - # - # GFDLgrid_REFINE_RATIO: - # Cell refinement ratio for the regional grid, i.e. the number of cells - # in either the x or y direction on the regional grid (tile 7) that abut - # one cell on its parent tile (tile 6). - # - # GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G: - # i-index on tile 6 at which the regional grid (tile 7) starts. - # - # GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G: - # i-index on tile 6 at which the regional grid (tile 7) ends. - # - # GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G: - # j-index on tile 6 at which the regional grid (tile 7) starts. - # - # GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G: - # j-index on tile 6 at which the regional grid (tile 7) ends. 
- # - # GFDLgrid_USE_NUM_CELLS_IN_FILENAMES: - # Flag that determines the file naming convention to use for grid, orography, - # and surface climatology files (or, if using pregenerated files, the - # naming convention that was used to name these files). These files - # usually start with the string "C${RES}_", where RES is an integer. - # In the global forecast model, RES is the number of points in each of - # the two horizontal directions (x and y) on each tile of the global grid - # (defined here as GFDLgrid_NUM_CELLS). If this flag is set to true, RES will - # be set to GFDLgrid_NUM_CELLS just as in the global forecast model. If it is - # set to false, we calculate (in the grid generation task) an "equivalent - # global uniform cubed-sphere resolution" -- call it RES_EQUIV -- and - # then set RES equal to it. RES_EQUIV is the number of grid points in - # each of the x and y directions on each tile that a global UNIFORM (i.e. - # stretch factor of 1) cubed-sphere grid would have to have in order to - # have the same average grid size as the regional grid. This is a more - # useful indicator of the grid size because it takes into account the - # effects of GFDLgrid_NUM_CELLS, GFDLgrid_STRETCH_FAC, and GFDLgrid_REFINE_RATIO - # in determining the regional grid's typical grid size, whereas simply - # setting RES to GFDLgrid_NUM_CELLS doesn't take into account the effects of - # GFDLgrid_STRETCH_FAC and GFDLgrid_REFINE_RATIO on the regional grid's - # resolution. Nevertheless, some users still prefer to use GFDLgrid_NUM_CELLS - # in the file names, so we allow for that here by setting this flag to - # true. - # - # Note that: - # - # 1) If the experiment is using one of the predefined grids (i.e. if - # PREDEF_GRID_NAME is set to the name of one of the valid predefined - # grids), then: - # - # a) If the value of GRID_GEN_METHOD for that grid is "GFDLgrid", then - # these parameters will get reset to the values for that grid. 
- # This will happen regardless of whether or not they are assigned - # values in the user-specified experiment configuration file, i.e. - # any values they may be assigned in the experiment configuration - # file will be overwritten. - # - # b) If the value of GRID_GEN_METHOD for that grid is "ESGgrid", then - # these parameters will not be used and thus do not need to be reset - # to non-empty strings. - # - # 2) If the experiment is not using one of the predefined grids (i.e. if - # PREDEF_GRID_NAME is set to a null string), then: - # - # a) If GRID_GEN_METHOD is set to "GFDLgrid" in the user-specified - # experiment configuration file, then these parameters must be set - # in that configuration file. - # - # b) If GRID_GEN_METHOD is set to "ESGgrid" in the user-specified - # experiment configuration file, then these parameters will not be - # used and thus do not need to be reset to non-empty strings. - # - #----------------------------------------------------------------------- - # - GFDLgrid_LON_T6_CTR: "" - GFDLgrid_LAT_T6_CTR: "" - GFDLgrid_NUM_CELLS: "" - GFDLgrid_STRETCH_FAC: "" - GFDLgrid_REFINE_RATIO: "" - GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G: "" - GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G: "" - GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G: "" - GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G: "" - GFDLgrid_USE_NUM_CELLS_IN_FILENAMES: "" - # -#---------------------------- -# MAKE OROG config parameters -#----------------------------- -task_make_orog: - TN_MAKE_OROG: "make_orog" - NNODES_MAKE_OROG: 1 - PPN_MAKE_OROG: 24 - WTIME_MAKE_OROG: 00:20:00 - MAXTRIES_MAKE_OROG: 2 - KMP_AFFINITY_MAKE_OROG: "disabled" - OMP_NUM_THREADS_MAKE_OROG: 6 - OMP_STACKSIZE_MAKE_OROG: "2048m" - OROG_DIR: '{{ [workflow.EXPTDIR, "orog"]|path_join if workflow_switches.RUN_TASK_MAKE_OROG else "" }}' - -#---------------------------- -# MAKE SFC CLIMO config parameters -#----------------------------- -task_make_sfc_climo: - TN_MAKE_SFC_CLIMO: "make_sfc_climo" - NNODES_MAKE_SFC_CLIMO: 2 - PPN_MAKE_SFC_CLIMO: 24 - 
WTIME_MAKE_SFC_CLIMO: 00:20:00 - MAXTRIES_MAKE_SFC_CLIMO: 2 - KMP_AFFINITY_MAKE_SFC_CLIMO: "scatter" - OMP_NUM_THREADS_MAKE_SFC_CLIMO: 1 - OMP_STACKSIZE_MAKE_SFC_CLIMO: "1024m" - SFC_CLIMO_DIR: '{{ [workflow.EXPTDIR, "sfc_climo"]|path_join if workflow_switches.RUN_TASK_MAKE_SFC_CLIMO else "" }}' - -#---------------------------- -# EXTRN ICS config parameters -#----------------------------- -task_get_extrn_ics: - TN_GET_EXTRN_ICS: "get_extrn_ics" - NNODES_GET_EXTRN_ICS: 1 - PPN_GET_EXTRN_ICS: 1 - MEM_GET_EXTRN_ICS: 2G - WTIME_GET_EXTRN_ICS: 00:45:00 - MAXTRIES_GET_EXTRN_ICS: 1 - # - #----------------------------------------------------------------------- - # - # Set initial and lateral boundary condition generation parameters. - # Definitions: - # - # EXTRN_MDL_NAME_ICS: - #`The name of the external model that will provide fields from which - # initial condition (including and surface) files will be generated for - # input into the forecast model. - # - # EXTRN_MDL_ICS_OFFSET_HRS: - # Users may wish to start a forecast from a forecast of a previous cycle - # of an external model. This variable sets the number of hours earlier - # the external model started than when the FV3 forecast configured here - # should start. For example, the forecast should start from a 6 hour - # forecast of the GFS, then EXTRN_MDL_ICS_OFFSET_HRS=6. - # - # FV3GFS_FILE_FMT_ICS: - # If using the FV3GFS model as the source of the ICs (i.e. if EXTRN_MDL_NAME_ICS - # is set to "FV3GFS"), this variable specifies the format of the model - # files to use when generating the ICs. - # - #----------------------------------------------------------------------- - # - EXTRN_MDL_NAME_ICS: "FV3GFS" - EXTRN_MDL_ICS_OFFSET_HRS: 0 - FV3GFS_FILE_FMT_ICS: "nemsio" - # - #----------------------------------------------------------------------- - # - # Base directories in which to search for external model files. 
- # - # EXTRN_MDL_SYSBASEDIR_ICS: - # Base directory on the local machine containing external model files for - # generating ICs on the native grid. The way the full path containing - # these files is constructed depends on the user-specified external model - # for ICs, i.e. EXTRN_MDL_NAME_ICS. - # - # Note that this must be defined as a null string here so that if it is - # specified by the user in the experiment configuration file, it remains - # set to those values, and if not, it gets set to machine-dependent - # values. - # - #----------------------------------------------------------------------- - # - EXTRN_MDL_SYSBASEDIR_ICS: '' - # - #----------------------------------------------------------------------- - # - # User-staged external model directories and files. Definitions: - # - # USE_USER_STAGED_EXTRN_FILES: - # Flag that determines whether or not the workflow will look for the - # external model files needed for generating ICs in user-specified - # directories. - # - # EXTRN_MDL_SOURCE_BASEDIR_ICS: - # Directory in which to look for external model files for generating ICs. - # If USE_USER_STAGED_EXTRN_FILES is set to true, the workflow looks in - # this directory (specifically, in a subdirectory under this directory - # named "YYYYMMDDHH" consisting of the starting date and cycle hour of - # the forecast, where YYYY is the 4-digit year, MM the 2-digit month, DD - # the 2-digit day of the month, and HH the 2-digit hour of the day) for - # the external model files specified by the array EXTRN_MDL_FILES_ICS - # (these files will be used to generate the ICs on the native FV3-LAM - # grid). This variable is not used if USE_USER_STAGED_EXTRN_FILES is - # set to false. - # - # EXTRN_MDL_FILES_ICS: - # Array containing templates of the names of the files to search for in - # the directory specified by EXTRN_MDL_SOURCE_BASEDIR_ICS. This - # variable is not used if USE_USER_STAGED_EXTRN_FILES is set to false. 
- # A single template should be used for each model file type that is - # meant to be used. You may use any of the Python-style templates - # allowed in the ush/retrieve_data.py script. To see the full list of - # supported templates, run that script with a -h option. Here is an example of - # setting FV3GFS nemsio input files: - # EXTRN_MDL_FILES_ICS=( gfs.t{hh}z.atmf{fcst_hr:03d}.nemsio \ - # gfs.t{hh}z.sfcf{fcst_hr:03d}.nemsio ) - # Or for FV3GFS grib files: - # EXTRN_MDL_FILES_ICS=( gfs.t{hh}z.pgrb2.0p25.f{fcst_hr:03d} ) - # - #----------------------------------------------------------------------- - # - USE_USER_STAGED_EXTRN_FILES: false - EXTRN_MDL_SOURCE_BASEDIR_ICS: "" - EXTRN_MDL_FILES_ICS: "" - -#---------------------------- -# EXTRN LBCS config parameters -#----------------------------- -task_get_extrn_lbcs: - TN_GET_EXTRN_LBCS: "get_extrn_lbcs" - NNODES_GET_EXTRN_LBCS: 1 - PPN_GET_EXTRN_LBCS: 1 - MEM_GET_EXTRN_LBCS: 2G - WTIME_GET_EXTRN_LBCS: 00:45:00 - MAXTRIES_GET_EXTRN_LBCS: 1 - # - #----------------------------------------------------------------------- - # - # EXTRN_MDL_NAME_LBCS: - #`The name of the external model that will provide fields from which - # lateral boundary condition (LBC) files (except for the 0-th hour LBC - # file) will be generated for input into the forecast model. - # - # LBC_SPEC_INTVL_HRS: - # The interval (in integer hours) with which LBC files will be generated. - # We will refer to this as the boundary update interval. Note that the - # model specified in EXTRN_MDL_NAME_LBCS must have data available at a - # frequency greater than or equal to that implied by LBC_SPEC_INTVL_HRS. - # For example, if LBC_SPEC_INTVL_HRS is set to 6, then the model must have - # data availble at least every 6 hours. It is up to the user to ensure - # that this is the case. 
- # - # EXTRN_MDL_LBCS_OFFSET_HRS: - # Users may wish to use lateral boundary conditions from a forecast that - # was started earlier than the initial time for the FV3 forecast - # configured here. This variable sets the number of hours earlier - # the external model started than when the FV3 forecast configured here - # should start. For example, the forecast should use lateral boundary - # conditions from the GFS started 6 hours earlier, then - # EXTRN_MDL_LBCS_OFFSET_HRS=6. Defaults to 0 except for RAP, which - # uses a 3 hour offset. - # - # FV3GFS_FILE_FMT_LBCS: - # If using the FV3GFS model as the source of the LBCs (i.e. if - # EXTRN_MDL_NAME_LBCS is set to "FV3GFS"), this variable specifies the - # format of the model files to use when generating the LBCs. - # - #----------------------------------------------------------------------- - # - EXTRN_MDL_NAME_LBCS: "FV3GFS" - LBC_SPEC_INTVL_HRS: 6 - EXTRN_MDL_LBCS_OFFSET_HRS: '{{ 3 if EXTRN_MDL_NAME_LBCS == "RAP" else 0 }}' - FV3GFS_FILE_FMT_LBCS: "nemsio" - #----------------------------------------------------------------------- - # - # EXTRN_MDL_SYSBASEDIR_LBCS: - # Same as EXTRN_MDL_SYSBASEDIR_ICS but for LBCs. - # - # Note that this must be defined as a null string here so that if it is - # specified by the user in the experiment configuration file, it remains - # set to those values, and if not, it gets set to machine-dependent - # values. - # - #----------------------------------------------------------------------- - # - EXTRN_MDL_SYSBASEDIR_LBCS: '' - # - #----------------------------------------------------------------------- - # - # User-staged external model directories and files. Definitions: - # - # USE_USER_STAGED_EXTRN_FILES: - # Analogous to USE_USER_STAGED_EXTRN_FILES in ICS but for LBCs - # - # EXTRN_MDL_SOURCE_BASEDIR_LBCS: - # Analogous to EXTRN_MDL_SOURCE_BASEDIR_ICS but for LBCs instead of ICs. - # - # EXTRN_MDL_FILES_LBCS: - # Analogous to EXTRN_MDL_FILES_ICS but for LBCs instead of ICs. 
- # - #----------------------------------------------------------------------- - # - USE_USER_STAGED_EXTRN_FILES: false - EXTRN_MDL_SOURCE_BASEDIR_LBCS: "" - EXTRN_MDL_FILES_LBCS: "" - -#---------------------------- -# MAKE ICS config parameters -#----------------------------- -task_make_ics: - TN_MAKE_ICS: "make_ics" - NNODES_MAKE_ICS: 4 - PPN_MAKE_ICS: 12 - WTIME_MAKE_ICS: 00:30:00 - MAXTRIES_MAKE_ICS: 1 - KMP_AFFINITY_MAKE_ICS: "scatter" - OMP_NUM_THREADS_MAKE_ICS: 1 - OMP_STACKSIZE_MAKE_ICS: "1024m" - # - #----------------------------------------------------------------------- - # - # USE_FVCOM: - # Flag set to update surface conditions in FV3-LAM with fields generated - # from the Finite Volume Community Ocean Model (FVCOM). This will - # replace lake/sea surface temperature, ice surface temperature, and ice - # placement. FVCOM data must already be interpolated to the desired - # FV3-LAM grid. This flag will be used in make_ics to modify sfc_data.nc - # after chgres_cube is run by running the routine process_FVCOM.exe - # - # FVCOM_WCSTART: - # Define if this is a "warm" start or a "cold" start. Setting this to - # "warm" will read in sfc_data.nc generated in a RESTART directory. - # Setting this to "cold" will read in the sfc_data.nc generated from - # chgres_cube in the make_ics portion of the workflow. - # - # FVCOM_DIR: - # User defined directory where FVCOM data already interpolated to FV3-LAM - # grid is located. File name in this path should be "fvcom.nc" to allow - # - # FVCOM_FILE: - # Name of file located in FVCOM_DIR that has FVCOM data interpolated to - # FV3-LAM grid. 
This file will be copied later to a new location and name - # changed to fvcom.nc - # - #------------------------------------------------------------------------ - # - USE_FVCOM: false - FVCOM_WCSTART: "cold" - FVCOM_DIR: "" - FVCOM_FILE: "fvcom.nc" - -#---------------------------- -# MAKE LBCS config parameters -#----------------------------- -task_make_lbcs: - TN_MAKE_LBCS: "make_lbcs" - NNODES_MAKE_LBCS: 4 - PPN_MAKE_LBCS: 12 - WTIME_MAKE_LBCS: 00:30:00 - MAXTRIES_MAKE_LBCS: 1 - KMP_AFFINITY_MAKE_LBCS: "scatter" - OMP_NUM_THREADS_MAKE_LBCS: 1 - OMP_STACKSIZE_MAKE_LBCS: "1024m" - -#---------------------------- -# FORECAST config parameters -#----------------------------- -task_run_fcst: - TN_RUN_FCST: "run_fcst" - NNODES_RUN_FCST: '{{ (PE_MEMBER01 + PPN_RUN_FCST - 1) // PPN_RUN_FCST }}' - PPN_RUN_FCST: '{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_RUN_FCST }}' - WTIME_RUN_FCST: 04:30:00 - MAXTRIES_RUN_FCST: 1 - FV3_EXEC_FP: '{{ [user.EXECdir, workflow.FV3_EXEC_FN]|path_join }}' - # - #----------------------------------------------------------------------- - # - # KMP_AFFINITY_*: - # From Intel: "The Intel® runtime library has the ability to bind OpenMP - # threads to physical processing units. The interface is controlled using - # the KMP_AFFINITY environment variable. Depending on the system (machine) - # topology, application, and operating system, thread affinity can have a - # dramatic effect on the application speed. - # - # Thread affinity restricts execution of certain threads (virtual execution - # units) to a subset of the physical processing units in a multiprocessor - # computer. Depending upon the topology of the machine, thread affinity can - # have a dramatic effect on the execution speed of a program." 
- # - # For more information, see the following link: - # https://software.intel.com/content/www/us/en/develop/documentation/cpp- - # compiler-developer-guide-and-reference/top/optimization-and-programming- - # guide/openmp-support/openmp-library-support/thread-affinity-interface- - # linux-and-windows.html - # - # OMP_NUM_THREADS_*: - # The number of OpenMP threads to use for parallel regions. - # - # OMP_STACKSIZE_*: - # Controls the size of the stack for threads created by the OpenMP - # implementation. - # - # Note that settings for the make_grid and make_orog tasks are not - # included below as they do not use parallelized code. - # - #----------------------------------------------------------------------- - # - KMP_AFFINITY_RUN_FCST: "scatter" - OMP_NUM_THREADS_RUN_FCST: 1 # ATM_omp_num_threads in nems.configure - OMP_STACKSIZE_RUN_FCST: "512m" - # - #----------------------------------------------------------------------- - # - # Set model_configure parameters. Definitions: - # - # DT_ATMOS: - # The main forecast model integration time step. As described in the - # forecast model documentation, "It corresponds to the frequency with - # which the top level routine in the dynamics is called as well as the - # frequency with which the physics is called." - # - # FHROT: - # Forecast hour at restart - # - # RESTART_INTERVAL: - # frequency of the output restart files (unit:hour). - # Default=0: restart files are produced at the end of a forecast run - # For example, i) RESTART_INTERVAL: 1 -1 => restart files are produced - # every hour with the prefix "YYYYMMDD.HHmmSS." in the RESTART directory - # ii) RESTART_INTERVAL: 1 2 5 => restart files are produced only when - # fh = 1, 2, and 5. - # - # WRITE_DOPOST: - # Flag that determines whether or not to use the inline post feature - # [i.e. calling the Unified Post Processor (UPP) from within the weather - # model]. If this is set to true, the TN_RUN_POST task is deactivated - # (i.e. 
RUN_TASK_RUN_POST is set to false) to avoid unnecessary - # computations. - # - #----------------------------------------------------------------------- - # - DT_ATMOS: "" - FHROT: 0 - RESTART_INTERVAL: 0 - WRITE_DOPOST: false - # - #----------------------------------------------------------------------- - # - # Set computational parameters for the forecast. Definitions: - # - # LAYOUT_X, LAYOUT_Y: - # The number of MPI tasks (processes) to use in the two horizontal - # directions (x and y) of the regional grid when running the forecast - # model. - # - # BLOCKSIZE: - # The amount of data that is passed into the cache at a time. - # - # Here, we set these parameters to null strings. This is so that, for - # any one of these parameters: - # - # 1) If the experiment is using a predefined grid, then if the user - # sets the parameter in the user-specified experiment configuration - # file (EXPT_CONFIG_FN), that value will be used in the forecast(s). - # Otherwise, the default value of the parameter for that predefined - # grid will be used. - # - # 2) If the experiment is not using a predefined grid (i.e. it is using - # a custom grid whose parameters are specified in the experiment - # configuration file), then the user must specify a value for the - # parameter in that configuration file. Otherwise, the parameter - # will remain set to a null string, and the experiment generation - # will fail because the generation scripts check to ensure that all - # the parameters defined in this section are set to non-empty strings - # before creating the experiment directory. - # - #----------------------------------------------------------------------- - # - LAYOUT_X: '{{ LAYOUT_X }}' - LAYOUT_Y: '{{ LAYOUT_Y }}' - BLOCKSIZE: '{{ BLOCKSIZE }}' - # - #----------------------------------------------------------------------- - # - # Set write-component (quilting) parameters. 
Definitions: - # - # QUILTING: - # Flag that determines whether or not to use the write component for - # writing output files to disk. The regional grid requires the use of - # the write component, so users should not change the default value. - # - # PRINT_ESMF: - # Flag for whether or not to output extra (debugging) information from - # ESMF routines. Must be true or false. Note that the write - # component uses ESMF library routines to interpolate from the native - # forecast model grid to the user-specified output grid (which is defined - # in the model configuration file "model_configure" in the forecast's - # run directory). - # - # WRTCMP_write_groups: - # The number of write groups (i.e. groups of MPI tasks) to use in the - # write component. - # - # WRTCMP_write_tasks_per_group: - # The number of MPI tasks to allocate for each write group. - # - # WRTCMP_output_grid: - # Sets the type (coordinate system) of the write component grid. The - # default empty string forces the user to set a valid value for - # WRTCMP_output_grid in config.yaml if specifying a *custom* grid. When - # creating an experiment with a user-defined grid, this parameter must - # be specified or the experiment will fail. - # - # WRTCMP_cen_lon: - # Longitude (in degrees) of the center of the write component grid. Can - # usually be set to the corresponding value from the native grid. - # - # WRTCMP_cen_lat: - # Latitude (in degrees) of the center of the write component grid. Can - # usually be set to the corresponding value from the native grid. - # WRTCMP_lon_lwr_left: - # Longitude (in degrees) of the center of the lower-left (southwest) - # cell on the write component grid. If using the "rotated_latlon" - # coordinate system, this is expressed in terms of the rotated longitude. - # Must be set manually when running an experiment with a user-defined grid. 
- # - # WRTCMP_lat_lwr_left: - # Latitude (in degrees) of the center of the lower-left (southwest) cell - # on the write component grid. If using the "rotated_latlon" coordinate - # system, this is expressed in terms of the rotated latitude. Must be set - # manually when running an experiment with a user-defined grid. - # - # ----------------------------------------------------------------------- - # - # WRTCMP_lon_upr_rght: - # Longitude (in degrees) of the center of the upper-right (northeast) cell - # on the write component grid (expressed in terms of the rotated longitude). - # - # WRTCMP_lat_upr_rght: - # Latitude (in degrees) of the center of the upper-right (northeast) cell - # on the write component grid (expressed in terms of the rotated latitude). - # - # WRTCMP_dlon: - # Size (in degrees) of a grid cell on the write component grid (expressed - # in terms of the rotated longitude). - # - # WRTCMP_dlat: - # Size (in degrees) of a grid cell on the write component grid (expressed - # in terms of the rotated latitude). - # - # ----------------------------------------------------------------------- - # - # WRTCMP_stdlat1: - # First standard latitude (in degrees) in definition of Lambert conformal - # projection. - # - # WRTCMP_stdlat2: - # Second standard latitude (in degrees) in definition of Lambert conformal - # projection. - # - # WRTCMP_nx: - # Number of grid points in the x-coordinate of the Lambert conformal - # projection. - # - # WRTCMP_ny: - # Number of grid points in the y-coordinate of the Lambert conformal - # projection. - # - # WRTCMP_dx: - # Grid cell size (in meters) along the x-axis of the Lambert conformal - # projection. - # - # WRTCMP_dy: - # Grid cell size (in meters) along the y-axis of the Lambert conformal - # projection. 
- # - #----------------------------------------------------------------------- - # - QUILTING: true - PRINT_ESMF: false - - PE_MEMBER01: '{{ LAYOUT_Y * LAYOUT_X + WRTCMP_write_groups * WRTCMP_write_tasks_per_group if QUILTING else LAYOUT_Y * LAYOUT_X}}' - - WRTCMP_write_groups: "" - WRTCMP_write_tasks_per_group: "" - - WRTCMP_output_grid: "''" - WRTCMP_cen_lon: "" - WRTCMP_cen_lat: "" - WRTCMP_lon_lwr_left: "" - WRTCMP_lat_lwr_left: "" - # - # The following are used only for the case of WRTCMP_output_grid set to - # "'rotated_latlon'". - # - WRTCMP_lon_upr_rght: "" - WRTCMP_lat_upr_rght: "" - WRTCMP_dlon: "" - WRTCMP_dlat: "" - # - # The following are used only for the case of WRTCMP_output_grid set to - # "'lambert_conformal'". - # - WRTCMP_stdlat1: "" - WRTCMP_stdlat2: "" - WRTCMP_nx: "" - WRTCMP_ny: "" - WRTCMP_dx: "" - WRTCMP_dy: "" - # - #----------------------------------------------------------------------- - # - # Flag that determines whether MERRA2 aerosol climatology data and - # lookup tables for optics properties are obtained - # - #----------------------------------------------------------------------- - # - USE_MERRA_CLIMO: '{{ workflow.CCPP_PHYS_SUITE == "FV3_GFS_v15_thompson_mynn_lam3km" or workflow.CCPP_PHYS_SUITE == "FV3_GFS_v17_p8" }}' - # - #----------------------------------------------------------------------- - # - # DO_FCST_RESTART: - # Flag turning on/off restart capability of forecast task - # - #----------------------------------------------------------------------- - # - DO_FCST_RESTART: false - -#---------------------------- -# POST config parameters -#----------------------------- -task_run_post: - TN_RUN_POST: "run_post" - NNODES_RUN_POST: 2 - PPN_RUN_POST: 24 - WTIME_RUN_POST: 00:15:00 - MAXTRIES_RUN_POST: 2 - KMP_AFFINITY_RUN_POST: "scatter" - OMP_NUM_THREADS_RUN_POST: 1 - OMP_STACKSIZE_RUN_POST: "1024m" - # - #----------------------------------------------------------------------- - # - # Set parameters associated with subhourly 
forecast model output and - # post-processing. - # - # SUB_HOURLY_POST: - # Flag that indicates whether the forecast model will generate output - # files on a sub-hourly time interval (e.g. 10 minutes, 15 minutes, etc). - # This will also cause the post-processor to process these sub-hourly - # files. If ths is set to true, then DT_SUBHOURLY_POST_MNTS should be - # set to a value between "00" and "59". - # - # DT_SUB_HOURLY_POST_MNTS: - # Time interval in minutes between the forecast model output files. If - # SUB_HOURLY_POST is set to true, this needs to be set to a two-digit - # integer between "01" and "59". This is not used if SUB_HOURLY_POST is - # not set to true. Note that if SUB_HOURLY_POST is set to true but - # DT_SUB_HOURLY_POST_MNTS is set to "00", SUB_HOURLY_POST will get reset - # to false in the experiment generation scripts (there will be an - # informational message in the log file to emphasize this). - # - #----------------------------------------------------------------------- - # - SUB_HOURLY_POST: false - DT_SUBHOURLY_POST_MNTS: 0 - # - #----------------------------------------------------------------------- - # - # Set parameters for customizing the post-processor (UPP). Definitions: - # - # USE_CUSTOM_POST_CONFIG_FILE: - # Flag that determines whether a user-provided custom configuration file - # should be used for post-processing the model data. If this is set to - # true, then the workflow will use the custom post-processing (UPP) - # configuration file specified in CUSTOM_POST_CONFIG_FP. Otherwise, a - # default configuration file provided in the UPP repository will be - # used. - # - # CUSTOM_POST_CONFIG_FP: - # The full path to the custom post flat file, including filename, to be - # used for post-processing. This is only used if CUSTOM_POST_CONFIG_FILE - # is set to true. 
- # - # TESTBED_FIELDS_FN - # The file which lists grib2 fields to be extracted for testbed files - # Empty string means no need to generate testbed files - # - # POST_OUTPUT_DOMAIN_NAME: - # Domain name (in lowercase) used in constructing the names of the output - # files generated by UPP [which is called either by running the TN_RUN_POST - # task or by activating the inline post feature (WRITE_DOPOST set to true)]. - # The post output files are named as follows: - # - # $NET.tHHz.[var_name].f###.${POST_OUTPUT_DOMAIN_NAME}.grib2 - # - # If using a custom grid, POST_OUTPUT_DOMAIN_NAME must be specified by - # the user. If using a predefined grid, POST_OUTPUT_DOMAIN_NAME defaults - # to PREDEF_GRID_NAME. Note that this variable is first changed to lower - # case before being used to construct the file names. - # - #----------------------------------------------------------------------- - # - USE_CUSTOM_POST_CONFIG_FILE: false - CUSTOM_POST_CONFIG_FP: "" - POST_OUTPUT_DOMAIN_NAME: '{{ workflow.PREDEF_GRID_NAME }}' - TESTBED_FIELDS_FN: "" - -#---------------------------- -# RUN PRDGEN config parameters -#----------------------------- -task_run_prdgen: - TN_RUN_PRDGEN: "run_prdgen" - NNODES_RUN_PRDGEN: 1 - PPN_RUN_PRDGEN: 22 - WTIME_RUN_PRDGEN: 00:30:00 - MAXTRIES_RUN_PRDGEN: 1 - KMP_AFFINITY_RUN_PRDGEN: "scatter" - OMP_NUM_THREADS_RUN_PRDGEN: 1 - OMP_STACKSIZE_RUN_PRDGEN: "1024m" - #----------------------------------------------------------------------- - # - # Flag that determines whether to use CFP to run the product generation - # job in parallel. This should be used with the RRFS_NA_3km grid. 
- # - #----------------------------------------------------------------------- - DO_PARALLEL_PRDGEN: false - # - # - #----------------------------------------------------------------------- - # - # Set additional output grids for wgrib2 remapping, if any - # Space-separated list of strings, e.g., ( "130" "242" "clue" ) - # Default is no additional grids - # - # Current options as of 23 Apr 2021: - # "130" (CONUS 13.5 km) - # "200" (Puerto Rico 16 km) - # "221" (North America 32 km) - # "242" (Alaska 11.25 km) - # "243" (Pacific 0.4-deg) - # "clue" (NSSL/SPC 3-km CLUE grid for 2020/2021) - # "hrrr" (HRRR 3-km CONUS grid) - # "hrrre" (HRRRE 3-km CONUS grid) - # "rrfsak" (RRFS 3-km Alaska grid) - # "hrrrak" (HRRR 3-km Alaska grid) - # - #----------------------------------------------------------------------- - # - ADDNL_OUTPUT_GRIDS: [] - -#---------------------------- -# PLOT_ALLVARS config parameters -#----------------------------- -task_plot_allvars: - TN_PLOT_ALLVARS: "plot_allvars" - NNODES_PLOT_ALLVARS: 1 - PPN_PLOT_ALLVARS: 24 - WTIME_PLOT_ALLVARS: 01:00:00 - MAXTRIES_PLOT_ALLVARS: 1 - #------------------------------------------------------------------------- - # Reference experiment's COMOUT directory. This is where the GRIB2 files - # from postprocessing are located. Make this a template to compare - # multiple cycle and dates. COMOUT_REF should end with: - # nco mode: $PDY/$cyc - # community mode: $PDY$cyc/postprd - # We don't do this inside the code, so that we can compare nco vs com runs. - #------------------------------------------------------------------------- - COMOUT_REF: "" - #------------------------------ - # Plot fcts start and increment - #------------------------------ - PLOT_FCST_START: 0 - PLOT_FCST_INC: 3 - #----------------------------------- - # By default the end is FCST_LEN_HRS - #----------------------------------- - PLOT_FCST_END: "" - #------------------------------------------------------------------------------ - # Domains to plot. 
Currently supported are either "conus" or "regional" or both - #------------------------------------------------------------------------------- - PLOT_DOMAINS: ["conus"] - -#---------------------------- -# GET OBS CCPA config parameters -#----------------------------- -task_get_obs_ccpa: - TN_GET_OBS_CCPA: "get_obs_ccpa" - NNODES_GET_OBS_CCPA: 1 - PPN_GET_OBS_CCPA: 1 - MEM_GET_OBS_CCPA: 2G - WTIME_GET_OBS_CCPA: 00:45:00 - MAXTRIES_GET_OBS_CCPA: 1 - -#---------------------------- -# GET OBS MRMS config parameters -#----------------------------- -task_get_obs_mrms: - TN_GET_OBS_MRMS: "get_obs_mrms" - NNODES_GET_OBS_MRMS: 1 - PPN_GET_OBS_MRMS: 1 - MEM_GET_OBS_MRMS: 2G - WTIME_GET_OBS_MRMS: 00:45:00 - MAXTRIES_GET_OBS_MRMS: 1 - -#---------------------------- -# GET OBS NDAS config parameters -#----------------------------- -task_get_obs_ndas: - TN_GET_OBS_NDAS: "get_obs_ndas" - NNODES_GET_OBS_NDAS: 1 - PPN_GET_OBS_NDAS: 1 - MEM_GET_OBS_NDAS: 2G - WTIME_GET_OBS_NDAS: 02:00:00 - MAXTRIES_GET_OBS_NDAS: 1 - -#---------------------------- -# tn_run_met_pb2nc_obs config parameters -#----------------------------- -task_tn_run_met_pb2nc_obs: - TN_RUN_MET_PB2NC_OBS: "run_MET_Pb2nc_obs" - NNODES_RUN_MET_PB2NC_OBS: 1 - PPN_RUN_MET_PB2NC_OBS: 1 - MEM_RUN_MET_PB2NC_OBS: 2G - WTIME_RUN_MET_PB2NC_OBS: 00:30:00 - MAXTRIES_RUN_MET_PB2NC_OBS: 2 - -#---------------------------- -# tn_run_met_pcpcombine config parameters -#----------------------------- -task_tn_run_met_pcpcombine: - TN_RUN_MET_PCPCOMBINE: "run_MET_PcpCombine" -# - NNODES_RUN_MET_PCPCOMBINE_OBS: 1 - PPN_RUN_MET_PCPCOMBINE_OBS: 1 - MEM_RUN_MET_PCPCOMBINE_OBS: 2G - WTIME_RUN_MET_PCPCOMBINE_OBS: 00:30:00 - MAXTRIES_RUN_MET_PCPCOMBINE_OBS: 2 -# - NNODES_RUN_MET_PCPCOMBINE_FCST: 1 - PPN_RUN_MET_PCPCOMBINE_FCST: 1 - MEM_RUN_MET_PCPCOMBINE_FCST: 2G - WTIME_RUN_MET_PCPCOMBINE_FCST: 00:30:00 - MAXTRIES_RUN_MET_PCPCOMBINE_FCST: 2 - -#---------------------------- -# run_met_gridstat_vx_apcp01h config parameters 
-#----------------------------- -task_run_met_gridstat_vx_apcp01h: - TN_RUN_MET_GRIDSTAT_VX_APCP01H: "run_MET_GridStat_vx_APCP01h" - NNODES_RUN_MET_GRIDSTAT_VX_APCP01H: 1 - PPN_RUN_MET_GRIDSTAT_VX_APCP01H: 1 - MEM_RUN_MET_GRIDSTAT_VX_APCP01H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_APCP01H: 02:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_APCP01H: 2 - -#---------------------------- -# run_met_gridstat_vx_apcp03h config parameters -#----------------------------- -task_run_met_gridstat_vx_apcp03h: - TN_RUN_MET_GRIDSTAT_VX_APCP03H: "run_MET_GridStat_vx_APCP03h" - NNODES_RUN_MET_GRIDSTAT_VX_APCP03H: 1 - PPN_RUN_MET_GRIDSTAT_VX_APCP03H: 1 - MEM_RUN_MET_GRIDSTAT_VX_APCP03H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_APCP03H: 02:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_APCP03H: 2 - -#---------------------------- -# run_met_gridstat_vx_apcp06h config parameters -#----------------------------- -task_run_met_gridstat_vx_apcp06h: - TN_RUN_MET_GRIDSTAT_VX_APCP06H: "run_MET_GridStat_vx_APCP06h" - NNODES_RUN_MET_GRIDSTAT_VX_APCP06H: 1 - PPN_RUN_MET_GRIDSTAT_VX_APCP06H: 1 - MEM_RUN_MET_GRIDSTAT_VX_APCP06H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_APCP06H: 02:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_APCP06H: 2 - -#---------------------------- -# run_met_gridstat_vx_apcp24h config parameters -#----------------------------- -task_run_met_gridstat_vx_apcp24h: - TN_RUN_MET_GRIDSTAT_VX_APCP24H: "run_MET_GridStat_vx_APCP24h" - NNODES_RUN_MET_GRIDSTAT_VX_APCP24H: 1 - PPN_RUN_MET_GRIDSTAT_VX_APCP24H: 1 - MEM_RUN_MET_GRIDSTAT_VX_APCP24H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_APCP24H: 02:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_APCP24H: 2 - -#---------------------------- -# run_met_gridstat_vx_refc config parameters -#----------------------------- -task_run_met_gridstat_vx_refc: - TN_RUN_MET_GRIDSTAT_VX_REFC: "run_MET_GridStat_vx_REFC" - NNODES_RUN_MET_GRIDSTAT_VX_REFC: 1 - PPN_RUN_MET_GRIDSTAT_VX_REFC: 1 - MEM_RUN_MET_GRIDSTAT_VX_REFC: 2G - WTIME_RUN_MET_GRIDSTAT_VX_REFC: 02:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_REFC: 2 - 
-#---------------------------- -# run_met_gridstat_vx_retop config parameters -#----------------------------- -task_run_met_gridstat_vx_retop: - TN_RUN_MET_GRIDSTAT_VX_RETOP: "run_MET_GridStat_vx_RETOP" - NNODES_RUN_MET_GRIDSTAT_VX_RETOP: 1 - PPN_RUN_MET_GRIDSTAT_VX_RETOP: 1 - MEM_RUN_MET_GRIDSTAT_VX_RETOP: 2G - WTIME_RUN_MET_GRIDSTAT_VX_RETOP: 02:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_RETOP: 2 - -#---------------------------- -# run_met_pointstat_vx_sfc config parameters -#----------------------------- -task_run_met_pointstat_vx_sfc: - TN_RUN_MET_POINTSTAT_VX_SFC: "run_MET_PointStat_vx_SFC" - NNODES_RUN_MET_POINTSTAT_VX_SFC: 1 - PPN_RUN_MET_POINTSTAT_VX_SFC: 1 - MEM_RUN_MET_POINTSTAT_VX_SFC: 2G - WTIME_RUN_MET_POINTSTAT_VX_SFC: 01:00:00 - MAXTRIES_RUN_MET_POINTSTAT_VX_SFC: 2 - -#---------------------------- -# run_met_pointstat_vx_upa config parameters -#----------------------------- -task_run_met_pointstat_vx_upa: - TN_RUN_MET_POINTSTAT_VX_UPA: "run_MET_PointStat_vx_UPA" - NNODES_RUN_MET_POINTSTAT_VX_UPA: 1 - PPN_RUN_MET_POINTSTAT_VX_UPA: 1 - MEM_RUN_MET_POINTSTAT_VX_UPA: 2G - WTIME_RUN_MET_POINTSTAT_VX_UPA: 01:00:00 - MAXTRIES_RUN_MET_POINTSTAT_VX_UPA: 2 - -#---------------------------- -# run_met_ensemblestat_vx_apcp01h config parameters -#----------------------------- -task_run_met_ensemblestat_vx_apcp01h: - TN_RUN_MET_ENSEMBLESTAT_VX_APCP01H: "run_MET_EnsembleStat_vx_APCP01h" - NNODES_RUN_MET_ENSEMBLESTAT_VX_APCP01H: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_APCP01H: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_APCP01H: 2G - WTIME_RUN_MET_ENSEMBLESTAT_VX_APCP01H: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_APCP01H: 2 - -#---------------------------- -# run_met_ensemblestat_vx_apcp03h config parameters -#----------------------------- -task_run_met_ensemblestat_vx_apcp03h: - TN_RUN_MET_ENSEMBLESTAT_VX_APCP03H: "run_MET_EnsembleStat_vx_APCP03h" - NNODES_RUN_MET_ENSEMBLESTAT_VX_APCP03H: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_APCP03H: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_APCP03H: 2G - 
WTIME_RUN_MET_ENSEMBLESTAT_VX_APCP03H: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_APCP03H: 2 - -#---------------------------- -# run_met_ensemblestat_vx_apcp06h config parameters -#----------------------------- -task_run_met_ensemblestat_vx_apcp06h: - TN_RUN_MET_ENSEMBLESTAT_VX_APCP06H: "run_MET_EnsembleStat_vx_APCP06h" - NNODES_RUN_MET_ENSEMBLESTAT_VX_APCP06H: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_APCP06H: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_APCP06H: 2G - WTIME_RUN_MET_ENSEMBLESTAT_VX_APCP06H: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_APCP06H: 2 - -#---------------------------- -# run_met_ensemblestat_vx_apcp24h config parameters -#----------------------------- -task_run_met_ensemblestat_vx_apcp24h: - TN_RUN_MET_ENSEMBLESTAT_VX_APCP24H: "run_MET_EnsembleStat_vx_APCP24h" - NNODES_RUN_MET_ENSEMBLESTAT_VX_APCP24H: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_APCP24H: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_APCP24H: 2G - WTIME_RUN_MET_ENSEMBLESTAT_VX_APCP24H: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_APCP24H: 2 - -#---------------------------- -# run_met_ensemblestat_vx_refc config parameters -#----------------------------- -task_run_met_ensemblestat_vx_refc: - TN_RUN_MET_ENSEMBLESTAT_VX_REFC: "run_MET_EnsembleStat_vx_REFC" - NNODES_RUN_MET_ENSEMBLESTAT_VX_REFC: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_REFC: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_REFC: 2G - WTIME_RUN_MET_ENSEMBLESTAT_VX_REFC: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_REFC: 2 - -#---------------------------- -# run_met_ensemblestat_vx_retop config parameters -#----------------------------- -task_run_met_ensemblestat_vx_retop: - TN_RUN_MET_ENSEMBLESTAT_VX_RETOP: "run_MET_EnsembleStat_vx_RETOP" - NNODES_RUN_MET_ENSEMBLESTAT_VX_RETOP: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_RETOP: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_RETOP: 2G - WTIME_RUN_MET_ENSEMBLESTAT_VX_RETOP: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_RETOP: 2 - -#---------------------------- -# run_met_ensemblestat_vx_sfc config parameters -#----------------------------- 
-task_run_met_ensemblestat_vx_sfc: - TN_RUN_MET_ENSEMBLESTAT_VX_SFC: "run_MET_EnsembleStat_vx_SFC" - NNODES_RUN_MET_ENSEMBLESTAT_VX_SFC: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_SFC: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_SFC: 2G - WTIME_RUN_MET_ENSEMBLESTAT_VX_SFC: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_SFC: 2 - -#---------------------------- -# run_met_ensemblestat_vx_upa config parameters -#----------------------------- -task_run_met_ensemblestat_vx_upa: - TN_RUN_MET_ENSEMBLESTAT_VX_UPA: "run_MET_EnsembleStat_vx_UPA" - NNODES_RUN_MET_ENSEMBLESTAT_VX_UPA: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_UPA: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_UPA: 2G - WTIME_RUN_MET_ENSEMBLESTAT_VX_UPA: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_UPA: 2 - -#---------------------------- -# run_met_gridstat_vx_ensmean_apcp01h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensmean_apcp01h: - TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP01H: "run_MET_GridStat_vx_ensmean_APCP01h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP01H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP01H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP01H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP01H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP01H: 2 - -#---------------------------- -# run_met_gridstat_vx_ensmean_apcp03h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensmean_apcp03h: - TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP03H: "run_MET_GridStat_vx_ensmean_APCP03h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP03H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP03H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP03H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP03H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP03H: 2 - -#---------------------------- -# run_met_gridstat_vx_ensmean_apcp06h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensmean_apcp06h: - TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP06H: "run_MET_GridStat_vx_ensmean_APCP06h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP06H: 1 - 
PPN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP06H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP06H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP06H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP06H: 2 - -#---------------------------- -# run_met_gridstat_vx_ensmean_apcp24h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensmean_apcp24h: - TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP24H: "run_MET_GridStat_vx_ensmean_APCP24h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP24H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP24H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP24H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP24H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP24H: 2 - -#---------------------------- -# run_met_pointstat_vx_ensmean_sfc config parameters -#----------------------------- -task_run_met_pointstat_vx_ensmean_sfc: - TN_RUN_MET_POINTSTAT_VX_ENSMEAN_SFC: "run_MET_PointStat_vx_ensmean_SFC" - NNODES_RUN_MET_POINTSTAT_VX_ENSMEAN_SFC: 1 - PPN_RUN_MET_POINTSTAT_VX_ENSMEAN_SFC: 1 - MEM_RUN_MET_POINTSTAT_VX_ENSMEAN_SFC: 2G - WTIME_RUN_MET_POINTSTAT_VX_ENSMEAN_SFC: 01:00:00 - MAXTRIES_RUN_MET_POINTSTAT_VX_ENSMEAN_SFC: 2 - -#---------------------------- -# run_met_pointstat_vx_ensmean_upa config parameters -#----------------------------- -task_run_met_pointstat_vx_ensmean_upa: - TN_RUN_MET_POINTSTAT_VX_ENSMEAN_UPA: "run_MET_PointStat_vx_ensmean_UPA" - NNODES_RUN_MET_POINTSTAT_VX_ENSMEAN_UPA: 1 - PPN_RUN_MET_POINTSTAT_VX_ENSMEAN_UPA: 1 - MEM_RUN_MET_POINTSTAT_VX_ENSMEAN_UPA: 2G - WTIME_RUN_MET_POINTSTAT_VX_ENSMEAN_UPA: 01:00:00 - MAXTRIES_RUN_MET_POINTSTAT_VX_ENSMEAN_UPA: 2 - -#---------------------------- -# run_met_gridstat_vx_ensprob_apcp01h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensprob_apcp01h: - TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP01H: "run_MET_GridStat_vx_ensprob_APCP01h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP01H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP01H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP01H: 2G - 
WTIME_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP01H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP01H: 2 - -#---------------------------- -# run_met_gridstat_vx_ensprob_apcp03h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensprob_apcp03h: - TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP03H: "run_MET_GridStat_vx_ensprob_APCP03h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP03H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP03H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP03H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP03H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP03H: 2 - -#---------------------------- -# run_met_gridstat_vx_ensprob_apcp06h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensprob_apcp06h: - TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP06H: "run_MET_GridStat_vx_ensprob_APCP06h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP06H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP06H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP06H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP06H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP06H: 2 - -#---------------------------- -# run_met_gridstat_vx_ensprob_apcp24h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensprob_apcp24h: - TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP24H: "run_MET_GridStat_vx_ensprob_APCP24h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP24H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP24H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP24H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP24H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP24H: 2 - -#---------------------------- -# run_met_gridstat_vx_ensprob_refc config parameters -#----------------------------- -task_run_met_gridstat_vx_ensprob_refc: - TN_RUN_MET_GRIDSTAT_VX_ENSPROB_REFC: "run_MET_GridStat_vx_ensprob_REFC" - NNODES_RUN_MET_GRIDSTAT_VX_ENSPROB_REFC: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSPROB_REFC: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSPROB_REFC: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSPROB_REFC: 01:00:00 - 
MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSPROB_REFC: 2 - -#---------------------------- -# run_met_gridstat_vx_ensprob_retop config parameters -#----------------------------- -task_run_met_gridstat_vx_ensprob_retop: - TN_RUN_MET_GRIDSTAT_VX_ENSPROB_RETOP: "run_MET_GridStat_vx_ensprob_RETOP" - NNODES_RUN_MET_GRIDSTAT_VX_ENSPROB_RETOP: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSPROB_RETOP: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSPROB_RETOP: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSPROB_RETOP: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSPROB_RETOP: 2 - -#---------------------------- -# run_met_pointstat_vx_ensprob_sfc config parameters -#----------------------------- -task_run_met_pointstat_vx_ensprob_sfc: - TN_RUN_MET_POINTSTAT_VX_ENSPROB_SFC: "run_MET_PointStat_vx_ensprob_SFC" - NNODES_RUN_MET_POINTSTAT_VX_ENSPROB_SFC: 1 - PPN_RUN_MET_POINTSTAT_VX_ENSPROB_SFC: 1 - MEM_RUN_MET_POINTSTAT_VX_ENSPROB_SFC: 2G - WTIME_RUN_MET_POINTSTAT_VX_ENSPROB_SFC: 01:00:00 - MAXTRIES_RUN_MET_POINTSTAT_VX_ENSPROB_SFC: 2 - -#---------------------------- -# run_met_pointstat_vx_ensprob_upa config parameters -#----------------------------- -task_run_met_pointstat_vx_ensprob_upa: - TN_RUN_MET_POINTSTAT_VX_ENSPROB_UPA: "run_MET_PointStat_vx_ensprob_UPA" - NNODES_RUN_MET_POINTSTAT_VX_ENSPROB_UPA: 1 - PPN_RUN_MET_POINTSTAT_VX_ENSPROB_UPA: 1 - MEM_RUN_MET_POINTSTAT_VX_ENSPROB_UPA: 2G - WTIME_RUN_MET_POINTSTAT_VX_ENSPROB_UPA: 01:00:00 - MAXTRIES_RUN_MET_POINTSTAT_VX_ENSPROB_UPA: 2 - -#---------------------------- -# AQM_ICS config parameters -#----------------------------- -task_aqm_ics: - TN_AQM_ICS: "aqm_ics" - NNODES_AQM_ICS: 1 - PPN_AQM_ICS: 1 - WTIME_AQM_ICS: 00:30:00 - MAXTRIES_AQM_ICS: 2 - -#---------------------------- -# AQM_LBCS config parameters -#----------------------------- -task_aqm_lbcs: - TN_AQM_LBCS: "aqm_lbcs" - NNODES_AQM_LBCS: 1 - PPN_AQM_LBCS: 128 - WTIME_AQM_LBCS: 00:30:00 - MAXTRIES_AQM_LBCS: 1 - -#---------------------------- -# NEXUS_GFS_SFC config parameters -#----------------------------- 
-task_nexus_gfs_sfc: - TN_NEXUS_GFS_SFC: "nexus_gfs_sfc" - NNODES_NEXUS_GFS_SFC: 1 - PPN_NEXUS_GFS_SFC: 1 - MEM_NEXUS_GFS_SFC: 2G - WTIME_NEXUS_GFS_SFC: 00:30:00 - MAXTRIES_NEXUS_GFS_SFC: 2 - NEXUS_GFS_SFC_OFFSET_HRS: 0 - -#---------------------------- -# NEXUS_EMISSION config parameters -#----------------------------- -task_nexus_emission: - TN_NEXUS_EMISSION: "nexus_emission" - NNODES_NEXUS_EMISSION: 4 - PPN_NEXUS_EMISSION: '{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_NEXUS_EMISSION }}' - WTIME_NEXUS_EMISSION: 01:00:00 - MAXTRIES_NEXUS_EMISSION: 2 - KMP_AFFINITY_NEXUS_EMISSION: "scatter" - OMP_NUM_THREADS_NEXUS_EMISSION: 2 - OMP_STACKSIZE_NEXUS_EMISSION: "1024m" - -#---------------------------- -# NEXUS_POST_SPLIT config parameters -#----------------------------- -task_nexus_post_split: - TN_NEXUS_POST_SPLIT: "nexus_post_split" - NNODES_NEXUS_POST_SPLIT: 1 - PPN_NEXUS_POST_SPLIT: 1 - WTIME_NEXUS_POST_SPLIT: 00:30:00 - MAXTRIES_NEXUS_POST_SPLIT: 2 - -#---------------------------- -# FIRE_EMISSION config parameters -#----------------------------- -task_fire_emission: - TN_FIRE_EMISSION: "fire_emission" - NNODES_FIRE_EMISSION: 1 - PPN_FIRE_EMISSION: 1 - MEM_FIRE_EMISSION: 24G - WTIME_FIRE_EMISSION: 00:30:00 - MAXTRIES_FIRE_EMISSION: 2 - AQM_FIRE_FILE_OFFSET_HRS: 0 - -#---------------------------- -# POINT_SOURCE config parameters -#----------------------------- -task_point_source: - TN_POINT_SOURCE: "point_source" - NNODES_POINT_SOURCE: 1 - PPN_POINT_SOURCE: 1 - WTIME_POINT_SOURCE: 01:00:00 - MAXTRIES_POINT_SOURCE: 2 - -#---------------------------- -# PRE_POST_STAT config parameters -#----------------------------- -task_pre_post_stat: - TN_PRE_POST_STAT: "pre_post_stat" - NNODES_PRE_POST_STAT: 1 - PPN_PRE_POST_STAT: 1 - WTIME_PRE_POST_STAT: 00:30:00 - MAXTRIES_PRE_POST_STAT: 2 - -#---------------------------- -# POST_STAT_O3 config parameters -#----------------------------- -task_post_stat_o3: - TN_POST_STAT_O3: "post_stat_o3" - NNODES_POST_STAT_O3: 1 - 
PPN_POST_STAT_O3: 1 - MEM_POST_STAT_O3: 120G - WTIME_POST_STAT_O3: 00:30:00 - MAXTRIES_POST_STAT_O3: 2 - KMP_AFFINITY_POST_STAT_O3: "scatter" - OMP_NUM_THREADS_POST_STAT_O3: 1 - OMP_STACKSIZE_POST_STAT_O3: "2056M" - -#---------------------------- -# POST_STAT_PM25 config parameters -#----------------------------- -task_post_stat_pm25: - TN_POST_STAT_PM25: "post_stat_pm25" - NNODES_POST_STAT_PM25: 1 - PPN_POST_STAT_PM25: 1 - MEM_POST_STAT_PM25: 120G - WTIME_POST_STAT_PM25: 00:30:00 - MAXTRIES_POST_STAT_PM25: 2 - KMP_AFFINITY_POST_STAT_PM25: "scatter" - OMP_NUM_THREADS_POST_STAT_PM25: 1 - OMP_STACKSIZE_POST_STAT_PM25: "2056M" - -#---------------------------- -# BIAS_CORRECTION_O3 config parameters -#----------------------------- -task_bias_correction_o3: - TN_BIAS_CORRECTION_O3: "bias_correction_o3" - NNODES_BIAS_CORRECTION_O3: 1 - PPN_BIAS_CORRECTION_O3: 1 - MEM_BIAS_CORRECTION_O3: 120G - WTIME_BIAS_CORRECTION_O3: 00:30:00 - MAXTRIES_BIAS_CORRECTION_O3: 2 - KMP_AFFINITY_BIAS_CORRECTION_O3: "scatter" - OMP_NUM_THREADS_BIAS_CORRECTION_O3: 32 - OMP_STACKSIZE_BIAS_CORRECTION_O3: "2056M" - -#---------------------------- -# BIAS_CORRECTION_PM25 config parameters -#----------------------------- -task_bias_correction_pm25: - TN_BIAS_CORRECTION_PM25: "bias_correction_pm25" - NNODES_BIAS_CORRECTION_PM25: 1 - PPN_BIAS_CORRECTION_PM25: 1 - MEM_BIAS_CORRECTION_PM25: 120G - WTIME_BIAS_CORRECTION_PM25: 00:30:00 - MAXTRIES_BIAS_CORRECTION_PM25: 2 - KMP_AFFINITY_BIAS_CORRECTION_PM25: "scatter" - OMP_NUM_THREADS_BIAS_CORRECTION_PM25: 32 - OMP_STACKSIZE_BIAS_CORRECTION_PM25: "2056M" - -#---------------------------- -# global config parameters -#----------------------------- -global: - # - #----------------------------------------------------------------------- - # - # Set parameters associated with outputting satellite fields in the UPP - # grib2 files using the Community Radiative Transfer Model (CRTM). 
- # - # USE_CRTM: - # Flag that defines whether external CRTM coefficient files have been - # staged by the user in order to output synthetic satellite products - # available within the UPP. If this is set to true, then the workflow - # will check for these files in the directory CRTM_DIR. Otherwise, it is - # assumed that no satellite fields are being requested in the UPP - # configuration. - # - # CRTM_DIR: - # This is the path to the top CRTM fix file directory. This is only used - # if USE_CRTM is set to true. - # - #----------------------------------------------------------------------- - # - USE_CRTM: false - CRTM_DIR: "" - # - #----------------------------------------------------------------------- - # - # Set parameters associated with running ensembles. Definitions: - # - # DO_ENSEMBLE: - # Flag that determines whether to run a set of ensemble forecasts (for - # each set of specified cycles). If this is set to true, NUM_ENS_MEMBERS - # forecasts are run for each cycle, each with a different set of stochastic - # seed values. Otherwise, a single forecast is run for each cycle. - # - # NUM_ENS_MEMBERS: - # The number of ensemble members to run if DO_ENSEMBLE is set to true. - # This variable also controls the naming of the ensemble member directories. - # For example, if this is set to 8, the member directories will be named - # mem1, mem2, ..., mem8. Not used if DO_ENSEMBLE is set to false. - # - # ENSMEM_NAMES: - # A list of names for the ensemble member names following the format - # mem001, mem002, etc. - # - # FV3_NML_ENSMEM_FPS: - # Paths to the ensemble member corresponding namelists in the - # experiment directory - # - # ENS_TIME_LAG_HRS: - # Time lag (in hours) to use for each ensemble member. 
- # - #----------------------------------------------------------------------- - # - DO_ENSEMBLE: false - NUM_ENS_MEMBERS: 0 - ENSMEM_NAMES: '{% for m in range(NUM_ENS_MEMBERS) %} "mem%03d, " % m {% endfor %}' - FV3_NML_ENSMEM_FPS: '{% for mem in ENSMEM_NAMES %}{{ [EXPTDIR, "%s_%s" % FV3_NML_FN, mem]|path_join }}{% endfor %}' - ENS_TIME_LAG_HRS: '[ {% for m in range(NUM_ENS_MEMBERS) %} 0, {% endfor %} ]' - # - #----------------------------------------------------------------------- - # - # Set default ad-hoc stochastic physics options. - # For detailed documentation of these parameters, see: - # https://stochastic-physics.readthedocs.io/en/ufs_public_release/namelist_options.html - # - #----------------------------------------------------------------------- - # - DO_SHUM: false - DO_SPPT: false - DO_SKEB: false - ISEED_SPPT: 1 - ISEED_SHUM: 2 - ISEED_SKEB: 3 - NEW_LSCALE: true - SHUM_MAG: 0.006 #Variable "shum" in input.nml - SHUM_LSCALE: 150000 - SHUM_TSCALE: 21600 #Variable "shum_tau" in input.nml - SHUM_INT: 3600 #Variable "shumint" in input.nml - SPPT_MAG: 0.7 #Variable "sppt" in input.nml - SPPT_LOGIT: true - SPPT_LSCALE: 150000 - SPPT_TSCALE: 21600 #Variable "sppt_tau" in input.nml - SPPT_INT: 3600 #Variable "spptint" in input.nml - SPPT_SFCLIMIT: true - SKEB_MAG: 0.5 #Variable "skeb" in input.nml - SKEB_LSCALE: 150000 - SKEB_TSCALE: 21600 #Variable "skeb_tau" in input.nml - SKEB_INT: 3600 #Variable "skebint" in input.nml - SKEBNORM: 1 - SKEB_VDOF: 10 - USE_ZMTNBLCK: false - # - #----------------------------------------------------------------------- - # - # Set default SPP stochastic physics options. Each SPP option is an array, - # applicable (in order) to the scheme/parameter listed in SPP_VAR_LIST. - # Enter each value of the array in config.yaml as shown below without commas - # or single quotes (e.g., SPP_VAR_LIST=( "pbl" "sfc" "mp" "rad" "gwd" ). - # Both commas and single quotes will be added by Jinja when creating the - # namelist. 
- # - # Note that SPP is currently only available for specific physics schemes - # used in the RAP/HRRR physics suite. Users need to be aware of which SDF - # is chosen when turning this option on. - # - # Patterns evolve and are applied at each time step. - # - #----------------------------------------------------------------------- - # - DO_SPP: false - SPP_VAR_LIST: [ "pbl", "sfc", "mp", "rad", "gwd" ] - SPP_MAG_LIST: [ 0.2, 0.2, 0.75, 0.2, 0.2 ] #Variable "spp_prt_list" in input.nml - SPP_LSCALE: [ 150000.0, 150000.0, 150000.0, 150000.0, 150000.0 ] - SPP_TSCALE: [ 21600.0, 21600.0, 21600.0, 21600.0, 21600.0 ] #Variable "spp_tau" in input.nml - SPP_SIGTOP1: [ 0.1, 0.1, 0.1, 0.1, 0.1 ] - SPP_SIGTOP2: [ 0.025, 0.025, 0.025, 0.025, 0.025 ] - SPP_STDDEV_CUTOFF: [ 1.5, 1.5, 2.5, 1.5, 1.5 ] - ISEED_SPP: [ 4, 5, 6, 7, 8 ] - # - #----------------------------------------------------------------------- - # - # Turn on SPP in Noah or RUC LSM (support for Noah MP is in progress). - # Please be aware of the SDF that you choose if you wish to turn on LSM - # SPP. - # - # SPP in LSM schemes is handled in the &nam_sfcperts namelist block - # instead of in &nam_sppperts, where all other SPP is implemented. - # - # Perturbations to soil moisture content (SMC) are only applied at the - # first time step. - # - # LSM perturbations include SMC - soil moisture content (volume fraction), - # VGF - vegetation fraction, ALB - albedo, SAL - salinity, - # EMI - emissivity, ZOL - surface roughness (cm), and STC - soil temperature. - # - # Only five perturbations at a time can be applied currently, but all seven - # are shown below. In addition, only one unique iseed value is allowed - # at the moment, and is used for each pattern. 
- # - DO_LSM_SPP: false #If true, sets lndp_type=2 - LSM_SPP_TSCALE: [ 21600, 21600, 21600, 21600, 21600, 21600, 21600 ] - LSM_SPP_LSCALE: [ 150000, 150000, 150000, 150000, 150000, 150000, 150000 ] - ISEED_LSM_SPP: [ 9 ] - LSM_SPP_VAR_LIST: [ "smc", "vgf", "alb", "sal", "emi", "zol", "stc" ] - LSM_SPP_MAG_LIST: [ 0.017, 0.001, 0.001, 0.001, 0.001, 0.001, 0.2 ] - # - #----------------------------------------------------------------------- - # - # HALO_BLEND: - # Number of rows into the computational domain that should be blended - # with the LBCs. To shut halo blending off, this can be set to zero. - # - #----------------------------------------------------------------------- - # - HALO_BLEND: 10 - # - #----------------------------------------------------------------------- - # - -#---------------------------- -# verification (vx) parameters -#----------------------------- -verification: - # Move some of the following to another section at some point. - # - # GET_OBS_LOCAL_MODULE_FN: - # Local task modulefile name for all GET_OBS_* tasks. - # - GET_OBS_LOCAL_MODULE_FN: 'get_obs' - # - # Templates for CCPA observation files. - # - OBS_CCPA_APCP01h_FN_TEMPLATE: '{valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.01h.hrap.conus.gb2' - OBS_CCPA_APCPgt01h_FN_TEMPLATE: '${OBS_CCPA_APCP01h_FN_TEMPLATE}_a${ACCUM_HH}h.nc' - OBS_NDAS_SFCorUPA_FN_TEMPLATE: 'prepbufr.ndas.{valid?fmt=%Y%m%d%H}' - OBS_NDAS_SFCorUPA_FN_METPROC_TEMPLATE: '${OBS_NDAS_SFCorUPA_FN_TEMPLATE}.nc' - # - # VX_LOCAL_MODULE_FN: - # Name (without extension) of the local module file for running the vx - # tasks in the workflow. - # - VX_LOCAL_MODULE_FN: 'run_vx' - # - # RUN_TASKS_METVX_DET: - # Flag that specifies whether to run deterministic verification. If set - # to True, this will run deterministic vx on the post-processed forecast - # output. 
This post-processed output may consist of a single forecast - # or an ensemble of foreasts, and it may be staged from previous runs of - # the SRW App or may be generated by running the TN_RUN_FCST task as part - # of the current SRW-App-generated experiment. - # - # RUN_TASKS_METVX_ENS: - # Flag that specifies whether to run ensemble verification. The ensemble - # forecast output on which vx will be run may be staged or generated by - # running an ensemble of forecasts with the weather model as part of the - # current SRW-App-generated experiment. - # - RUN_TASKS_METVX_DET: False - RUN_TASKS_METVX_ENS: False - # - # VX_FCST_MODEL_NAME: - # String that specifies a descriptive name for the model being verified. - # This is used in forming the names of the verification output files as - # well as in the contents of those files. - # - # VX_FIELDS: - # The fields or groups of fields on which to run verification. - # - # VX_APCP_ACCUMS_HH: - # The 2-digit accumulation periods (in units of hours) to consider for - # APCP (accumulated precipitation). If VX_FIELDS contains "APCP", then - # VX_APCP_ACCUMS_HH must contain at least one element. If not, - # VX_APCP_ACCUMS_HH will be ignored. - # - VX_FCST_MODEL_NAME: '{{ nco.NET }}.{{ task_run_post.POST_OUTPUT_DOMAIN_NAME }}' - VX_FIELDS: [ "APCP", "REFC", "RETOP", "SFC", "UPA" ] - VX_APCP_ACCUMS_HH: [ "01", "03", "06", "24" ] - # - # VX_FCST_INPUT_BASEDIR: - # Location of top-level directory containing forecast (but not obs) files - # that will be used as input into METplus for verification. If not - # specified, this gets set to EXPTDIR. - # - # VX_OUTPUT_BASEDIR: - # Top-level directory in which METplus will place its output. - # - VX_FCST_INPUT_BASEDIR: '{{ workflow.EXPTDIR if ((workflow_switches.RUN_TASK_RUN_FCST and task_run_fcst.WRITE_DOPOST) or workflow_switches.RUN_TASK_RUN_POST) }}' - VX_OUTPUT_BASEDIR: '{{ workflow.EXPTDIR }}' - # - # File name and path templates are used in the verification tasks. 
- # - FCST_SUBDIR_TEMPLATE: '{init?fmt=%Y%m%d%H?shift=-${time_lag}}${SLASH_ENSMEM_SUBDIR_OR_NULL}/postprd' - FCST_FN_TEMPLATE: '${NET}.t{init?fmt=%H?shift=-${time_lag}}z.prslev.f{lead?fmt=%HHH?shift=${time_lag}}.${POST_OUTPUT_DOMAIN_NAME}.grib2' - FCST_FN_METPROC_TEMPLATE: '${NET}.t{init?fmt=%H}z.prslev.f{lead?fmt=%HHH}.${POST_OUTPUT_DOMAIN_NAME}_a${ACCUM_HH}h.nc' - # - # For verification tasks that need observational data, this specifies - # the maximum number of observation files that may be missing. If more - # than this number are missing, the verification task will error out. - # - # Note that this is a crude way of checking that there are enough obs to - # conduct verification since this number should probably depend on the - # field being verified, the time interval between observations, the - # length of the forecast, etc. An alternative may be to specify the - # maximum allowed fraction of obs files that can be missing (i.e. the - # number missing divided by the number that are expected to exist). 
- # - NUM_MISSING_OBS_FILES_MAX: 2 - -#---------------------------- -# CPL_AQM config parameters -#----------------------------- -cpl_aqm_parm: - # - #----------------------------------------------------------------------- - # - # CPL_AQM: - # Coupling flag for air quality modeling - # - # DO_AQM_DUST: - # Flag turning on/off AQM dust option in AQM_RC - # - # DO_AQM_CANOPY - # Flag turning on/off AQM canopy option in AQM_RC - # - # DO_AQM_PRODUCT - # Flag turning on/off AQM output products in AQM_RC - # - # DO_AQM_CHEM_LBCS: - # Add chemical LBCs to chemical LBCs - # - # DO_AQM_GEFS_LBCS: - # Add GEFS aerosol LBCs to chemical LBCs - # - # DO_AQM_SAVE_AIRNOW_HIST: - # Save bias-correction airnow training data - # - # DO_AQM_SAVE_FIRE: - # Archive fire emission file to HPSS - # - # FIXaqmconfig: - # Configuration directory for AQM - # - # FIXaqmbio: - # Path to the directory containing AQM bio files - # - # AQM_BIO_FILE: - # File name of AQM BIO file - # - # FIXaqmdust: - # Path to the directory containing AQM dust file - # - # AQM_DUST_FILE_PREFIX: - # Frefix of AQM dust file - # - # AQM_DUST_FILE_SUFFIX: - # Suffix and extension of AQM dust file - # - # FIXaqmcanopy: - # Path to the directory containing AQM canopy files - # - # AQM_CANOPY_FILE_PREFIX: - # File name of AQM canopy file - # - # AQM_CANOPY_FILE_SUFFIX: - # Suffix and extension of AQM CANOPY file - # - # DCOMINfire: - # Path to the directory containing AQM fire emission files (RAVE) - # - # AQM_FIRE_FILE_PREFIX: - # Prefix of AQM FIRE file - # - # AQM_FIRE_FILE_SUFFIX: - # Suffix and extension of AQM FIRE file - # - # AQM_FIRE_ARCHV_DIR: - # Path to the archive directory for RAVE emission files on HPSS - # - # AQM_RC_FIRE_FREQUENCY: - # Fire frequency in aqm.rc - # - # AQM_RC_PRODUCT_FN: - # File name of AQM output products - # - # AQM_RC_PRODUCT_FREQUENCY: - # Frequency of AQM output products - # - # FIXaqmchem_lbcs: - # Path to the directory containing chemical LBC files - # - # AQM_LBCS_FILES: - # 
File name of chemical LBCs - # - # DCOMINgefs: - # Path to the directory containing GEFS aerosol LBC files - # - # AQM_GEFS_FILE_PREFIX: - # Prefix of AQM GEFS file ("geaer" or "gfs") - # - # AQM_GEFS_FILE_CYC: - # Cycle of the GEFS aerosol LBC files only if it is fixed - # - # COMINemis: - # Same as GRID_DIR but for the the air quality emission generation task. - # Should be blank for the default value specified in setup.sh - # - # FIXaqmnexus: - # Directory containing grid_spec files as the input file of nexus - # - # FIXaqmfire: - # Directory containing climatology fire emissions - # - # NEXUS_GRID_FN: - # File name of the input grid_spec file of nexus - # - # NUM_SPLIT_NEXUS: - # Number of split nexus emission tasks - # - # NEXUS_GFS_SFC_OFFSET_HRS: 0 - # Time offset when retrieving gfs surface data files - # - # FIXaqmnexus_gfs_sfc: - # Path to directory containing GFS surface data files - # This is set to COMINgfs when DO_REAL_TIME=TRUE. - # - # NEXUS_GFS_SFC_ARCHV_DIR: - # Path to archive directory for gfs surface files on HPSS - # - # COMINemispt: - # Path to the directory containing point source files - # - # DCOMINairnow: - # Path to the directory containing AIRNOW observation data - # - # COMINbicor: - # Path of reading in historical training data for biascorrection - # - # COMOUTbicor: - # Path to save the current cycle's model output and AirNow obs as training data for future use - # $COMINbicor and $COMOUTbicor can be distuigshed by the ${yyyy}${mm}$dd under the same location - #----------------------------------------------------------------------- - # - CPL_AQM: false - - DO_AQM_DUST: true - DO_AQM_CANOPY: false - DO_AQM_PRODUCT: true - DO_AQM_CHEM_LBCS: true - DO_AQM_GEFS_LBCS: false - DO_AQM_SAVE_AIRNOW_HIST: false - DO_AQM_SAVE_FIRE: false - - AQM_BIO_FILE: "BEIS_SARC401.ncf" - - AQM_DUST_FILE_PREFIX: "FENGSHA_p8_10km_inputs" - AQM_DUST_FILE_SUFFIX: ".nc" - - AQM_CANOPY_FILE_PREFIX: "gfs.t12z.geo" - AQM_CANOPY_FILE_SUFFIX: ".canopy_regrid.nc" - - 
DCOMINfire: "" - - AQM_FIRE_FILE_PREFIX: "GBBEPx_C401GRID.emissions_v003" - AQM_FIRE_FILE_SUFFIX: ".nc" - AQM_FIRE_ARCHV_DIR: "/path/to/archive/dir/for/RAVE/on/HPSS" - - AQM_RC_FIRE_FREQUENCY: "static" - AQM_RC_PRODUCT_FN: "aqm.prod.nc" - AQM_RC_PRODUCT_FREQUENCY: "hourly" - - AQM_LBCS_FILES: "am4_bndy_c793.2019.v1.nc" - - COMINgefs: "" - AQM_GEFS_FILE_PREFIX: "geaer" - AQM_GEFS_FILE_CYC: "" - - COMINemis: "" - FIXaqmconfig: '{{ [user.FIXaqm, "aqm/epa/data"]|path_join }}' - FIXaqmfire: '{{ [user.FIXaqm, "fire"]|path_join }}' - FIXaqmbio: '{{ [user.FIXaqm, "bio"]|path_join }}' - FIXaqmdust: '{{ [user.FIXaqm, "FENGSHA"]|path_join }}' - FIXaqmcanopy: '{{ [user.FIXaqm, "canopy"]|path_join }}' - FIXaqmchem_lbcs: '{{ [user.FIXaqm, "chem_lbcs"]|path_join }}' - FIXaqmnexus: '{{ [user.FIXaqm, "nexus"]|path_join }}' - FIXaqmnexus_gfs_sfc: '{{ [user.FIXaqm, "gfs"]|path_join }}' - - NEXUS_GRID_FN: "grid_spec_GSD_HRRR_25km.nc" - NUM_SPLIT_NEXUS: 3 - NEXUS_GFS_SFC_ARCHV_DIR: "/NCEPPROD/hpssprod/runhistory" - - COMINemispt: "/path/to/point/source/base/directory" - - DCOMINairnow: "/path/to/airnow/obaservation/data" - COMINbicor: "/path/to/historical/airnow/data/dir" - COMOUTbicor: "/path/to/historical/airnow/data/dir" diff --git a/ush/constants.yaml b/ush/constants.yaml deleted file mode 100755 index d1690d6f43..0000000000 --- a/ush/constants.yaml +++ /dev/null @@ -1,71 +0,0 @@ -constants: - # - #----------------------------------------------------------------------- - # - # Mathematical and physical constants. - # - #----------------------------------------------------------------------- - # - PI_GEOM: 3.14159265358979323846264338327 - DEGS_PER_RADIAN: 57.29577951308232087679 - RADIUS_EARTH: 6371200.0 - SECS_PER_HOUR: 3600.0 - # - #----------------------------------------------------------------------- - # - # Any regional model must be supplied lateral boundary conditions (in - # addition to initial conditions) to be able to perform a forecast. 
In - # the FV3-LAM model, these boundary conditions (BCs) are supplied using - # a "halo" of grid cells around the regional domain that extend beyond - # the boundary of the domain. The model is formulated such that along - # with files containing these BCs, it needs as input the following files - # (in NetCDF format): - # - # 1) A grid file that includes a halo of 3 cells beyond the boundary of - # the domain. - # 2) A grid file that includes a halo of 4 cells beyond the boundary of - # the domain. - # 3) A (filtered) orography file without a halo, i.e. a halo of width - # 0 cells. - # 4) A (filtered) orography file that includes a halo of 4 cells beyond - # the boundary of the domain. - # - # Note that the regional grid is referred to as "tile 7" in the code. - # Therefore, we will set the constants to a regional, tile 7 with - # these config options: - # - # GTYPE: - # Grid type string, set to regional for SRW - # - # TILE_RGNL: 7 - # Tile number set to 7 for a regional grid in SRW - # - # We will let: - # - # * NH0 denote the width (in units of number of cells on tile 7) of - # the 0-cell-wide halo, i.e. NH0 = 0; - # - # * NH3 denote the width (in units of number of cells on tile 7) of - # the 3-cell-wide halo, i.e. NH3 = 3; and - # - # * NH4 denote the width (in units of number of cells on tile 7) of - # the 4-cell-wide halo, i.e. NH4 = 4. - # - # We define these variables next. - # - #----------------------------------------------------------------------- - # - GTYPE: regional - TILE_RGNL: 7 - NH0: 0 - NH3: 3 - NH4: 4 - # - #----------------------------------------------------------------------- - # - # Valid values that a user may set a boolean variable to (e.g. in the - # SRW App's experiment configuration file). 
- # - #----------------------------------------------------------------------- - # - valid_vals_BOOLEAN: [ "TRUE", "true", "YES", "yes", "FALSE", "false", "NO", "no" ] diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py deleted file mode 100755 index aca81bbf08..0000000000 --- a/ush/generate_FV3LAM_wflow.py +++ /dev/null @@ -1,963 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -import subprocess -import unittest -import logging -from multiprocessing import Process -from textwrap import dedent -from datetime import datetime, timedelta - -from python_utils import ( - log_info, - import_vars, - export_vars, - load_config_file, - update_dict, - cp_vrfy, - ln_vrfy, - mkdir_vrfy, - mv_vrfy, - rm_vrfy, - run_command, - date_to_str, - define_macos_utilities, - create_symlink_to_file, - check_for_preexist_dir_file, - cfg_to_yaml_str, - find_pattern_in_str, - set_env_var, - get_env_var, - lowercase, - flatten_dict, -) - -from setup import setup -from set_FV3nml_sfc_climo_filenames import set_FV3nml_sfc_climo_filenames -from get_crontab_contents import add_crontab_line -from fill_jinja_template import fill_jinja_template -from set_namelist import set_namelist -from check_python_version import check_python_version -from create_ecflow_scripts import create_ecflow_scripts - -def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False) -> str: - """Function to setup a forecast experiment and create a workflow - (according to the parameters specified in the config file) - - Args: - ushdir (str) : The full path of the ush/ directory where this script is located - logfile (str) : The name of the file where logging is written - debug (bool): Enable extra output for debugging - Returns: - EXPTDIR (str) : The full path of the directory where this experiment has been generated - """ - - # Set up logging to write to screen and logfile - setup_logging(logfile, debug) - - # Check python version and presence of some 
non-standard packages - check_python_version() - - # Note start of workflow generation - log_info( - """ - ======================================================================== - Starting experiment generation... - ========================================================================""" - ) - - # The setup function reads the user configuration file and fills in - # non-user-specified values from config_defaults.yaml - expt_config = setup(ushdir,debug=debug) - - verbose = expt_config["workflow"]["VERBOSE"] - # - # ----------------------------------------------------------------------- - # - # Set the full path to the experiment's rocoto workflow xml file. This - # file will be placed at the top level of the experiment directory and - # then used by rocoto to run the workflow. - # - # ----------------------------------------------------------------------- - # - wflow_xml_fn = expt_config["workflow"]["WFLOW_XML_FN"] - wflow_xml_fp = os.path.join( - expt_config["workflow"]["EXPTDIR"], - wflow_xml_fn, - ) - # - # ----------------------------------------------------------------------- - # - # Create a multiline variable that consists of a yaml-compliant string - # specifying the values that the jinja variables in the template rocoto - # XML should be set to. These values are set either in the user-specified - # workflow configuration file (EXPT_CONFIG_FN) or in the setup() function - # called above. Then call the python script that generates the XML. 
- # - # ----------------------------------------------------------------------- - # - if expt_config["platform"]["WORKFLOW_MANAGER"] == "rocoto": - - template_xml_fp = os.path.join( - expt_config["user"]["PARMdir"], - wflow_xml_fn, - ) - - log_info( - f""" - Creating rocoto workflow XML file (WFLOW_XML_FP) from jinja template XML - file (template_xml_fp): - template_xml_fp = '{template_xml_fp}' - WFLOW_XML_FP = '{wflow_xml_fp}'""" - ) - - # - # Dictionary of settings to pass to fill_jinja - # - settings = {} - for k, v in flatten_dict(expt_config).items(): - settings[lowercase(k)] = v - - ensmem_indx_name = "" - uscore_ensmem_name = "" - slash_ensmem_subdir = "" - if expt_config["global"]["DO_ENSEMBLE"]: - ensmem_indx_name = "mem" - uscore_ensmem_name = f"_mem#{ensmem_indx_name}#" - slash_ensmem_subdir = f"/mem#{ensmem_indx_name}#" - - dt_atmos = expt_config["task_run_fcst"]["DT_ATMOS"] - date_first_cycl = expt_config["workflow"]["DATE_FIRST_CYCL"] - date_last_cycl = expt_config["workflow"]["DATE_LAST_CYCL"] - first_file_time = date_first_cycl + timedelta(seconds=dt_atmos) - fcst_threads = expt_config["task_run_fcst"]["OMP_NUM_THREADS_RUN_FCST"] - - if date_first_cycl == date_last_cycl: - cycl_next = date_to_str(date_first_cycl, format="%Y%m%d%H00") - else: - cycl_next = date_to_str(date_first_cycl + timedelta(hours=expt_config['workflow']['INCR_CYCL_FREQ']), format="%Y%m%d%H00") - - incr_cycl_freq = expt_config["workflow"]["INCR_CYCL_FREQ"] - date_2nd_cycl = date_to_str(date_first_cycl + timedelta(hours=incr_cycl_freq), format="%Y%m%d%H00") - date_3rd_cycl = date_to_str(date_first_cycl + timedelta(hours=incr_cycl_freq*2), format="%Y%m%d%H00") - date_4th_cycl = date_to_str(date_first_cycl + timedelta(hours=incr_cycl_freq*3), format="%Y%m%d%H00") - fcst_len_hrs = expt_config["workflow"]["FCST_LEN_HRS"] - fcst_len_cycl = expt_config["workflow"]["FCST_LEN_CYCL"] - num_fcst_len_cycl = len(fcst_len_cycl) - if fcst_len_hrs == -1: - num_cyc_days = (date_last_cycl - 
date_first_cycl).days - else: - num_cyc_days = 0 - - date_1st_last_cycl = date_to_str(date_first_cycl + timedelta(hours=24*num_cyc_days), format="%Y%m%d%H00") - date_2nd_last_cycl = date_to_str(date_first_cycl + timedelta(hours=incr_cycl_freq) + timedelta(hours=24*num_cyc_days), format="%Y%m%d%H00") - date_3rd_last_cycl = date_to_str(date_first_cycl + timedelta(hours=incr_cycl_freq*2) + timedelta(hours=24*num_cyc_days), format="%Y%m%d%H00") - date_4th_last_cycl = date_to_str(date_first_cycl + timedelta(hours=incr_cycl_freq*3) + timedelta(hours=24*num_cyc_days), format="%Y%m%d%H00") - - settings.update( - { - # - # Number of cores used for a task - # - "ncores_run_fcst": expt_config["task_run_fcst"]["PE_MEMBER01"], - "native_run_fcst": f"--cpus-per-task {fcst_threads} --exclusive", - "native_nexus_emission": f"--cpus-per-task {expt_config['task_nexus_emission']['OMP_NUM_THREADS_NEXUS_EMISSION']}", - # - # Parameters that determine the set of cycles to run. - # - "date_first_cycl": date_to_str(date_first_cycl, format="%Y%m%d%H00"), - "date_last_cycl": date_to_str(date_last_cycl, format="%Y%m%d%H00"), - "cdate_first_cycl": date_first_cycl, - "cycl_freq": f"{expt_config['workflow']['INCR_CYCL_FREQ']:02d}:00:00", - "cycl_next": cycl_next, - "date_2nd_cycl": date_2nd_cycl, - "date_3rd_cycl": date_3rd_cycl, - "date_4th_cycl": date_4th_cycl, - "date_1st_last_cycl": date_1st_last_cycl, - "date_2nd_last_cycl": date_2nd_last_cycl, - "date_3rd_last_cycl": date_3rd_last_cycl, - "date_4th_last_cycl": date_4th_last_cycl, - "fcst_len_hrs": fcst_len_hrs, - "fcst_len_cycl": fcst_len_cycl, - "num_fcst_len_cycl": num_fcst_len_cycl, - # - # Ensemble-related parameters. 
- # - "ensmem_indx_name": ensmem_indx_name, - "uscore_ensmem_name": uscore_ensmem_name, - "slash_ensmem_subdir": slash_ensmem_subdir, - # - # Parameters associated with subhourly post-processed output - # - "delta_min": expt_config["task_run_post"]["DT_SUBHOURLY_POST_MNTS"], - "first_fv3_file_tstr": first_file_time.strftime("000:%M:%S"), - } - ) - - # Log "settings" variable. - settings_str = cfg_to_yaml_str(settings) - - log_info( - f""" - The variable 'settings' specifying values of the rococo XML variables - has been set as follows: - #----------------------------------------------------------------------- - settings =\n\n""", - verbose=verbose, - ) - log_info(settings_str, verbose=verbose) - - # - # Call the python script to generate the experiment's actual XML file - # from the jinja template file. - # - try: - fill_jinja_template( - ["-q", "-u", settings_str, "-t", template_xml_fp, "-o", wflow_xml_fp] - ) - except: - logging.info( - dedent( - f""" - Variable settings specified on command line for - fill_jinja_template.py:\n - settings =\n\n""" - ) - + settings_str - ) - raise Exception( - dedent( - f""" - Call to python script fill_jinja_template.py to create a rocoto workflow - XML file from a template file failed. Parameters passed to this script - are: - Full path to template rocoto XML file: - template_xml_fp = '{template_xml_fp}' - Full path to output rocoto XML file: - WFLOW_XML_FP = '{wflow_xml_fp}' - """ - ) - ) - # - # ----------------------------------------------------------------------- - # - # Create a symlink in the experiment directory that points to the workflow - # (re)launch script. 
- # - # ----------------------------------------------------------------------- - # - exptdir = expt_config["workflow"]["EXPTDIR"] - wflow_launch_script_fp = expt_config["workflow"]["WFLOW_LAUNCH_SCRIPT_FP"] - wflow_launch_script_fn = expt_config["workflow"]["WFLOW_LAUNCH_SCRIPT_FN"] - log_info( - f""" - Creating symlink in the experiment directory (EXPTDIR) that points to the - workflow launch script (WFLOW_LAUNCH_SCRIPT_FP): - EXPTDIR = '{exptdir}' - WFLOW_LAUNCH_SCRIPT_FP = '{wflow_launch_script_fp}'""", - verbose=verbose, - ) - - create_symlink_to_file( - wflow_launch_script_fp, os.path.join(exptdir, wflow_launch_script_fn), False - ) - - elif expt_config["platform"]["WORKFLOW_MANAGER"] == "ecflow": - - global_var_defns_fp = expt_config["workflow"]["GLOBAL_VAR_DEFNS_FP"] - homeaqm = expt_config["user"]["HOMEaqm"] - home_ecf = f"{homeaqm}/ecf" - rm_vrfy("-rf",f"{home_ecf}") - - # create ecflow definition file and job cards - create_ecflow_scripts(global_var_defns_fp) - - # - # ----------------------------------------------------------------------- - # - # If USE_CRON_TO_RELAUNCH is set to TRUE, add a line to the user's - # cron table to call the (re)launch script every - # CRON_RELAUNCH_INTVL_MNTS minutes. - # - # ----------------------------------------------------------------------- - # - # From here on out, going back to setting variables for everything - # in the flattened expt_config dictionary - # TODO: Reference all these variables in their respective - # dictionaries, instead. 
- import_vars(dictionary=flatten_dict(expt_config)) - export_vars(source_dict=flatten_dict(expt_config)) - - if expt_config["platform"]["WORKFLOW_MANAGER"] == "rocoto": - if USE_CRON_TO_RELAUNCH: - add_crontab_line() - - # - # Copy or symlink fix files - # - if SYMLINK_FIX_FILES: - - log_info( - f""" - Symlinking fixed files from system directory (FIXaqm_sav) to a subdirectory (FIXaqm): - FIXaqm_sav = '{FIXaqm_sav}' - FIXaqm = '{FIXaqm}'""", - verbose=verbose, - ) - ln_vrfy(f"""-fsn '{FIXaqm_sav}/fix_am' '{FIXaqm}'""") - ln_vrfy(f"""-fsn '{FIXaqm_sav}/nexus' '{FIXaqm}'""") - ln_vrfy(f"""-fsn '{FIXaqm_sav}/fix_aer' '{FIXaqm}'""") - ln_vrfy(f"""-fsn '{FIXaqm_sav}/fix_lut' '{FIXaqm}'""") - ln_vrfy(f"""-fsn '{FIXaqm_sav}/fix_orog' '{FIXaqm}'""") - ln_vrfy(f"""-fsn '{FIXaqm_sav}/fix_sfc_climo' '{FIXaqm}'""") - ln_vrfy(f"""-fsn '{FIXaqm_sav}/ufs' '{FIXaqm}'""") - ln_vrfy(f"""-fsn '{FIXaqm_sav}/FENGSHA' '{FIXaqm}'""") - ln_vrfy(f"""-fsn '{FIXaqm_sav}/chem_lbcs' '{FIXaqm}'""") - ln_vrfy(f"""-fsn '{FIXaqm_sav}/aqm' '{FIXaqm}'""") - ln_vrfy(f"""-fsn '{FIXaqm_sav}/fire' '{FIXaqm}'""") - ln_vrfy(f"""-fsn '{FIXaqm_sav}/gfs' '{FIXaqm}'""") - ln_vrfy(f"""-fsn '{FIXaqm_sav}/canopy' '{FIXaqm}'""") - ln_vrfy(f"""-fsn '{FIXaqm_sav}/bio' '{FIXaqm}'""") - #else: - - # log_info( - # f""" - # Copying fixed files from system directory (FIXgsm) to a subdirectory (FIXam): - # FIXgsm = '{FIXgsm}' - # FIXam = '{FIXam}'""", - # verbose=verbose, - # ) - - # check_for_preexist_dir_file(FIXam, "delete") - # mkdir_vrfy("-p", FIXam) - # mkdir_vrfy("-p", os.path.join(FIXam, "fix_co2_proj")) - - # num_files = len(FIXgsm_FILES_TO_COPY_TO_FIXam) - # for i in range(num_files): - # fn = f"{FIXgsm_FILES_TO_COPY_TO_FIXam[i]}" - # cp_vrfy(os.path.join(FIXgsm, fn), os.path.join(FIXam, fn)) - - # ----------------------------------------------------------------------- - # - # Copy MERRA2 aerosol climatology data. 
- # - # ----------------------------------------------------------------------- - # - if USE_MERRA_CLIMO: - log_info( - f""" - Copying MERRA2 aerosol climatology data files from system directory - (FIXaer/FIXlut) to a subdirectory (FIXclim) in the experiment directory: - FIXaer = '{FIXaer}' - FIXlut = '{FIXlut}' - FIXclim = '{FIXclim}'""", - verbose=verbose, - ) - - check_for_preexist_dir_file(FIXclim, "delete") - mkdir_vrfy("-p", FIXclim) - - if SYMLINK_FIX_FILES: - ln_vrfy("-fsn", os.path.join(FIXaer, "merra2.aerclim*.nc"), FIXclim) - ln_vrfy("-fsn", os.path.join(FIXlut, "optics*.dat"), FIXclim) - else: - cp_vrfy(os.path.join(FIXaer, "merra2.aerclim*.nc"), FIXclim) - cp_vrfy(os.path.join(FIXlut, "optics*.dat"), FIXclim) - # - # ----------------------------------------------------------------------- - # - # Copy templates of various input files to the experiment directory. - # - # ----------------------------------------------------------------------- - # - log_info( - f""" - Copying templates of various input files to the experiment directory...""", - verbose=verbose, - ) - - log_info( - f""" - Copying the template data table file to the experiment directory...""", - verbose=verbose, - ) - cp_vrfy(DATA_TABLE_TMPL_FP, DATA_TABLE_FP) - - log_info( - f""" - Copying the template field table file to the experiment directory...""", - verbose=verbose, - ) - cp_vrfy(FIELD_TABLE_TMPL_FP, FIELD_TABLE_FP) - - # - # Copy the CCPP physics suite definition file from its location in the - # clone of the FV3 code repository to the experiment directory (EXPT- - # DIR). - # - log_info( - f""" - Copying the CCPP physics suite definition XML file from its location in - the forecast model directory structure to the experiment directory...""", - verbose=verbose, - ) - cp_vrfy(CCPP_PHYS_SUITE_IN_CCPP_FP, CCPP_PHYS_SUITE_FP) - # - # Copy the field dictionary file from its location in the - # clone of the FV3 code repository to the experiment directory (EXPT- - # DIR). 
- # - log_info( - f""" - Copying the field dictionary file from its location in the forecast - model directory structure to the experiment directory...""", - verbose=verbose, - ) - cp_vrfy(FIELD_DICT_IN_UWM_FP, FIELD_DICT_FP) - # - # ----------------------------------------------------------------------- - # - # Set parameters in the FV3-LAM namelist file. - # - # ----------------------------------------------------------------------- - # - log_info( - f""" - Setting parameters in weather model's namelist file (FV3_NML_FP): - FV3_NML_FP = '{FV3_NML_FP}'""" - ) - # - # Set npx and npy, which are just NX plus 1 and NY plus 1, respectively. - # These need to be set in the FV3-LAM Fortran namelist file. They represent - # the number of cell vertices in the x and y directions on the regional - # grid. - # - npx = NX + 1 - npy = NY + 1 - # - # For the physics suites that use RUC LSM, set the parameter kice to 9, - # Otherwise, leave it unspecified (which means it gets set to the default - # value in the forecast model). - # - kice = None - if SDF_USES_RUC_LSM: - kice = 9 - # - # Set lsoil, which is the number of input soil levels provided in the - # chgres_cube output NetCDF file. This is the same as the parameter - # nsoill_out in the namelist file for chgres_cube. [On the other hand, - # the parameter lsoil_lsm (not set here but set in input.nml.FV3 and/or - # FV3.input.yml) is the number of soil levels that the LSM scheme in the - # forecast model will run with.] Here, we use the same approach to set - # lsoil as the one used to set nsoill_out in exregional_make_ics.sh. - # See that script for details. - # - # NOTE: - # May want to remove lsoil from FV3.input.yml (and maybe input.nml.FV3). - # Also, may want to set lsm here as well depending on SDF_USES_RUC_LSM. 
- # - lsoil = 4 - if (EXTRN_MDL_NAME_ICS == "HRRR" or EXTRN_MDL_NAME_ICS == "RAP") and ( - SDF_USES_RUC_LSM - ): - lsoil = 9 - if CCPP_PHYS_SUITE == "FV3_GFS_v15_thompson_mynn_lam3km": - lsoil = "" - # - # Create a multiline variable that consists of a yaml-compliant string - # specifying the values that the namelist variables that are physics- - # suite-independent need to be set to. Below, this variable will be - # passed to a python script that will in turn set the values of these - # variables in the namelist file. - # - # IMPORTANT: - # If we want a namelist variable to be removed from the namelist file, - # in the "settings" variable below, we need to set its value to the - # string "null". This is equivalent to setting its value to - # !!python/none - # in the base namelist file specified by FV3_NML_BASE_SUITE_FP or the - # suite-specific yaml settings file specified by FV3_NML_YAML_CONFIG_FP. - # - # It turns out that setting the variable to an empty string also works - # to remove it from the namelist! Which is better to use?? - # - settings = {} - settings["atmos_model_nml"] = { - "blocksize": BLOCKSIZE, - "ccpp_suite": CCPP_PHYS_SUITE, - } - - fv_core_nml_dict = {} - fv_core_nml_dict.update({ - "target_lon": LON_CTR, - "target_lat": LAT_CTR, - "nrows_blend": HALO_BLEND, - # - # Question: - # For a ESGgrid type grid, what should stretch_fac be set to? This depends - # on how the FV3 code uses the stretch_fac parameter in the namelist file. - # Recall that for a ESGgrid, it gets set in the function set_gridparams_ESGgrid(.sh) - # to something like 0.9999, but is it ok to set it to that here in the - # FV3 namelist file? 
- # - "stretch_fac": STRETCH_FAC, - "npx": npx, - "npy": npy, - "layout": [LAYOUT_X, LAYOUT_Y], - "bc_update_interval": LBC_SPEC_INTVL_HRS, - }) - if ( CCPP_PHYS_SUITE == "FV3_GFS_2017_gfdl_mp" or - CCPP_PHYS_SUITE == "FV3_GFS_2017_gfdlmp_regional" or - CCPP_PHYS_SUITE == "FV3_GFS_v15p2" ): - if CPL_AQM: - fv_core_nml_dict.update({ - "dnats": 5 - }) - else: - fv_core_nml_dict.update({ - "dnats": 1 - }) - elif CCPP_PHYS_SUITE == "FV3_GFS_v16": - if CPL_AQM: - fv_core_nml_dict.update({ - "hord_tr": 8, - "dnats": 5, - "nord": 2 - }) - else: - fv_core_nml_dict.update({ - "dnats": 1 - }) - elif CCPP_PHYS_SUITE == "FV3_GFS_v17_p8": - if CPL_AQM: - fv_core_nml_dict.update({ - "dnats": 4 - }) - else: - fv_core_nml_dict.update({ - "dnats": 0 - }) - - settings["fv_core_nml"] = fv_core_nml_dict - - gfs_physics_nml_dict = {} - gfs_physics_nml_dict.update({ - "kice": kice or None, - "lsoil": lsoil or None, - "do_shum": DO_SHUM, - "do_sppt": DO_SPPT, - "do_skeb": DO_SKEB, - "do_spp": DO_SPP, - "n_var_spp": N_VAR_SPP, - "n_var_lndp": N_VAR_LNDP, - "lndp_type": LNDP_TYPE, - "fhcyc": FHCYC_LSM_SPP_OR_NOT, - }) - if CPL_AQM: - gfs_physics_nml_dict.update({ - "cplaqm": True, - "cplocn2atm": False, - "fscav_aero": ["aacd:0.0", "acet:0.0", "acrolein:0.0", "acro_primary:0.0", "ald2:0.0", - "ald2_primary:0.0", "aldx:0.0", "benzene:0.0", "butadiene13:0.0", "cat1:0.0", - "cl2:0.0", "clno2:0.0", "co:0.0", "cres:0.0", "cron:0.0", - "ech4:0.0", "epox:0.0", "eth:0.0", "etha:0.0", "ethy:0.0", - "etoh:0.0", "facd:0.0", "fmcl:0.0", "form:0.0", "form_primary:0.0", - "gly:0.0", "glyd:0.0", "h2o2:0.0", "hcl:0.0", "hg:0.0", - "hgiigas:0.0", "hno3:0.0", "hocl:0.0", "hono:0.0", "hpld:0.0", - "intr:0.0", "iole:0.0", "isop:0.0", "ispd:0.0", "ispx:0.0", - "ket:0.0", "meoh:0.0", "mepx:0.0", "mgly:0.0", "n2o5:0.0", - "naph:0.0", "no:0.0", "no2:0.0", "no3:0.0", "ntr1:0.0", - "ntr2:0.0", "o3:0.0", "ole:0.0", "opan:0.0", "open:0.0", - "opo3:0.0", "pacd:0.0", "pan:0.0", "panx:0.0", "par:0.0", - "pcvoc:0.0", 
"pna:0.0", "prpa:0.0", "rooh:0.0", "sesq:0.0", - "so2:0.0", "soaalk:0.0", "sulf:0.0", "terp:0.0", "tol:0.0", - "tolu:0.0", "vivpo1:0.0", "vlvoo1:0.0", "vlvoo2:0.0", "vlvpo1:0.0", - "vsvoo1:0.0", "vsvoo2:0.0", "vsvoo3:0.0", "vsvpo1:0.0", "vsvpo2:0.0", - "vsvpo3:0.0", "xopn:0.0", "xylmn:0.0", "*:0.2" ] - }) - settings["gfs_physics_nml"] = gfs_physics_nml_dict - - # - # Add to "settings" the values of those namelist variables that specify - # the paths to fixed files in the FIXam directory. As above, these namelist - # variables are physcs-suite-independent. - # - # Note that the array FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING contains - # the mapping between the namelist variables and the names of the files - # in the FIXam directory. Here, we loop through this array and process - # each element to construct each line of "settings". - # - dummy_run_dir = os.path.join(EXPTDIR, "any_cyc") - if DO_ENSEMBLE: - dummy_run_dir = os.path.join(dummy_run_dir, "any_ensmem") - - regex_search = "^[ ]*([^| ]+)[ ]*[|][ ]*([^| ]+)[ ]*$" - num_nml_vars = len(FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING) - namsfc_dict = {} - for i in range(num_nml_vars): - - mapping = f"{FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING[i]}" - tup = find_pattern_in_str(regex_search, mapping) - nml_var_name = tup[0] - FIXam_fn = tup[1] - - fp = '""' - if FIXam_fn: - fp = os.path.join(FIXam, FIXam_fn) - # - # Add a line to the variable "settings" that specifies (in a yaml-compliant - # format) the name of the current namelist variable and the value it should - # be set to. - # - namsfc_dict[nml_var_name] = fp - # - # Add namsfc_dict to settings - # - settings["namsfc"] = namsfc_dict - # - # Use netCDF4 when running the North American 3-km domain due to file size. - # - if PREDEF_GRID_NAME == "RRFS_NA_3km": - settings["fms2_io_nml"] = {"netcdf_default_format": "netcdf4"} - # - # Add the relevant tendency-based stochastic physics namelist variables to - # "settings" when running with SPPT, SHUM, or SKEB turned on. 
If running - # with SPP or LSM SPP, set the "new_lscale" variable. Otherwise only - # include an empty "nam_stochy" stanza. - # - nam_stochy_dict = {} - if DO_SPPT: - nam_stochy_dict.update( - { - "iseed_sppt": ISEED_SPPT, - "new_lscale": NEW_LSCALE, - "sppt": SPPT_MAG, - "sppt_logit": SPPT_LOGIT, - "sppt_lscale": SPPT_LSCALE, - "sppt_sfclimit": SPPT_SFCLIMIT, - "sppt_tau": SPPT_TSCALE, - "spptint": SPPT_INT, - "use_zmtnblck": USE_ZMTNBLCK, - } - ) - - if DO_SHUM: - nam_stochy_dict.update( - { - "iseed_shum": ISEED_SHUM, - "new_lscale": NEW_LSCALE, - "shum": SHUM_MAG, - "shum_lscale": SHUM_LSCALE, - "shum_tau": SHUM_TSCALE, - "shumint": SHUM_INT, - } - ) - - if DO_SKEB: - nam_stochy_dict.update( - { - "iseed_skeb": ISEED_SKEB, - "new_lscale": NEW_LSCALE, - "skeb": SKEB_MAG, - "skeb_lscale": SKEB_LSCALE, - "skebnorm": SKEBNORM, - "skeb_tau": SKEB_TSCALE, - "skebint": SKEB_INT, - "skeb_vdof": SKEB_VDOF, - } - ) - - if DO_SPP or DO_LSM_SPP: - nam_stochy_dict.update({"new_lscale": NEW_LSCALE}) - - settings["nam_stochy"] = nam_stochy_dict - # - # Add the relevant SPP namelist variables to "settings" when running with - # SPP turned on. Otherwise only include an empty "nam_sppperts" stanza. - # - nam_sppperts_dict = {} - if DO_SPP: - nam_sppperts_dict = { - "iseed_spp": ISEED_SPP, - "spp_lscale": SPP_LSCALE, - "spp_prt_list": SPP_MAG_LIST, - "spp_sigtop1": SPP_SIGTOP1, - "spp_sigtop2": SPP_SIGTOP2, - "spp_stddev_cutoff": SPP_STDDEV_CUTOFF, - "spp_tau": SPP_TSCALE, - "spp_var_list": SPP_VAR_LIST, - } - - settings["nam_sppperts"] = nam_sppperts_dict - # - # Add the relevant LSM SPP namelist variables to "settings" when running with - # LSM SPP turned on. 
- # - nam_sfcperts_dict = {} - if DO_LSM_SPP: - nam_sfcperts_dict = { - "lndp_type": LNDP_TYPE, - "lndp_model_type": LNDP_MODEL_TYPE, - "lndp_tau": LSM_SPP_TSCALE, - "lndp_lscale": LSM_SPP_LSCALE, - "iseed_lndp": ISEED_LSM_SPP, - "lndp_var_list": LSM_SPP_VAR_LIST, - "lndp_prt_list": LSM_SPP_MAG_LIST, - } - - settings["nam_sfcperts"] = nam_sfcperts_dict - - settings_str = cfg_to_yaml_str(settings) - - log_info( - f""" - The variable 'settings' specifying values of the weather model's - namelist variables has been set as follows:\n""", - verbose=verbose, - ) - log_info("\nsettings =\n\n" + settings_str, verbose=verbose) - # - # ----------------------------------------------------------------------- - # - # Call the set_namelist.py script to create a new FV3 namelist file (full - # path specified by FV3_NML_FP) using the file FV3_NML_BASE_SUITE_FP as - # the base (i.e. starting) namelist file, with physics-suite-dependent - # modifications to the base file specified in the yaml configuration file - # FV3_NML_YAML_CONFIG_FP (for the physics suite specified by CCPP_PHYS_SUITE), - # and with additional physics-suite-independent modifications specified - # in the variable "settings" set above. - # - # ----------------------------------------------------------------------- - # - try: - set_namelist( - [ - "-q", - "-n", - FV3_NML_BASE_SUITE_FP, - "-c", - FV3_NML_YAML_CONFIG_FP, - CCPP_PHYS_SUITE, - "-u", - settings_str, - "-o", - FV3_NML_FP, - ] - ) - except: - logging.exception( - dedent( - f""" - Call to python script set_namelist.py to generate an FV3 namelist file - failed. 
Parameters passed to this script are: - Full path to base namelist file: - FV3_NML_BASE_SUITE_FP = '{FV3_NML_BASE_SUITE_FP}' - Full path to yaml configuration file for various physics suites: - FV3_NML_YAML_CONFIG_FP = '{FV3_NML_YAML_CONFIG_FP}' - Physics suite to extract from yaml configuration file: - CCPP_PHYS_SUITE = '{CCPP_PHYS_SUITE}' - Full path to output namelist file: - FV3_NML_FP = '{FV3_NML_FP}' - Namelist settings specified on command line:\n - settings =\n\n""" - ) - + settings_str - ) - # - # If not running the TN_MAKE_GRID task (which implies the workflow will - # use pregenerated grid files), set the namelist variables specifying - # the paths to surface climatology files. These files are located in - # (or have symlinks that point to them) in the FIXlam directory. - # - # Note that if running the TN_MAKE_GRID task, this action usually cannot - # be performed here but must be performed in that task because the names - # of the surface climatology files depend on the CRES parameter (which is - # the C-resolution of the grid), and this parameter is in most workflow - # configurations is not known until the grid is created. - # - if not RUN_TASK_MAKE_GRID: - - set_FV3nml_sfc_climo_filenames() - - # - # ----------------------------------------------------------------------- - # - # To have a record of how this experiment/workflow was generated, copy - # the experiment/workflow configuration file to the experiment directo- - # ry. - # - # ----------------------------------------------------------------------- - # - cp_vrfy(os.path.join(ushdir, EXPT_CONFIG_FN), EXPTDIR) - - # - # ----------------------------------------------------------------------- - # - # For convenience, print out the commands that need to be issued on the - # command line in order to launch the workflow and to check its status. - # Also, print out the line that should be placed in the user's cron table - # in order for the workflow to be continually resubmitted. 
- # - # ----------------------------------------------------------------------- - # - if WORKFLOW_MANAGER == "rocoto": - wflow_db_fn = f"{os.path.splitext(WFLOW_XML_FN)[0]}.db" - rocotorun_cmd = f"rocotorun -w {WFLOW_XML_FN} -d {wflow_db_fn} -v 10" - rocotostat_cmd = f"rocotostat -w {WFLOW_XML_FN} -d {wflow_db_fn} -v 10" - - log_info( - f""" - To launch the workflow, change location to the experiment directory - (EXPTDIR) and issue the rocotrun command, as follows: - - > cd {EXPTDIR} - > {rocotorun_cmd} - - To check on the status of the workflow, issue the rocotostat command - (also from the experiment directory): - - > {rocotostat_cmd} - - Note that: - - 1) The rocotorun command must be issued after the completion of each - task in the workflow in order for the workflow to submit the next - task(s) to the queue. - - 2) In order for the output of the rocotostat command to be up-to-date, - the rocotorun command must be issued immediately before issuing the - rocotostat command. - - For automatic resubmission of the workflow (say every {CRON_RELAUNCH_INTVL_MNTS} minutes), the - following line can be added to the user's crontab (use 'crontab -e' to - edit the cron table): - - */{CRON_RELAUNCH_INTVL_MNTS} * * * * cd {EXPTDIR} && ./launch_FV3LAM_wflow.sh called_from_cron="TRUE" - """ - ) - - # If we got to this point everything was successful: move the log file to the experiment directory. - mv_vrfy(logfile, EXPTDIR) - - return EXPTDIR - - -def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False) -> None: - """ - Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all - messages with detailed timing and routine info in the specified text file. - - If debug = True, print all messages to both screen and log file. 
- """ - logging.getLogger().setLevel(logging.DEBUG) - - formatter = logging.Formatter("%(name)-22s %(levelname)-8s %(message)s") - - fh = logging.FileHandler(logfile, mode='w') - fh.setLevel(logging.DEBUG) - fh.setFormatter(formatter) - logging.getLogger().addHandler(fh) - logging.debug(f"Finished setting up debug file logging in {logfile}") - - # If there are already multiple handlers, that means generate_FV3LAM_workflow was called from another function. - # In that case, do not change the console (print-to-screen) logging. - if len(logging.getLogger().handlers) > 1: - return - - console = logging.StreamHandler() - if debug: - console.setLevel(logging.DEBUG) - else: - console.setLevel(logging.INFO) - logging.getLogger().addHandler(console) - logging.debug("Logging set up successfully") - - -if __name__ == "__main__": - - USHdir = os.path.dirname(os.path.abspath(__file__)) - wflow_logfile = f"{USHdir}/log.generate_FV3LAM_wflow" - - # Call the generate_FV3LAM_wflow function defined above to generate the - # experiment/workflow. - try: - expt_dir = generate_FV3LAM_wflow(USHdir, wflow_logfile) - except: - logging.exception( - dedent( - f""" - ********************************************************************* - FATAL ERROR: - Experiment generation failed. See the error message(s) printed below. - For more detailed information, check the log file from the workflow - generation script: {wflow_logfile} - *********************************************************************\n - """ - ) - ) - - # Note workflow generation completion - log_info( - f""" - ======================================================================== - ======================================================================== - - Experiment generation completed. 
The experiment directory is: - - EXPTDIR='{EXPTDIR}' - - ======================================================================== - ======================================================================== - """ - ) - - -class Testing(unittest.TestCase): - def test_generate_FV3LAM_wflow(self): - - # run workflows in separate process to avoid conflict between community and nco settings - def run_workflow(USHdir, logfile): - p = Process(target=generate_FV3LAM_wflow, args=(USHdir, logfile)) - p.start() - p.join() - exit_code = p.exitcode - if exit_code != 0: - sys.exit(exit_code) - - USHdir = os.path.dirname(os.path.abspath(__file__)) - logfile = "log.generate_FV3LAM_wflow" - SED = get_env_var("SED") - - # community test case - cp_vrfy(f"{USHdir}/config.community.yaml", f"{USHdir}/config.yaml") - run_command( - f"""{SED} -i 's/MACHINE: hera/MACHINE: linux/g' {USHdir}/config.yaml""" - ) - run_workflow(USHdir, logfile) - - # nco test case - nco_test_config = load_config_file(f"{USHdir}/config.nco.yaml") - # Since we don't have a pre-gen grid dir on a generic linux - # platform, turn the make_* tasks on for this test. 
- cfg_updates = { - "user": { - "MACHINE": "linux", - }, - "workflow_switches": { - "RUN_TASK_MAKE_GRID": True, - "RUN_TASK_MAKE_OROG": True, - "RUN_TASK_MAKE_SFC_CLIMO": True, - }, - } - update_dict(cfg_updates, nco_test_config) - - with open(f"{USHdir}/config.yaml", "w") as cfg_file: - cfg_file.write(cfg_to_yaml_str(nco_test_config)) - - run_workflow(USHdir, logfile) - - def setUp(self): - define_macos_utilities() - set_env_var("DEBUG", False) - set_env_var("VERBOSE", False) diff --git a/ush/get_crontab_contents.py b/ush/get_crontab_contents.py deleted file mode 100755 index 774d311ac0..0000000000 --- a/ush/get_crontab_contents.py +++ /dev/null @@ -1,240 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -import unittest -import argparse -from datetime import datetime -from textwrap import dedent - -from python_utils import ( - log_info, - import_vars, - set_env_var, - print_input_args, - run_command, - define_macos_utilities, - print_info_msg, -) - - -def get_crontab_contents(called_from_cron): - """ - #----------------------------------------------------------------------- - # - # This function returns the contents of the user's - # cron table as well as the command to use to manipulate the cron table - # (i.e. the "crontab" command, but on some platforms the version or - # location of this may change depending on other circumstances, e.g. on - # Cheyenne, this depends on whether a script that wants to call "crontab" - # is itself being called from a cron job). Arguments are as follows: - # - # called_from_cron: - # Boolean flag that specifies whether this function (and the scripts or - # functions that are calling it) are called as part of a cron job. Must - # be set to "TRUE" or "FALSE". - # - # outvarname_crontab_cmd: - # Name of the output variable that will contain the command to issue for - # the system "crontab" command. - # - # outvarname_crontab_contents: - # Name of the output variable that will contain the contents of the - # user's cron table. 
- # - #----------------------------------------------------------------------- - """ - - print_input_args(locals()) - - # import selected env vars - IMPORTS = ["MACHINE", "DEBUG"] - import_vars(env_vars=IMPORTS) - - __crontab_cmd__ = "crontab" - # - # On Cheyenne, simply typing "crontab" will launch the crontab command - # at "/glade/u/apps/ch/opt/usr/bin/crontab". This is a containerized - # version of crontab that will work if called from scripts that are - # themselves being called as cron jobs. In that case, we must instead - # call the system version of crontab at /usr/bin/crontab. - # - if MACHINE == "CHEYENNE": - if called_from_cron: - __crontab_cmd__ = "/usr/bin/crontab" - - print_info_msg( - f""" - Getting crontab content with command: - ========================================================= - {__crontab_cmd__} -l - =========================================================""", - verbose=DEBUG, - ) - - (_, __crontab_contents__, _) = run_command(f"""{__crontab_cmd__} -l""") - - print_info_msg( - f""" - Crontab contents: - ========================================================= - {__crontab_contents__} - =========================================================""", - verbose=DEBUG, - ) - - # replace single quotes (hopefully in comments) with double quotes - __crontab_contents__ = __crontab_contents__.replace("'", '"') - - return __crontab_cmd__, __crontab_contents__ - - -def add_crontab_line(): - """Add crontab line to cron table""" - - # import selected env vars - IMPORTS = ["MACHINE", "CRONTAB_LINE", "VERBOSE", "EXPTDIR"] - import_vars(env_vars=IMPORTS) - - # - # Make a backup copy of the user's crontab file and save it in a file. 
- # - time_stamp = datetime.now().strftime("%F_%T") - crontab_backup_fp = os.path.join(EXPTDIR, f"crontab.bak.{time_stamp}") - log_info( - f""" - Copying contents of user cron table to backup file: - crontab_backup_fp = '{crontab_backup_fp}'""", - verbose=VERBOSE, - ) - - global called_from_cron - try: - called_from_cron - except: - called_from_cron = False - - # Get crontab contents - crontab_cmd, crontab_contents = get_crontab_contents( - called_from_cron=called_from_cron - ) - - # Create backup - run_command(f"""printf "%s" '{crontab_contents}' > '{crontab_backup_fp}'""") - - # Add crontab line - if CRONTAB_LINE in crontab_contents: - - log_info( - f""" - The following line already exists in the cron table and thus will not be - added: - CRONTAB_LINE = '{CRONTAB_LINE}'""" - ) - - else: - - log_info( - f""" - Adding the following line to the user's cron table in order to automatically - resubmit SRW workflow: - CRONTAB_LINE = '{CRONTAB_LINE}'""", - verbose=VERBOSE, - ) - - # add new line to crontab contents if it doesn't have one - NEWLINE_CHAR = "" - if crontab_contents and crontab_contents[-1] != "\n": - NEWLINE_CHAR = "\n" - - # add the crontab line - run_command( - f"""printf "%s%b%s\n" '{crontab_contents}' '{NEWLINE_CHAR}' '{CRONTAB_LINE}' | {crontab_cmd}""" - ) - - -def delete_crontab_line(called_from_cron): - """Delete crontab line after job is complete i.e. either SUCCESS/FAILURE - but not IN PROGRESS status""" - - print_input_args(locals()) - - # import selected env vars - IMPORTS = ["MACHINE", "CRONTAB_LINE", "DEBUG"] - import_vars(env_vars=IMPORTS) - - # - # Get the full contents of the user's cron table. - # - (crontab_cmd, crontab_contents) = get_crontab_contents(called_from_cron) - # - # Remove the line in the contents of the cron table corresponding to the - # current forecast experiment (if that line is part of the contents). - # Then record the results back into the user's cron table. 
- # - print_info_msg( - f""" - Crontab contents before delete: - ========================================================= - {crontab_contents} - =========================================================""", - verbose=True, - ) - - if (CRONTAB_LINE + "\n") in crontab_contents: - crontab_contents = crontab_contents.replace(CRONTAB_LINE + "\n", "") - else: - crontab_contents = crontab_contents.replace(CRONTAB_LINE, "") - - run_command(f"""echo '{crontab_contents}' | {crontab_cmd}""") - - print_info_msg( - f""" - Crontab contents after delete: - ========================================================= - {crontab_contents} - =========================================================""", - verbose=True, - ) - - -def parse_args(argv): - """Parse command line arguments for deleting crontab line. - This is needed because it is called from shell script - """ - parser = argparse.ArgumentParser(description="Crontab job manupilation program.") - - parser.add_argument( - "-d", - "--delete", - dest="delete", - action="store_true", - help="Delete crontab line.", - ) - - parser.add_argument( - "-c", - "--called-from-cron", - dest="called_from_cron", - action="store_true", - help="Called from cron.", - ) - - return parser.parse_args(argv) - - -if __name__ == "__main__": - args = parse_args(sys.argv[1:]) - if args.delete: - delete_crontab_line(args.called_from_cron) - - -class Testing(unittest.TestCase): - def test_get_crontab_contents(self): - crontab_cmd, crontab_contents = get_crontab_contents(called_from_cron=True) - self.assertEqual(crontab_cmd, "crontab") - - def setUp(self): - define_macos_utilities() - set_env_var("DEBUG", False) - set_env_var("MACHINE", "HERA") diff --git a/ush/get_layout.sh b/ush/get_layout.sh deleted file mode 100755 index 878f1b30d9..0000000000 --- a/ush/get_layout.sh +++ /dev/null @@ -1,147 +0,0 @@ -#!/bin/bash -f -########################################################### -# get layout for given nx and ny -# INPUT: nx, ny, number of cpu to be used 
(optional). -# Output: suggested nx, ny, layout_x, layout_y -# email to: Linlin.Pan@noaa.gov for any questions. -# -########################################################### -if [ "$#" -lt 2 ]; then - echo "You must enter number of grid points in x and y directions" - exit -else - nx=$1 - ny=$2 - echo "nx= $nx, ny= $ny" -fi - -if [ "$#" -eq 3 ]; then - nlayout=$3 - echo "ncups= $nlayout" - layout_x=$(echo "sqrt($nlayout*$nx/$ny)" |bc ) - if [ $layout_x -gt $nx ]; then - $layout_x=$nx - fi -# using even number - if [ $((layout_x%2)) -gt 0 ] ; then - if [ $nx -gt $ny ] ; then - layout_x=$((layout_x+1)) - else - layout_x=$((layout_x-1)) - fi - fi - if [ $layout_x -eq 0 ] ; then - layout_x=2 - fi - if [ $layout_x -gt 24 ]; then - layout_x=24 - fi -# get layout_y - layout_y=$((nlayout/layout_x)) - if [ $((layout_y%2)) -gt 0 ] ; then - layout_y=$((layout_y+1)) - fi - if [ $layout_y -gt 24 ] && [ $layout_x -ne 24 ] ; then - layout_y=24 - layout_x=$((nlayout/layout_y)) - if [ $((layout_x%2)) -gt 0 ] ; then - layout_x=$((layout_x+1)) - fi - fi - if [ $nx -gt $ny ] && [ $layout_x -lt $layout_y ] ; then - temp=$layout_x - layout_x=$layout_y - layout_y=$temp - fi -# get nx, ny - if [ $((nx%layout_x)) -gt 0 ] ; then - nx=$((nx/layout_x*layout_x+layout_x)) - else - nx=$((nx/layout_x*layout_x)) - fi - if [ $((ny%layout_y)) -gt 0 ] ; then - ny=$((ny/layout_y*layout_y+layout_y)) - else - ny=$((ny/layout_y*layout_y)) - fi - echo "suggested layout_x= $layout_x, layout_y $layout_y, and total = $((layout_x*layout_y))" - echo "suggested nx= $nx, ny= $ny" - exit -fi - -nxy=$((nx * ny)) - -if [ $nxy -le 22000 ]; then # 22000 is from predefined HRRR 25km domain - layout_x=2 - layout_y=2 - nx=$((nx+nx%2)) - ny=$((ny+ny%2)) - -elif [ $nxy -gt 22000 ] && [ $nxy -le 81900 ]; then #81900 is obtained from predefined HRRR 13km domain - nlayout=$(((4+96*nxy/81900))) - layout_x=$(echo "sqrt($nlayout)" |bc ) - if [ $layout_x -gt $nx ]; then - $layout_x=$nx - fi - if [ $((layout_x%2)) -gt 0 ] ; 
then - if [ $nx -gt $ny ] ; then - layout_x=$((layout_x+1)) - else - layout_x=$((layout_x-1)) - fi - fi - layout_y=$((nlayout/layout_x)) - if [ $((layout_y%2)) -gt 0 ] ; then - layout_y=$((layout_y+1)) - fi - if [ $((nx%layout_x)) -gt 0 ] ; then - nx=$((nx/layout_x*layout_x+layout_x)) - else - nx=$((nx/layout_x*layout_x)) - fi - if [ $((ny%layout_y)) -gt 0 ] ; then - ny=$((ny/layout_y*layout_y+layout_y)) - else - ny=$((ny/layout_y*layout_y)) - fi - -elif [ $nxy -gt 81900 ]; then - nlayout=$(((100+716*nxy/1747872))) # 1747872 is obtained from predefined HRRR 3km domain. - layout_x=$(echo "sqrt($nlayout)" |bc ) - if [ $layout_x -gt $nx ]; then - $layout_x=$nx - fi - if [ $layout_x -gt 24 ] ; then - layout_x=24 - layout_y=$((nlayout/layout_x)) - layout_y=$((layout_y+layout_y%2)) - if [ $nx -gt $ny ] && [ $layout_x -lt $layout_y ]; then - layout_x=$layout_y - layout_y=24 - fi - else - layout_y=$((nlayout/layout_x)) - layout_y=$((layout_y+layout_y%2)) - if [ $nx -gt $ny ] && [ $layout_x -lt $layout_y ]; then - temp=$layout_x - layout_x=$layout_y - layout_y=$temp - fi - fi - if [ $((nx%layout_x)) -gt 0 ] ; then - nx=$((nx/layout_x*layout_x+layout_x)) - else - nx=$((nx/layout_x*layout_x)) - fi - if [ $((ny%layout_y)) -gt 0 ] ; then - ny=$((ny/layout_y*layout_y+layout_y)) - else - ny=$((ny/layout_y*layout_y)) - fi -else - echo "Error: nxy= $nxy " - exit -fi - -echo "suggested layout_x= $layout_x, layout_y=$layout_y, total= $((layout_x*layout_y))" -echo "suggested nx= $nx, ny= $ny" diff --git a/ush/launch_FV3LAM_wflow.sh b/ush/launch_FV3LAM_wflow.sh deleted file mode 100755 index 3a43ee3eb6..0000000000 --- a/ush/launch_FV3LAM_wflow.sh +++ /dev/null @@ -1,404 +0,0 @@ -#!/bin/bash - -# -#----------------------------------------------------------------------- -# -# Set shell options. 
-# -#----------------------------------------------------------------------- -# -set -u -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -if [[ $(uname -s) == Darwin ]]; then - command -v greadlink >/dev/null 2>&1 || { \ - echo >&2 "\ -For Darwin-based operating systems (MacOS), the 'greadlink' utility is -required to run the UFS SRW Application. Reference the User's Guide for -more information about platform requirements. Aborting."; \ - exit 1; \ - } - scrfunc_fp=$( greadlink -f "${BASH_SOURCE[0]}" ) -else - scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) -fi -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the experiment directory. We assume that there is a symlink to -# this script in the experiment directory, and this script is called via -# that symlink. Thus, finding the directory in which the symlink is -# located will give us the experiment directory. We find this by first -# obtaining the directory portion (i.e. the portion without the name of -# this script) of the command that was used to called this script (i.e. -# "$0") and then use the "readlink -f" command to obtain the corresponding -# absolute path. 
This will work for all four of the following ways in -# which the symlink in the experiment directory pointing to this script -# may be called: -# -# 1) Call this script from the experiment directory: -# > cd /path/to/experiment/directory -# > launch_FV3LAM_wflow.sh -# -# 2) Call this script from the experiment directory but using "./" before -# the script name: -# > cd /path/to/experiment/directory -# > ./launch_FV3LAM_wflow.sh -# -# 3) Call this script from any directory using the absolute path to the -# symlink in the experiment directory: -# > /path/to/experiment/directory/launch_FV3LAM_wflow.sh -# -# 4) Call this script from a directory that is several levels up from the -# experiment directory (but not necessarily at the root directory): -# > cd /path/to -# > experiment/directory/launch_FV3LAM_wflow.sh -# -# Note that given just a file name, e.g. the name of this script without -# any path before it, the "dirname" command will return a ".", e.g. in -# bash, -# -# > exptdir=$( dirname "launch_FV3LAM_wflow.sh" ) -# > echo $exptdir -# -# will print out ".". -# -#----------------------------------------------------------------------- -# -exptdir=$( dirname "$0" ) -if [[ $(uname -s) == Darwin ]]; then - command -v greadlink >/dev/null 2>&1 || { \ - echo >&2 "\ -For Darwin-based operating systems (MacOS), the 'greadlink' utility is -required to run the UFS SRW Application. Reference the User's Guide for -more information about platform requirements. Aborting."; - exit 1; - } - exptdir=$( greadlink -f "$exptdir" ) -else - exptdir=$( readlink -f "$exptdir" ) -fi -# -#----------------------------------------------------------------------- -# -# Source necessary files. -# -#----------------------------------------------------------------------- -# -. $exptdir/var_defns.sh -. $USHdir/source_util_funcs.sh -# -#----------------------------------------------------------------------- -# -# Declare arguments. 
-# -#----------------------------------------------------------------------- -# -valid_args=( \ - "called_from_cron" \ - ) -process_args valid_args "$@" -print_input_args "valid_args" -# -#----------------------------------------------------------------------- -# -# Make sure called_from_cron is set to a valid value. -# -#----------------------------------------------------------------------- -# -called_from_cron=${called_from_cron:-"FALSE"} -check_var_valid_value "called_from_cron" "valid_vals_BOOLEAN" -called_from_cron=$(boolify "${called_from_cron}") -# -#----------------------------------------------------------------------- -# -# Set the name of the experiment. We take this to be the name of the -# experiment subdirectory (i.e. the string after the last "/" in the -# full path to the experiment directory). -# -#----------------------------------------------------------------------- -# -expt_name="${EXPT_SUBDIR}" -# -#----------------------------------------------------------------------- -# -# Load necessary modules. -# -#----------------------------------------------------------------------- -# -machine=$(echo_lowercase $MACHINE) - -. ${USHdir}/load_modules_wflow.sh ${machine} - -# -#----------------------------------------------------------------------- -# -# Set file names. These include the rocoto database file and the log -# file in which to store output from this script (aka the workflow -# launch script). -# -#----------------------------------------------------------------------- -# -rocoto_xml_bn=$( basename "${WFLOW_XML_FN}" ".xml" ) -rocoto_database_fn="${rocoto_xml_bn}.db" -launch_log_fn="log.launch_${rocoto_xml_bn}" -# -#----------------------------------------------------------------------- -# -# Initialize the default status of the workflow to "IN PROGRESS". 
-# -#----------------------------------------------------------------------- -# -wflow_status="IN PROGRESS" -# -#----------------------------------------------------------------------- -# -# Change location to the experiment directory. -# -#----------------------------------------------------------------------- -# -cd "$exptdir" -# -#----------------------------------------------------------------------- -# -# Issue the rocotorun command to (re)launch the next task in the workflow. -# Then check for error messages in the output of rocotorun. If any are -# found, it means the end-to-end run of the workflow failed, so set the -# status of the workflow to "FAILURE". -# -#----------------------------------------------------------------------- -# -tmp_fn="rocotorun_output.txt" -rocotorun_cmd="rocotorun -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10" -eval ${rocotorun_cmd} > ${tmp_fn} 2>&1 || \ - print_err_msg_exit "\ -Call to \"rocotorun\" failed with return code $?." -rocotorun_output=$( cat "${tmp_fn}" ) -rm "${tmp_fn}" - -error_msg="sbatch: error: Batch job submission failed:" -while read -r line; do - grep_output=$( printf "%s" "$line" | grep "${error_msg}" ) - if [ $? -eq 0 ]; then - wflow_status="FAILURE" - break - fi -done <<< "${rocotorun_output}" -# -#----------------------------------------------------------------------- -# -# Issue the rocotostat command to obtain a table specifying the status -# of each task. Then check for dead tasks in the output of rocotostat. -# If any are found, it means the end-to-end run of the workflow failed, -# so set the status of the workflow (wflow_status) to "FAILURE". -# -#----------------------------------------------------------------------- -# -rocotostat_cmd="rocotostat -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10" -rocotostat_output=$( eval ${rocotostat_cmd} 2>&1 || \ - print_err_msg_exit "\ -Call to \"rocotostat\" failed with return code $?." 
- ) - -error_msg="DEAD" -while read -r line; do - grep_output=$( printf "%s" "$line" | grep "${error_msg}" ) - if [ $? -eq 0 ]; then - wflow_status="FAILURE" - break - fi -done <<< "${rocotostat_output}" -# -#----------------------------------------------------------------------- -# -# Place the outputs of the rocotorun and rocotostat commands obtained -# above into the launch log file. -# -#----------------------------------------------------------------------- -# -printf "%s" " - -======================================================================== -Start of output from script \"${scrfunc_fn}\". -======================================================================== - -Running rocotorun command (rocotorun_cmd): -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - rocotorun_cmd = \'${rocotorun_cmd}\' - -Output of rocotorun_cmd is: -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -${rocotorun_output} - -Running rocotostat command (rocotostat_cmd): -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - rocotostat_cmd = \'${rocotostat_cmd}\' - -Output of rocotostat_cmd is: -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -${rocotostat_output} -" >> "${WFLOW_LAUNCH_LOG_FN}" 2>&1 -# -#----------------------------------------------------------------------- -# -# Use the rocotostat command with the "-s" flag to obtain a summary of -# the status of each cycle in the workflow. The output of this command -# has the following format: -# -# CYCLE STATE ACTIVATED DEACTIVATED -# 201905200000 Active Nov 07 2019 00:23:30 - -# ... -# -# Thus, the first row is a header line containing the column titles, and -# the remaining rows each correspond to one cycle in the workflow. Below, -# we are interested in the first and second columns of each row. The -# first column is a string containing the start time of the cycle (in the -# format YYYYMMDDHHmm, where YYYY is the 4-digit year, MM is the 2-digit -# month, DD is the 2-digit day of the month, HH is the 2-digit hour of -# the day, and mm is the 2-digit minute of the hour). 
The second column -# is a string containing the state of the cycle. This can be "Active" -# or "Done". Below, we read in and store these two columns in (1-D) -# arrays. -# -#----------------------------------------------------------------------- -# -rocotostat_output=$( rocotostat -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 -s ) - -regex_search="^[ ]*([0-9]+)[ ]+([A-Za-z]+)[ ]+.*" -cycle_str=() -cycle_status=() -i=0 -while read -r line; do -# -# Note that the first line in rocotostat_output is a header line containing -# the column titles. Thus, we ignore it and consider only the remaining -# lines (of which there is one per cycle). -# - if [ $i -gt 0 ]; then - im1=$((i-1)) - cycle_str[im1]=$( echo "$line" | $SED -r -n -e "s/${regex_search}/\1/p" ) - cycle_status[im1]=$( echo "$line" | $SED -r -n -e "s/${regex_search}/\2/p" ) - fi - i=$((i+1)) -done <<< "${rocotostat_output}" -# -#----------------------------------------------------------------------- -# -# Get the number of cycles. Then count the number of completed cycles -# by finding the number of cycles for which the corresponding element in -# the cycle_status array is set to "Done". -# -#----------------------------------------------------------------------- -# -num_cycles_total=${#cycle_str[@]} -num_cycles_completed=0 -for (( i=0; i<=$((num_cycles_total-1)); i++ )); do - if [ "${cycle_status[i]}" = "Done" ]; then - num_cycles_completed=$((num_cycles_completed+1)) - fi -done -# -#----------------------------------------------------------------------- -# -# If the number of completed cycles is equal to the total number of cycles, -# it means the end-to-end run of the workflow was successful. In this -# case, we reset the wflow_status to "SUCCESS". 
-# -#----------------------------------------------------------------------- -# -if [ ${num_cycles_completed} -eq ${num_cycles_total} ]; then - wflow_status="SUCCESS" -fi -# -#----------------------------------------------------------------------- -# -# Print informational messages about the workflow to the launch log file, -# including the workflow status. -# -#----------------------------------------------------------------------- -# -printf "%s" " - -Summary of workflow status: -~~~~~~~~~~~~~~~~~~~~~~~~~~ - - ${num_cycles_completed} out of ${num_cycles_total} cycles completed. - Workflow status: ${wflow_status} - -======================================================================== -End of output from script \"${scrfunc_fn}\". -======================================================================== - -" >> ${WFLOW_LAUNCH_LOG_FN} 2>&1 -# -#----------------------------------------------------------------------- -# -# If the workflow status (wflow_status) has been set to either "SUCCESS" -# or "FAILURE", indicate this by appending an appropriate workflow -# completion message to the end of the launch log file. -# -#----------------------------------------------------------------------- -# -if [ "${wflow_status}" = "SUCCESS" ] || \ - [ "${wflow_status}" = "FAILURE" ]; then - - msg=" -The end-to-end run of the workflow for the forecast experiment specified -by expt_name has completed with the following workflow status (wflow_status): - expt_name = \"${expt_name}\" - wflow_status = \"${wflow_status}\" -" -# -# If a cron job was being used to periodically relaunch the workflow, we -# now remove the entry in the crontab corresponding to the workflow -# because the end-to-end run of the workflow has now either succeeded or -# failed and will remain in that state without manual user intervention. -# Thus, there is no need to try to relaunch it. We also append a message -# to the completion message above to indicate this. 
-# - if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then - - msg="${msg}\ -Thus, there is no need to relaunch the workflow via a cron job. Removing -from the crontab the line (CRONTAB_LINE) that calls the workflow launch -script for this experiment: - CRONTAB_LINE = \"${CRONTAB_LINE}\" -" -# -# Remove CRONTAB_LINE from cron table -# - if [ "${called_from_cron}" = "TRUE" ]; then - MACHINE=$MACHINE CRONTAB_LINE=$CRONTAB_LINE \ - python3 $USHdir/get_crontab_contents.py --delete --called-from-cron - else - MACHINE=$MACHINE CRONTAB_LINE=$CRONTAB_LINE \ - python3 $USHdir/get_crontab_contents.py --delete - fi - fi -# -# Print the workflow completion message to the launch log file. -# - printf "%s" "$msg" >> ${WFLOW_LAUNCH_LOG_FN} 2>&1 -# -# If the stdout from this script is being sent to the screen (e.g. it is -# not being redirected to a file), then also print out the workflow -# completion message to the screen. -# - if [ -t 1 ]; then - printf "%s" "$msg" - fi - -fi diff --git a/ush/link_fix.py b/ush/link_fix.py deleted file mode 100755 index df02e9c411..0000000000 --- a/ush/link_fix.py +++ /dev/null @@ -1,458 +0,0 @@ -#!/usr/bin/env python3 - -import unittest -import os -import sys -import argparse -import re -import glob - -from python_utils import ( - import_vars, - print_input_args, - print_info_msg, - print_err_msg_exit, - create_symlink_to_file, - define_macos_utilities, - check_var_valid_value, - flatten_dict, - cd_vrfy, - mkdir_vrfy, - find_pattern_in_str, - load_shell_config, -) - - -def link_fix( - verbose, - file_group, - source_dir, - target_dir, - ccpp_phys_suite, - constants, - dot_or_uscore, - nhw, - run_task, - sfc_climo_fields, - **kwargs, -): - """This file defines a function that links fix files to the target - directory for a given SRW experiment. Only links files for one group - at a time. 
- - Args: - cfg_d: dictionary of settings - file_group: could be on of ["grid", "orog", "sfc_climo"] - source_dir: the path to directory where the file_group fix files - are linked from - target_dir: the directory where the fix files should be linked to - dot_or_uscore: str containing either a dot or an underscore - nhw: grid parameter setting - constants: dict containing the constants used by SRW - run_task: boolean value indicating whether the task is to be run - in the experiment - climo_fields: list of fields needed for climo - - Returns: - a string: resolution - """ - - print_input_args(locals()) - - valid_vals_file_group = ["grid", "orog", "sfc_climo"] - check_var_valid_value(file_group, valid_vals_file_group) - - # Decompress the constants needed below. - nh0 = constants["NH0"] - nh3 = constants["NH3"] - nh4 = constants["NH4"] - tile_rgnl = constants["TILE_RGNL"] - - # - # ----------------------------------------------------------------------- - # - # Create symlinks in the target_dir pointing to the fix files. - # These symlinks are needed by the make_orog, make_sfc_climo, - # make_ic, make_lbc, and/or run_fcst tasks. - # - # Note that we check that each target file exists before attempting to - # create symlinks. This is because the "ln" command will create sym- - # links to non-existent targets without returning with a nonzero exit - # code. - # - # ----------------------------------------------------------------------- - # - print_info_msg( - f"Creating links in the {target_dir} directory to the grid files...", - verbose=verbose, - ) - # - # ----------------------------------------------------------------------- - # - # Create globbing patterns for grid, orography, and surface climatology - # files. - # - # - # For grid files (i.e. file_group set to "grid"), symlinks are created - # in the FIXlam directory to files (of the same names) in the GRID_DIR. 
- # These symlinks/files and the reason each is needed is listed below: - # - # 1) "C*.mosaic.halo${NHW}.nc" - # This mosaic file for the wide-halo grid (i.e. the grid with a ${NHW}- - # cell-wide halo) is needed as an input to the orography filtering - # executable in the orography generation task. The filtering code - # extracts from this mosaic file the name of the file containing the - # grid on which it will generate filtered topography. Note that the - # orography generation and filtering are both performed on the wide- - # halo grid. The filtered orography file on the wide-halo grid is then - # shaved down to obtain the filtered orography files with ${NH3}- and - # ${NH4}-cell-wide halos. - # - # The raw orography generation step in the make_orog task requires the - # following symlinks/files: - # - # a) C*.mosaic.halo${NHW}.nc - # The script for the make_orog task extracts the name of the grid - # file from this mosaic file; this name should be - # "C*.grid.tile${TILE_RGNL}.halo${NHW}.nc". - # - # b) C*.grid.tile${TILE_RGNL}.halo${NHW}.nc - # This is the - # The script for the make_orog task passes the name of the grid - # file (extracted above from the mosaic file) to the orography - # generation executable. The executable then - # reads in this grid file and generates a raw orography - # file on the grid. The raw orography file is initially renamed "out.oro.nc", - # but for clarity, it is then renamed "C*.raw_orog.tile${TILE_RGNL}.halo${NHW}.nc". - # - # c) The fixed files thirty.second.antarctic.new.bin, landcover30.fixed, - # and gmted2010.30sec.int. - # - # The orography filtering step in the make_orog task requires the - # following symlinks/files: - # - # a) C*.mosaic.halo${NHW}.nc - # This is the mosaic file for the wide-halo grid. The orography - # filtering executable extracts from this file the name of the grid - # file containing the wide-halo grid (which should be - # "${CRES}.grid.tile${TILE_RGNL}.halo${NHW}.nc"). 
The executable then - # looks for this grid file IN THE DIRECTORY IN WHICH IT IS RUNNING. - # Thus, before running the executable, the script creates a symlink in this run directory that - # points to the location of the actual wide-halo grid file. - # - # b) C*.raw_orog.tile${TILE_RGNL}.halo${NHW}.nc - # This is the raw orography file on the wide-halo grid. The script - # for the make_orog task copies this file to a new file named - # "C*.filtered_orog.tile${TILE_RGNL}.halo${NHW}.nc" that will be - # used as input to the orography filtering executable. The executable - # will then overwrite the contents of this file with the filtered orography. - # Thus, the output of the orography filtering executable will be - # the file C*.filtered_orog.tile${TILE_RGNL}.halo${NHW}.nc. - # - # The shaving step in the make_orog task requires the following: - # - # a) C*.filtered_orog.tile${TILE_RGNL}.halo${NHW}.nc - # This is the filtered orography file on the wide-halo grid. - # This gets shaved down to two different files: - # - # i) ${CRES}.oro_data.tile${TILE_RGNL}.halo${NH0}.nc - # This is the filtered orography file on the halo-0 grid. - # - # ii) ${CRES}.oro_data.tile${TILE_RGNL}.halo${NH4}.nc - # This is the filtered orography file on the halo-4 grid. - # - # Note that the file names of the shaved files differ from that of - # the initial unshaved file on the wide-halo grid in that the field - # after ${CRES} is now "oro_data" (not "filtered_orog") to comply - # with the naming convention used more generally. - # - # 2) "C*.mosaic.halo${NH4}.nc" - # This mosaic file for the grid with a 4-cell-wide halo is needed as - # an input to the surface climatology generation executable. The - # surface climatology generation code reads from this file the number - # of tiles (which should be 1 for a regional grid) and the tile names. 
- # More importantly, using the ESMF function ESMF_GridCreateMosaic(), - # it creates a data object of type esmf_grid; the grid information - # in this object is obtained from the grid file specified in the mosaic - # file, which should be "C*.grid.tile${TILE_RGNL}.halo${NH4}.nc". The - # dimensions specified in this grid file must match the ones specified - # in the (filtered) orography file "C*.oro_data.tile${TILE_RGNL}.halo${NH4}.nc" - # that is also an input to the surface climatology generation executable. - # If they do not, then the executable will crash with an ESMF library - # error (something like "Arguments are incompatible"). - # - # Thus, for the make_sfc_climo task, the following symlinks/files must - # exist: - # a) "C*.mosaic.halo${NH4}.nc" - # b) "C*.grid.tile${TILE_RGNL}.halo${NH4}.nc" - # c) "C*.oro_data.tile${TILE_RGNL}.halo${NH4}.nc" - # - # 3) - # - # - # ----------------------------------------------------------------------- - # - # - if file_group == "grid": - fns = [ - f"C*{dot_or_uscore}mosaic.halo{nhw}.nc", - f"C*{dot_or_uscore}mosaic.halo{nh4}.nc", - f"C*{dot_or_uscore}mosaic.halo{nh3}.nc", - f"C*{dot_or_uscore}grid.tile{tile_rgnl}.halo{nhw}.nc", - f"C*{dot_or_uscore}grid.tile{tile_rgnl}.halo{nh3}.nc", - f"C*{dot_or_uscore}grid.tile{tile_rgnl}.halo{nh4}.nc", - ] - - elif file_group == "orog": - fns = [ - f"C*{dot_or_uscore}oro_data.tile{tile_rgnl}.halo{nh0}.nc", - f"C*{dot_or_uscore}oro_data.tile{tile_rgnl}.halo{nh4}.nc", - ] - if ccpp_phys_suite == "FV3_HRRR" or ccpp_phys_suite == "FV3_GFS_v15_thompson_mynn_lam3km" or ccpp_phys_suite == "FV3_GFS_v17_p8": - fns += [ - f"C*{dot_or_uscore}oro_data_ss.tile{tile_rgnl}.halo{nh0}.nc", - f"C*{dot_or_uscore}oro_data_ls.tile{tile_rgnl}.halo{nh0}.nc", - ] - - # - # The following list of symlinks (which have the same names as their - # target files) need to be created for the make_ics and make_lbcs - # tasks (i.e. tasks involving chgres_cube) to work. 
- # - elif file_group == "sfc_climo": - fns = [] - for sfc_climo_field in sfc_climo_fields: - fns.append(f"C*.{sfc_climo_field}.tile{tile_rgnl}.halo{nh0}.nc") - fns.append(f"C*.{sfc_climo_field}.tile{tile_rgnl}.halo{nh4}.nc") - - fps = [os.path.join(source_dir, itm) for itm in fns] - # - # ----------------------------------------------------------------------- - # - # Find all files matching the globbing patterns and make sure that they - # all have the same resolution (an integer) in their names. - # - # ----------------------------------------------------------------------- - # - i = 0 - res_prev = "" - res = "" - fp_prev = "" - - for pattern in fps: - files = glob.glob(pattern) - if not files: - print_err_msg_exit( - f""" - Trying to link files in group: {file_group} - No files were found matching the pattern {pattern}. - """ - ) - for fp in files: - - fn = os.path.basename(fp) - - regex_search = "^C([0-9]*).*" - res = find_pattern_in_str(regex_search, fn) - if not res: - print_err_msg_exit( - f""" - The resolution could not be extracted from the current file's name. The - full path to the file (fp) is: - fp = '{fp}' - This may be because fp contains the * globbing character, which would - imply that no files were found that match the globbing pattern specified - in fp.""" - ) - else: - res = res[0] - - if (i > 0) and (res != res_prev): - print_err_msg_exit( - f""" - The resolutions (as obtained from the file names) of the previous and - current file (fp_prev and fp, respectively) are different: - fp_prev = '{fp_prev}' - fp = '{fp}' - Please ensure that all files have the same resolution.""" - ) - - i = i + 1 - fp_prev = f"{fp}" - res_prev = res - # - # ----------------------------------------------------------------------- - # - # Replace the * globbing character in the set of globbing patterns with - # the resolution. This will result in a set of (full paths to) specific - # files. 
- # - # ----------------------------------------------------------------------- - # - fps = [itm.replace("*", res) for itm in fps] - # - # ----------------------------------------------------------------------- - # - # In creating the various symlinks below, it is convenient to work in - # the FIXlam directory. We will change directory back to the original - # later below. - # - # ----------------------------------------------------------------------- - # - save_dir = os.getcwd() - cd_vrfy(target_dir) - # - # ----------------------------------------------------------------------- - # - # Use the set of full file paths generated above as the link targets to - # create symlinks to these files in the target directory. - # - # ----------------------------------------------------------------------- - # - # If the task in consideration (one of the pre-processing tasks - # TN_MAKE_GRID, TN_MAKE_OROG, and TN_MAKE_SFC_CLIMO) was run, then - # the source location of the fix files will be located under the - # experiment directory. In this case, we use relative symlinks for - # portability and readability. Make absolute links otherwise. - # - relative_link_flag = False - #if run_task: - # relative_link_flag = True - - if relative_link_flag: - for fp in fps: - fn = os.path.basename(fp) - create_symlink_to_file(fp, fn, relative_link_flag) - # - # ----------------------------------------------------------------------- - # - # Set the C-resolution based on the resolution appearing in the file - # names. - # - # ----------------------------------------------------------------------- - # - cres = f"C{res}" - # - # ----------------------------------------------------------------------- - # - # If considering grid files, create a symlink to the halo4 grid file - # that does not contain the halo size in its name. This is needed by - # the tasks that generate the initial and lateral boundary condition - # files. 
- # - # ----------------------------------------------------------------------- - # - if file_group == "grid": - target = f"{cres}{dot_or_uscore}grid.tile{tile_rgnl}.halo{nh4}.nc" - symlink = f"{cres}{dot_or_uscore}grid.tile{tile_rgnl}.nc" - create_symlink_to_file(target, symlink, True) - # - # ----------------------------------------------------------------------- - # - # If considering surface climatology files, create symlinks to the surface - # climatology files that do not contain the halo size in their names. - # These are needed by the make_ics task. - # - # The forecast model needs sfc climo files to be named without the - # tile7 and halo references, and with only "tile1" in the name. - # - # ----------------------------------------------------------------------- - # - if file_group == "sfc_climo": - - for field in sfc_climo_fields: - - # Create links without "halo" in the name - halo = f"{cres}.{field}.tile{tile_rgnl}.halo{nh4}.nc" - no_halo = re.sub(f".halo{nh4}", "", halo) - create_symlink_to_file(halo, no_halo, True) - - # Create links without halo and tile7, and with "tile1" - halo_tile = f"{cres}.{field}.tile{tile_rgnl}.halo{nh0}.nc" - no_halo_tile = re.sub(f"tile{tile_rgnl}.halo{nh0}", "tile1", halo_tile) - create_symlink_to_file(halo_tile, no_halo_tile, True) - - # Change directory back to original one. - cd_vrfy(save_dir) - - return res - - -def parse_args(argv): - """Parse command line arguments""" - parser = argparse.ArgumentParser( - description="Creates symbolic links to FIX directories." 
- ) - - parser.add_argument( - "-f", - "--file-group", - dest="file_group", - required=True, - help='File group, could be one of ["grid", "orog", "sfc_climo"].', - ) - - parser.add_argument( - "-p", - "--path-to-defns", - dest="path_to_defns", - required=True, - help="Path to var_defns file.", - ) - - return parser.parse_args(argv) - - -if __name__ == "__main__": - args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) - link_fix( - verbose=cfg["workflow"]["VERBOSE"], - file_group=args.file_group, - source_dir=cfg[f"task_make_{args.file_group.lower()}"][ - f"{args.file_group.upper()}_DIR" - ], - target_dir=cfg["workflow"]["FIXlam"], - ccpp_phys_suite=cfg["workflow"]["CCPP_PHYS_SUITE"], - constants=cfg["constants"], - dot_or_uscore=cfg["workflow"]["DOT_OR_USCORE"], - nhw=cfg["grid_params"]["NHW"], - run_task=True, - sfc_climo_fields=cfg["fixed_files"]["SFC_CLIMO_FIELDS"], - ) - - -class Testing(unittest.TestCase): - def test_link_fix(self): - res = link_fix( - verbose=True, - file_group="grid", - source_dir=self.task_dir, - target_dir=self.FIXlam, - ccpp_phys_suite=self.cfg["CCPP_PHYS_SUITE"], - constants=self.cfg["constants"], - dot_or_uscore=self.cfg["DOT_OR_USCORE"], - nhw=self.cfg["NHW"], - run_task=False, - sfc_climo_fields=["foo", "bar"], - ) - self.assertTrue(res == "3357") - - def setUp(self): - define_macos_utilities() - TEST_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "test_data") - self.FIXlam = os.path.join(TEST_DIR, "expt", "fix_lam") - self.task_dir = os.path.join(TEST_DIR, "RRFS_CONUS_3km") - mkdir_vrfy("-p", self.FIXlam) - - self.cfg = { - "DOT_OR_USCORE": "_", - "NHW": 6, - "CCPP_PHYS_SUITE": "FV3_GSD_SAR", - "constants": { - "NH0": 0, - "NH4": 4, - "NH3": 3, - "TILE_RGNL": 7, - }, - } diff --git a/ush/load_modules_wflow.sh b/ush/load_modules_wflow.sh deleted file mode 100755 index 93392bba9d..0000000000 --- a/ush/load_modules_wflow.sh +++ /dev/null @@ -1,73 +0,0 @@ -#!/bin/bash - -# 
-#----------------------------------------------------------------------- -# -# This script loads the workflow modulefile for a given machine. -# It is a central place for all other scripts so that this is the only -# place workflow module loading can be modified. -# -#----------------------------------------------------------------------- -# - -function usage() { - cat << EOF_USAGE -Usage: source $0 PLATFORM - -OPTIONS: - PLATFORM - name of machine you are on - (e.g. cheyenne | hera | jet | orion | wcoss2 ) -EOF_USAGE -} - -# Make sure machine name is passed as first argument -if [ $# -eq 0 ]; then - usage - exit 1 -fi - -# help message -if [ "$1" == "--help" ] || [ "$1" == "-h" ]; then - usage - exit 0 -fi - -# Set machine name to lowercase -machine=${1,,} - -# Get home directory -scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -HOMEaqm=$( dirname "${scrfunc_dir}" ) - -# source version file (run) only if it is specified in versions directory -RUN_VER_FN="run.ver" -VERSION_FILE="${HOMEaqm}/versions/${RUN_VER_FN}" -if [ -f ${VERSION_FILE} ]; then - . ${VERSION_FILE} -fi - -# Source modulefile for this machine -WFLOW_MOD_FN="wflow_${machine}" -module reset -module use "${HOMEaqm}/modulefiles" -module load "${WFLOW_MOD_FN}" > /dev/null 2>&1 || { echo "ERROR: -Loading of platform-specific module file (WFLOW_MOD_FN) for the workflow -task failed: - WFLOW_MOD_FN = \"${WFLOW_MOD_FN}\""; exit 1; } - -# Activate conda -[[ ${SHELLOPTS} =~ nounset ]] && has_mu=true || has_mu=false - -$has_mu && set +u - -if [ ! 
-z $(command -v conda) ]; then - conda activate regional_workflow -fi - -$has_mu && set -u - -# List loaded modulefiles -module --version -module list - diff --git a/ush/make_grid_mosaic_file.sh b/ush/make_grid_mosaic_file.sh deleted file mode 100755 index 3890f93a22..0000000000 --- a/ush/make_grid_mosaic_file.sh +++ /dev/null @@ -1,189 +0,0 @@ -#!/bin/bash - -# -#----------------------------------------------------------------------- -# -# This file defines a function that creates a grid mosaic file from the -# specified grid file. -# -#----------------------------------------------------------------------- -# -function make_grid_mosaic_file() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names that this script/function can -# accept. 
Then process the arguments provided to it (which should con- -# sist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( \ -"grid_dir" \ -"grid_fn" \ -"mosaic_fn" \ -"run_cmd" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Declare local variables. -# -#----------------------------------------------------------------------- -# - local exec_fn \ - exec_fp \ - grid_fp \ - mosaic_fp \ - mosaic_fp_prefix -# -#----------------------------------------------------------------------- -# -# Set the name and path to the executable that creates a grid mosaic file -# and make sure that it exists. -# -#----------------------------------------------------------------------- -# - exec_fn="make_solo_mosaic" - exec_fp="$EXECdir/${exec_fn}" - if [ ! -f "${exec_fp}" ]; then - print_err_msg_exit "\ -The executable (exec_fp) for generating the grid mosaic file does not -exist: - exec_fp = \"${exec_fp}\" -Please ensure that you've built this executable." - fi -# -#----------------------------------------------------------------------- -# -# Create the grid mosaic file for the grid with a NH4-cell-wide halo. -# -#----------------------------------------------------------------------- -# - grid_fp="${grid_dir}/${grid_fn}" - mosaic_fp="${grid_dir}/${mosaic_fn}" - mosaic_fp_prefix="${mosaic_fp%.*}" -# -# Call the make_solo_mosaic executable/code to generate a mosaic file. 
-# Note the following about this code: -# -# 1) The code attempts to open the grid file specified by the argument -# of --tile_file in the directory specified by the argument of --dir. -# If it cannot find this file, it will fail. -# -# Note that: -# -# a) The argument of --grid may or may not contain a "/" at the end. -# The code will add a "/" if necessary when appending the argument -# of --tile_file to that of --grid to form the full path to the -# grid file. -# -# b) The code creates a string variable named "gridlocation" in the -# mosaic file that contains the argument of --dir followed if -# necessary by a "/". -# -# c) The code creates a string array variable named "gridfiles" in the -# mosaic file that has only a single element (for the case of a -# global or nested grid, it would contain more elements). This -# element contains the argument of --grid, i.e. the name of the -# grid file. -# -# 2) The argument of --mosaic must be the absolute or relative path to -# the netcdf mosaic file that is to be created but without the ".nc" -# file extension. For example, if we want the mosaic file to be in -# the directory /abc/def and be called ghi.nc, then we would specify -# -# --mosaic "/abc/def/ghi" -# -# Note that: -# -# a) All parts of the specified path except the last one (i.e. the -# substring after the last "/", which is the name of the mosaic -# file without the ".nc" extension) must exist. If they don't, -# the code will fail. -# -# b) If the argument of --mosaic is a relative path, then the code -# assumes that this path is relative to the current working directory, -# i.e. the directory from which the make_solo_mosaic executable is -# called. -# -# c) If the argument of --mosaic ends with a "/", then it is the path -# to a directory, not to a file. In this case, a mosaic file named -# ".nc" will be created in this absolute or relative directory. 
-# For example, if the argument of --mosaic is "/abc/def/", then a -# file named ".nc" will be created in the directory /abc/def -# (assuming the directory /abc/def exists). This is generally not -# what we want, so the argument to --mosaic should not end with a -# "/" -# -# 3) The code creates a string variable named "mosaic" in the mosaic file. -# This gets set exactly to the argument of --mosaic without any -# modifications. Thus, if this argument is a relative path, "mosaic" -# will be set to that relative path without the current working directory -# prepended to it. Similarly, "mosaic" will normally not contain at -# its end the ".nc" extension of the mosaic file (unless the argument -# to --mosaic itself contains that extension, e.g. if the argument is -# "/abc/def/ghi.nc", but in that case the mosaic file will be in the -# directory /abc/def and named ghi.nc.nc -- note the double ".nc" -# extensions). -# - ${run_cmd} "${exec_fp}" \ - --num_tiles 1 \ - --dir "${grid_dir}" \ - --tile_file "${grid_fn}" \ - --mosaic "${mosaic_fp_prefix}" || \ - print_err_msg_exit "\ -Call to executable (exec_fp) that generates a grid mosaic file for a -regional grid returned with nonzero exit code: - exec_fp = \"${exec_fp}\"" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the start of this script/function. 
-# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} - diff --git a/ush/mrms_pull_topofhour.py b/ush/mrms_pull_topofhour.py deleted file mode 100755 index bfca98fb4e..0000000000 --- a/ush/mrms_pull_topofhour.py +++ /dev/null @@ -1,169 +0,0 @@ -import sys, os, shutil, subprocess -import datetime -import re, csv, glob -import bisect -import numpy as np -import unittest - -if __name__ == "__main__": - # Copy and unzip MRMS files that are closest to top of hour - # Done every hour on a 20-minute lag - - # Include option to define valid time on command line - # Used to backfill verification - # try: - valid_time = str(sys.argv[1]) - - YYYY = int(valid_time[0:4]) - MM = int(valid_time[4:6]) - DD = int(valid_time[6:8]) - HH = int(valid_time[8:19]) - - valid = datetime.datetime(YYYY, MM, DD, HH, 0, 0) - - # except IndexError: - # valid_time = None - - # Default to current hour if not defined on command line - # if valid_time is None: - # now = datetime.datetime.utcnow() - # YYYY = int(now.strftime('%Y')) - # MM = int(now.strftime('%m')) - # DD = int(now.strftime('%d')) - # HH = int(now.strftime('%H')) - - # valid = datetime.datetime(YYYY,MM,DD,HH,0,0) - # valid_time = valid.strftime('%Y%m%d%H') - - print("Pulling " + valid_time + " MRMS data") - - # Set up working directory - DATA_HEAD = str(sys.argv[2]) - MRMS_PROD_DIR = str(sys.argv[3]) - MRMS_PRODUCT = str(sys.argv[4]) - level = str(sys.argv[5]) - - VALID_DIR = os.path.join(DATA_HEAD, valid.strftime("%Y%m%d")) - if not os.path.exists(VALID_DIR): - os.makedirs(VALID_DIR) - os.chdir(DATA_HEAD) - - # Sort list of files for each MRMS product - print(valid.strftime("%Y%m%d")) - if valid.strftime("%Y%m%d") < "20200303": - search_path = ( - MRMS_PROD_DIR - + "/" - + valid.strftime("%Y%m%d") - + "/dcom/us007003/ldmdata/obs/upperair/mrms/conus/" - + MRMS_PRODUCT - + "/" - + MRMS_PRODUCT - + "*.gz" - ) - elif valid.strftime("%Y%m%d") >= "20200303": - 
search_path = ( - MRMS_PROD_DIR - + "/" - + valid.strftime("%Y%m%d") - + "/upperair/mrms/conus/" - + MRMS_PRODUCT - + "/" - + MRMS_PRODUCT - + "*.gz" - ) - file_list = [f for f in glob.glob(search_path)] - time_list = [file_list[x][-24:-9] for x in range(len(file_list))] - int_list = [ - int(time_list[x][0:8] + time_list[x][9:15]) for x in range(len(time_list)) - ] - int_list.sort() - datetime_list = [ - datetime.datetime.strptime(str(x), "%Y%m%d%H%M%S") for x in int_list - ] - - # Find the MRMS file closest to the valid time - i = bisect.bisect_left(datetime_list, valid) - closest_timestamp = min( - datetime_list[max(0, i - 1) : i + 2], key=lambda date: abs(valid - date) - ) - - # Check to make sure closest file is within +/- 15 mins of top of the hour - # Copy and rename the file for future ease - difference = abs(closest_timestamp - valid) - if difference.total_seconds() <= 900: - filename1 = ( - MRMS_PRODUCT - + level - + closest_timestamp.strftime("%Y%m%d-%H%M%S") - + ".grib2.gz" - ) - filename2 = MRMS_PRODUCT + level + valid.strftime("%Y%m%d-%H") + "0000.grib2.gz" - - if valid.strftime("%Y%m%d") < "20200303": - print( - "cp " - + MRMS_PROD_DIR - + "/" - + valid.strftime("%Y%m%d") - + "/dcom/us007003/ldmdata/obs/upperair/mrms/conus/" - + MRMS_PRODUCT - + "/" - + filename1 - + " " - + VALID_DIR - + "/" - + filename2 - ) - - os.system( - "cp " - + MRMS_PROD_DIR - + "/" - + valid.strftime("%Y%m%d") - + "/dcom/us007003/ldmdata/obs/upperair/mrms/conus/" - + MRMS_PRODUCT - + "/" - + filename1 - + " " - + VALID_DIR - + "/" - + filename2 - ) - os.system("gunzip " + VALID_DIR + "/" + filename2) - elif valid.strftime("%Y%m%d") >= "20200303": - print( - "cp " - + MRMS_PROD_DIR - + "/" - + valid.strftime("%Y%m%d") - + "/upperair/mrms/conus/" - + MRMS_PRODUCT - + "/" - + filename1 - + " " - + VALID_DIR - + "/" - + filename2 - ) - - os.system( - "cp " - + MRMS_PROD_DIR - + "/" - + valid.strftime("%Y%m%d") - + "/upperair/mrms/conus/" - + MRMS_PRODUCT - + "/" - + filename1 - 
+ " " - + VALID_DIR - + "/" - + filename2 - ) - os.system("gunzip " + VALID_DIR + "/" + filename2) - -# dummy unittest -class Testing(unittest.TestCase): - def test_mrms_pull_topfhour(self): - pass diff --git a/ush/predef_grid_params.yaml b/ush/predef_grid_params.yaml deleted file mode 100755 index 145d5bfa51..0000000000 --- a/ush/predef_grid_params.yaml +++ /dev/null @@ -1,942 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# Set grid and other parameters according to the value of the predefined -# domain (PREDEF_GRID_NAME). Note that the code will enter this script -# only if PREDEF_GRID_NAME has a valid (and non-empty) value. -# -#################### -# The following comments need to be updated: -#################### -# -# 1) Reset the experiment title (expt_title). -# 2) Reset the grid parameters. -# 3) If the write component is to be used (i.e. QUILTING is set to -# "TRUE") and the variable WRTCMP_PARAMS_TMPL_FN containing the name -# of the write-component template file is unset or empty, set that -# filename variable to the appropriate preexisting template file. -# -# For the predefined domains, we determine the starting and ending indi- -# ces of the regional grid within tile 6 by specifying margins (in units -# of number of cells on tile 6) between the boundary of tile 6 and that -# of the regional grid (tile 7) along the left, right, bottom, and top -# portions of these boundaries. Note that we do not use "west", "east", -# "south", and "north" here because the tiles aren't necessarily orient- -# ed such that the left boundary segment corresponds to the west edge, -# etc. The widths of these margins (in units of number of cells on tile -# 6) are specified via the parameters -# -# num_margin_cells_T6_left -# num_margin_cells_T6_right -# num_margin_cells_T6_bottom -# num_margin_cells_T6_top -# -# where the "_T6" in these names is used to indicate that the cell count -# is on tile 6, not tile 7. 
-# -# Note that we must make the margins wide enough (by making the above -# four parameters large enough) such that a region of halo cells around -# the boundary of the regional grid fits into the margins, i.e. such -# that the halo does not overrun the boundary of tile 6. (The halo is -# added later in another script; its function is to feed in boundary -# conditions to the regional grid.) Currently, a halo of 5 regional -# grid cells is used around the regional grid. Setting num_margin_- -# cells_T6_... to at least 10 leaves enough room for this halo. -# -#----------------------------------------------------------------------- -# -#----------------------------------------------------------------------- -# -# The RRFS CONUS domain with ~25km cells. -# -#----------------------------------------------------------------------- -# -"RRFS_CONUS_25km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -97.5 - ESGgrid_LAT_CTR: 38.5 - ESGgrid_DELX: 25000.0 - ESGgrid_DELY: 25000.0 - ESGgrid_NX: 219 - ESGgrid_NY: 131 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 180 - LAYOUT_X: 5 - LAYOUT_Y: 2 - BLOCKSIZE: 40 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 2 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -97.5 - WRTCMP_cen_lat: 38.5 - WRTCMP_stdlat1: 38.5 - WRTCMP_stdlat2: 38.5 - WRTCMP_nx: 217 - WRTCMP_ny: 128 - WRTCMP_lon_lwr_left: -122.719528 - WRTCMP_lat_lwr_left: 21.138123 - WRTCMP_dx: 25000.0 - WRTCMP_dy: 25000.0 -# -#----------------------------------------------------------------------- -# -# The RRFS CONUS domain with ~25km cells that can be initialized from the HRRR. 
-# -#----------------------------------------------------------------------- -# -"RRFS_CONUScompact_25km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -97.5 - ESGgrid_LAT_CTR: 38.5 - ESGgrid_DELX: 25000.0 - ESGgrid_DELY: 25000.0 - ESGgrid_NX: 202 - ESGgrid_NY: 116 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 180 - LAYOUT_X: 5 - LAYOUT_Y: 2 - BLOCKSIZE: 40 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 2 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -97.5 - WRTCMP_cen_lat: 38.5 - WRTCMP_stdlat1: 38.5 - WRTCMP_stdlat2: 38.5 - WRTCMP_nx: 199 - WRTCMP_ny: 111 - WRTCMP_lon_lwr_left: -121.23349066 - WRTCMP_lat_lwr_left: 23.41731593 - WRTCMP_dx: 25000.0 - WRTCMP_dy: 25000.0 -# -#----------------------------------------------------------------------- -# -# The RRFS CONUS domain with ~13km cells. -# -#----------------------------------------------------------------------- -# -"RRFS_CONUS_13km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -97.5 - ESGgrid_LAT_CTR: 38.5 - ESGgrid_DELX: 13000.0 - ESGgrid_DELY: 13000.0 - ESGgrid_NX: 420 - ESGgrid_NY: 252 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 75 - LAYOUT_X: 16 - LAYOUT_Y: 10 - BLOCKSIZE: 32 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 10 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -97.5 - WRTCMP_cen_lat: 38.5 - WRTCMP_stdlat1: 38.5 - WRTCMP_stdlat2: 38.5 - WRTCMP_nx: 416 - WRTCMP_ny: 245 - WRTCMP_lon_lwr_left: -122.719528 - WRTCMP_lat_lwr_left: 21.138123 - WRTCMP_dx: 13000.0 - WRTCMP_dy: 13000.0 -# -#----------------------------------------------------------------------- -# -# The RRFS CONUS domain with ~13km cells that can be initialized from the HRRR. 
-# -#----------------------------------------------------------------------- -# -"RRFS_CONUScompact_13km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -97.5 - ESGgrid_LAT_CTR: 38.5 - ESGgrid_DELX: 13000.0 - ESGgrid_DELY: 13000.0 - ESGgrid_NX: 396 - ESGgrid_NY: 232 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 75 - LAYOUT_X: 16 - LAYOUT_Y: 10 - BLOCKSIZE: 32 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 16 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -97.5 - WRTCMP_cen_lat: 38.5 - WRTCMP_stdlat1: 38.5 - WRTCMP_stdlat2: 38.5 - WRTCMP_nx: 393 - WRTCMP_ny: 225 - WRTCMP_lon_lwr_left: -121.70231097 - WRTCMP_lat_lwr_left: 22.57417972 - WRTCMP_dx: 13000.0 - WRTCMP_dy: 13000.0 -# -#----------------------------------------------------------------------- -# -# The RRFS CONUS domain with ~3km cells. -# -#----------------------------------------------------------------------- -# -"RRFS_CONUS_3km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -97.5 - ESGgrid_LAT_CTR: 38.5 - ESGgrid_DELX: 3000.0 - ESGgrid_DELY: 3000.0 - ESGgrid_NX: 1820 - ESGgrid_NY: 1092 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 36 - LAYOUT_X: 28 - LAYOUT_Y: 28 - BLOCKSIZE: 28 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 28 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -97.5 - WRTCMP_cen_lat: 38.5 - WRTCMP_stdlat1: 38.5 - WRTCMP_stdlat2: 38.5 - WRTCMP_nx: 1799 - WRTCMP_ny: 1059 - WRTCMP_lon_lwr_left: -122.719528 - WRTCMP_lat_lwr_left: 21.138123 - WRTCMP_dx: 3000.0 - WRTCMP_dy: 3000.0 -# -#----------------------------------------------------------------------- -# -# The RRFS CONUS domain with ~3km cells that can be initialized from the HRRR. 
-# -#----------------------------------------------------------------------- -# -"RRFS_CONUScompact_3km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -97.5 - ESGgrid_LAT_CTR: 38.5 - ESGgrid_DELX: 3000.0 - ESGgrid_DELY: 3000.0 - ESGgrid_NX: 1748 - ESGgrid_NY: 1038 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 36 - LAYOUT_X: 30 - LAYOUT_Y: 16 - BLOCKSIZE: 32 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 16 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -97.5 - WRTCMP_cen_lat: 38.5 - WRTCMP_stdlat1: 38.5 - WRTCMP_stdlat2: 38.5 - WRTCMP_nx: 1746 - WRTCMP_ny: 1014 - WRTCMP_lon_lwr_left: -122.17364391 - WRTCMP_lat_lwr_left: 21.88588562 - WRTCMP_dx: 3000.0 - WRTCMP_dy: 3000.0 -# -#----------------------------------------------------------------------- -# -# The RRFS SUBCONUS domain with ~3km cells. -# -#----------------------------------------------------------------------- -# -"RRFS_SUBCONUS_3km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -97.5 - ESGgrid_LAT_CTR: 35.0 - ESGgrid_DELX: 3000.0 - ESGgrid_DELY: 3000.0 - ESGgrid_NX: 840 - ESGgrid_NY: 600 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 36 - LAYOUT_X: 30 - LAYOUT_Y: 24 - BLOCKSIZE: 35 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 24 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -97.5 - WRTCMP_cen_lat: 35.0 - WRTCMP_stdlat1: 35.0 - WRTCMP_stdlat2: 35.0 - WRTCMP_nx: 837 - WRTCMP_ny: 595 - WRTCMP_lon_lwr_left: -109.97410429 - WRTCMP_lat_lwr_left: 26.31459843 - WRTCMP_dx: 3000.0 - WRTCMP_dy: 3000.0 -# -#----------------------------------------------------------------------- -# -# A subconus domain over Indianapolis, Indiana with ~3km cells. This is -# mostly for testing on a 3km grid with a much small number of cells than -# on the full CONUS. 
-# -#----------------------------------------------------------------------- -# -"SUBCONUS_Ind_3km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -86.16 - ESGgrid_LAT_CTR: 39.77 - ESGgrid_DELX: 3000.0 - ESGgrid_DELY: 3000.0 - ESGgrid_NX: 200 - ESGgrid_NY: 200 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 36 - LAYOUT_X: 5 - LAYOUT_Y: 5 - BLOCKSIZE: 40 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 5 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -86.16 - WRTCMP_cen_lat: 39.77 - WRTCMP_stdlat1: 39.77 - WRTCMP_stdlat2: 39.77 - WRTCMP_nx: 197 - WRTCMP_ny: 197 - WRTCMP_lon_lwr_left: -89.47120417 - WRTCMP_lat_lwr_left: 37.07809642 - WRTCMP_dx: 3000.0 - WRTCMP_dy: 3000.0 -# -#----------------------------------------------------------------------- -# -# The RRFS Alaska domain with ~13km cells. -# -# Note: -# This grid has not been thoroughly tested (as of 20201027). -# -#----------------------------------------------------------------------- -# -"RRFS_AK_13km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -161.5 - ESGgrid_LAT_CTR: 63.0 - ESGgrid_DELX: 13000.0 - ESGgrid_DELY: 13000.0 - ESGgrid_NX: 320 - ESGgrid_NY: 240 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 10 - LAYOUT_X: 16 - LAYOUT_Y: 12 - BLOCKSIZE: 40 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 12 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -161.5 - WRTCMP_cen_lat: 63.0 - WRTCMP_stdlat1: 63.0 - WRTCMP_stdlat2: 63.0 - WRTCMP_nx: 318 - WRTCMP_ny: 234 - WRTCMP_lon_lwr_left: 172.23339164 - WRTCMP_lat_lwr_left: 45.77691870 - WRTCMP_dx: 13000.0 - WRTCMP_dy: 13000.0 -# -#----------------------------------------------------------------------- -# -# The RRFS Alaska domain with ~3km cells. -# -# Note: -# This grid has not been thoroughly tested (as of 20201027). 
-# -#----------------------------------------------------------------------- -# -"RRFS_AK_3km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -161.5 - ESGgrid_LAT_CTR: 63.0 - ESGgrid_DELX: 3000.0 - ESGgrid_DELY: 3000.0 - ESGgrid_NX: 1380 - ESGgrid_NY: 1020 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 10 - LAYOUT_X: 30 - LAYOUT_Y: 17 - BLOCKSIZE: 40 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 17 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -161.5 - WRTCMP_cen_lat: 63.0 - WRTCMP_stdlat1: 63.0 - WRTCMP_stdlat2: 63.0 - WRTCMP_nx: 1379 - WRTCMP_ny: 1003 - WRTCMP_lon_lwr_left: -187.89737923 - WRTCMP_lat_lwr_left: 45.84576053 - WRTCMP_dx: 3000.0 - WRTCMP_dy: 3000.0 -# -#----------------------------------------------------------------------- -# -# The WoFS domain with ~3km cells. -# -# Note: -# The WoFS domain will generate a 301 x 301 output grid (WRITE COMPONENT) and -# will eventually be movable (ESGgrid_LON_CTR/ESGgrid_LAT_CTR). A python script -# python_utils/fv3write_parms_lambert will be useful to determine -# WRTCMP_lon_lwr_left and WRTCMP_lat_lwr_left locations (only for Lambert map -# projection currently) of the quilting output when the domain location is -# moved. Later, it should be integrated into the workflow. 
-# -#----------------------------------------------------------------------- -# -"WoFS_3km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -97.5 - ESGgrid_LAT_CTR: 38.5 - ESGgrid_DELX: 3000.0 - ESGgrid_DELY: 3000.0 - ESGgrid_NX: 361 - ESGgrid_NY: 361 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 20 - LAYOUT_X: 18 - LAYOUT_Y: 12 - BLOCKSIZE: 30 - QUILTING: - WRTCMP_write_groups: "1" - WRTCMP_write_tasks_per_group: 12 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -97.5 - WRTCMP_cen_lat: 38.5 - WRTCMP_stdlat1: 38.5 - WRTCMP_stdlat2: 38.5 - WRTCMP_nx: 301 - WRTCMP_ny: 301 - WRTCMP_lon_lwr_left: -102.3802487 - WRTCMP_lat_lwr_left: 34.3407918 - WRTCMP_dx: 3000.0 - WRTCMP_dy: 3000.0 -# -#----------------------------------------------------------------------- -# -# A CONUS domain of GFDLgrid type with ~25km cells. -# -# Note: -# This grid is larger than the HRRRX domain and thus cannot be initialized -# using the HRRRX. -# -#----------------------------------------------------------------------- -# -"CONUS_25km_GFDLgrid": - GRID_GEN_METHOD: "GFDLgrid" - GFDLgrid_LON_T6_CTR: -97.5 - GFDLgrid_LAT_T6_CTR: 38.5 - GFDLgrid_STRETCH_FAC: 1.4 - GFDLgrid_NUM_CELLS: 96 - GFDLgrid_REFINE_RATIO: 3 - num_margin_cells_T6_left: 12 - GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G: 13 - num_margin_cells_T6_right: 12 - GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G: 84 - num_margin_cells_T6_bottom: 16 - GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G: 17 - num_margin_cells_T6_top: 16 - GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G: 80 - GFDLgrid_USE_NUM_CELLS_IN_FILENAMES: True - DT_ATMOS: 225 - LAYOUT_X: 6 - LAYOUT_Y: 4 - BLOCKSIZE: 36 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 4 - WRTCMP_output_grid: "rotated_latlon" - WRTCMP_cen_lon: -97.5 - WRTCMP_cen_lat: 38.5 - WRTCMP_lon_lwr_left: -24.40085141 - WRTCMP_lat_lwr_left: -19.65624142 - WRTCMP_lon_upr_rght: 24.40085141 - WRTCMP_lat_upr_rght: 19.65624142 - WRTCMP_dlon: 0.22593381 - WRTCMP_dlat: 0.22593381 -# 
-#----------------------------------------------------------------------- -# -# A CONUS domain of GFDLgrid type with ~3km cells. -# -# Note: -# This grid is larger than the HRRRX domain and thus cannot be initialized -# using the HRRRX. -# -#----------------------------------------------------------------------- -# -"CONUS_3km_GFDLgrid": - GRID_GEN_METHOD: "GFDLgrid" - GFDLgrid_LON_T6_CTR: -97.5 - GFDLgrid_LAT_T6_CTR: 38.5 - GFDLgrid_STRETCH_FAC: 1.5 - GFDLgrid_NUM_CELLS: 768 - GFDLgrid_REFINE_RATIO: 3 - num_margin_cells_T6_left: 69 - GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G: 70 - num_margin_cells_T6_right: 69 - GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G: 699 - num_margin_cells_T6_bottom: 164 - GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G: 165 - num_margin_cells_T6_top: 164 - GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G: 604 - GFDLgrid_USE_NUM_CELLS_IN_FILENAMES: True - DT_ATMOS: 36 - LAYOUT_X: 30 - LAYOUT_Y: 22 - BLOCKSIZE: 35 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 22 - WRTCMP_output_grid: "rotated_latlon" - WRTCMP_cen_lon: -97.5 - WRTCMP_cen_lat: 38.5 - WRTCMP_lon_lwr_left: -25.23144805 - WRTCMP_lat_lwr_left: -15.82130419 - WRTCMP_lon_upr_rght: 25.23144805 - WRTCMP_lat_upr_rght: 15.82130419 - WRTCMP_dlon: 0.02665763 - WRTCMP_dlat: 0.02665763 -# -#----------------------------------------------------------------------- -# -# EMC's Alaska grid. 
-# -#----------------------------------------------------------------------- -# -"EMC_AK": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -153.0 - ESGgrid_LAT_CTR: 61.0 - ESGgrid_DELX: 3000.0 - ESGgrid_DELY: 3000.0 - ESGgrid_NX: 1344 # Supergrid value 2704 - ESGgrid_NY: 1152 # Supergrid value 2320 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 18 - LAYOUT_X: 28 - LAYOUT_Y: 16 - BLOCKSIZE: 24 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 24 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -153.0 - WRTCMP_cen_lat: 61.0 - WRTCMP_stdlat1: 61.0 - WRTCMP_stdlat2: 61.0 - WRTCMP_nx: 1344 - WRTCMP_ny: 1152 - WRTCMP_lon_lwr_left: -177.0 - WRTCMP_lat_lwr_left: 42.5 - WRTCMP_dx: 3000.0 - WRTCMP_dy: 3000.0 -# -#----------------------------------------------------------------------- -# -# EMC's Hawaii grid. -# -#----------------------------------------------------------------------- -# -"EMC_HI": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -157.0 - ESGgrid_LAT_CTR: 20.0 - ESGgrid_DELX: 3000.0 - ESGgrid_DELY: 3000.0 - ESGgrid_NX: 432 # Supergrid value 880 - ESGgrid_NY: 360 # Supergrid value 736 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 18 - LAYOUT_X: 8 - LAYOUT_Y: 8 - BLOCKSIZE: 27 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 8 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -157.0 - WRTCMP_cen_lat: 20.0 - WRTCMP_stdlat1: 20.0 - WRTCMP_stdlat2: 20.0 - WRTCMP_nx: 420 - WRTCMP_ny: 348 - WRTCMP_lon_lwr_left: -162.8 - WRTCMP_lat_lwr_left: 15.2 - WRTCMP_dx: 3000.0 - WRTCMP_dy: 3000.0 -# -#----------------------------------------------------------------------- -# -# EMC's Puerto Rico grid. 
-# -#----------------------------------------------------------------------- -# -"EMC_PR": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -69.0 - ESGgrid_LAT_CTR: 18.0 - ESGgrid_DELX: 3000.0 - ESGgrid_DELY: 3000.0 - ESGgrid_NX: 576 # Supergrid value 1168 - ESGgrid_NY: 432 # Supergrid value 880 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 18 - LAYOUT_X: 16 - LAYOUT_Y: 8 - BLOCKSIZE: 24 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 24 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -69.0 - WRTCMP_cen_lat: 18.0 - WRTCMP_stdlat1: 18.0 - WRTCMP_stdlat2: 18.0 - WRTCMP_nx: 576 - WRTCMP_ny: 432 - WRTCMP_lon_lwr_left: -77 - WRTCMP_lat_lwr_left: 12 - WRTCMP_dx: 3000.0 - WRTCMP_dy: 3000.0 -# -#----------------------------------------------------------------------- -# -# EMC's Guam grid. -# -#----------------------------------------------------------------------- -# -"EMC_GU": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: 146.0 - ESGgrid_LAT_CTR: 15.0 - ESGgrid_DELX: 3000.0 - ESGgrid_DELY: 3000.0 - ESGgrid_NX: 432 # Supergrid value 880 - ESGgrid_NY: 360 # Supergrid value 736 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 18 - LAYOUT_X: 16 - LAYOUT_Y: 12 - BLOCKSIZE: 27 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 24 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: 146.0 - WRTCMP_cen_lat: 15.0 - WRTCMP_stdlat1: 15.0 - WRTCMP_stdlat2: 15.0 - WRTCMP_nx: 420 - WRTCMP_ny: 348 - WRTCMP_lon_lwr_left: 140 - WRTCMP_lat_lwr_left: 10 - WRTCMP_dx: 3000.0 - WRTCMP_dy: 3000.0 -# -#----------------------------------------------------------------------- -# -# Emulation of the HAFS v0.A grid at 25 km. 
-# -#----------------------------------------------------------------------- -# -"GSL_HAFSV0.A_25km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -62.0 - ESGgrid_LAT_CTR: 22.0 - ESGgrid_DELX: 25000.0 - ESGgrid_DELY: 25000.0 - ESGgrid_NX: 345 - ESGgrid_NY: 230 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 300 - LAYOUT_X: 5 - LAYOUT_Y: 5 - BLOCKSIZE: 6 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 32 - WRTCMP_output_grid: "regional_latlon" - WRTCMP_cen_lon: -62.0 - WRTCMP_cen_lat: 25.0 - WRTCMP_lon_lwr_left: -114.5 - WRTCMP_lat_lwr_left: -5.0 - WRTCMP_lon_upr_rght: -9.5 - WRTCMP_lat_upr_rght: 55.0 - WRTCMP_dlon: 0.25 - WRTCMP_dlat: 0.25 -# -#----------------------------------------------------------------------- -# -# Emulation of the HAFS v0.A grid at 13 km. -# -#----------------------------------------------------------------------- -# -"GSL_HAFSV0.A_13km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -62.0 - ESGgrid_LAT_CTR: 22.0 - ESGgrid_DELX: 13000.0 - ESGgrid_DELY: 13000.0 - ESGgrid_NX: 665 - ESGgrid_NY: 444 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 180 - LAYOUT_X: 19 - LAYOUT_Y: 12 - BLOCKSIZE: 35 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 32 - WRTCMP_output_grid: "regional_latlon" - WRTCMP_cen_lon: -62.0 - WRTCMP_cen_lat: 25.0 - WRTCMP_lon_lwr_left: -114.5 - WRTCMP_lat_lwr_left: -5.0 - WRTCMP_lon_upr_rght: -9.5 - WRTCMP_lat_upr_rght: 55.0 - WRTCMP_dlon: 0.13 - WRTCMP_dlat: 0.13 -# -#----------------------------------------------------------------------- -# -# Emulation of the HAFS v0.A grid at 3 km. 
-# -#----------------------------------------------------------------------- -# -"GSL_HAFSV0.A_3km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -62.0 - ESGgrid_LAT_CTR: 22.0 - ESGgrid_DELX: 3000.0 - ESGgrid_DELY: 3000.0 - ESGgrid_NX: 2880 - ESGgrid_NY: 1920 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 40 - LAYOUT_X: 32 - LAYOUT_Y: 24 - BLOCKSIZE: 32 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 32 - WRTCMP_output_grid: "regional_latlon" - WRTCMP_cen_lon: -62.0 - WRTCMP_cen_lat: 25.0 - WRTCMP_lon_lwr_left: -114.5 - WRTCMP_lat_lwr_left: -5.0 - WRTCMP_lon_upr_rght: -9.5 - WRTCMP_lat_upr_rght: 55.0 - WRTCMP_dlon: 0.03 - WRTCMP_dlat: 0.03 -# -#----------------------------------------------------------------------- -# -# 50-km HRRR Alaska grid. -# -#----------------------------------------------------------------------- -# -"GSD_HRRR_AK_50km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -163.5 - ESGgrid_LAT_CTR: 62.8 - ESGgrid_DELX: 50000.0 - ESGgrid_DELY: 50000.0 - ESGgrid_NX: 74 - ESGgrid_NY: 51 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 600 - LAYOUT_X: 2 - LAYOUT_Y: 3 - BLOCKSIZE: 37 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 1 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -163.5 - WRTCMP_cen_lat: 62.8 - WRTCMP_stdlat1: 62.8 - WRTCMP_stdlat2: 62.8 - WRTCMP_nx: 70 - WRTCMP_ny: 45 - WRTCMP_lon_lwr_left: 172.0 - WRTCMP_lat_lwr_left: 49.0 - WRTCMP_dx: 50000.0 - WRTCMP_dy: 50000.0 -# -#----------------------------------------------------------------------- -# -# 25-km HRRR grid for testing Online-CMAQ. 
-# -#----------------------------------------------------------------------- -# -"GSD_HRRR_25km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -97.5 - ESGgrid_LAT_CTR: 38.5 - ESGgrid_DELX: 25000.0 - ESGgrid_DELY: 25000.0 - ESGgrid_NX: 200 - ESGgrid_NY: 110 - ESGgrid_PAZI: 180.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 300 - LAYOUT_X: 10 - LAYOUT_Y: 11 - BLOCKSIZE: 2 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 10 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -97.5 - WRTCMP_cen_lat: 38.5 - WRTCMP_stdlat1: 38.5 - WRTCMP_stdlat2: 38.5 - WRTCMP_nx: 191 - WRTCMP_ny: 97 - WRTCMP_lon_lwr_left: -120.72962370 - WRTCMP_lat_lwr_left: 25.11648583 - WRTCMP_dx: 25000.0 - WRTCMP_dy: 25000.0 -# -#----------------------------------------------------------------------- -# -# 13-km AQM NA grid for Online-CMAQ. -# -#----------------------------------------------------------------------- -# -"AQM_NA_13km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -118.0 - ESGgrid_LAT_CTR: 50.0 - ESGgrid_DELX: 13000.0 - ESGgrid_DELY: 13000.0 - ESGgrid_NX: 800 - ESGgrid_NY: 544 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 180 - LAYOUT_X: 50 - LAYOUT_Y: 34 - BLOCKSIZE: 16 - QUILTING: - WRTCMP_write_groups: 2 - WRTCMP_write_tasks_per_group: 46 - WRTCMP_output_grid: "rotated_latlon" - WRTCMP_cen_lon: -118.0 - WRTCMP_cen_lat: 50.0 - WRTCMP_lon_lwr_left: -45.25 - WRTCMP_lat_lwr_left: -28.5 - WRTCMP_lon_upr_rght: 45.25 - WRTCMP_lat_upr_rght: 28.5 - WRTCMP_dlon: 0.116908139 - WRTCMP_dlat: 0.116908139 -# -#----------------------------------------------------------------------- -# -# Emulation of GSD's RAP domain with ~13km cell size. 
-# -#----------------------------------------------------------------------- -# -"RRFS_NA_13km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -112.5 - ESGgrid_LAT_CTR: 55.0 - ESGgrid_DELX: 13000.0 - ESGgrid_DELY: 13000.0 - ESGgrid_NX: 912 - ESGgrid_NY: 623 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 75 - LAYOUT_X: 16 - LAYOUT_Y: 16 - BLOCKSIZE: 30 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 16 - WRTCMP_output_grid: "rotated_latlon" - WRTCMP_cen_lon: -113.0 - WRTCMP_cen_lat: 55.0 - WRTCMP_lon_lwr_left: -61.0 - WRTCMP_lat_lwr_left: -37.0 - WRTCMP_lon_upr_rght: 61.0 - WRTCMP_lat_upr_rght: 37.0 - WRTCMP_dlon: 0.1169081 - WRTCMP_dlat: 0.1169081 -# -#----------------------------------------------------------------------- -# -# Future operational RRFS domain with ~3km cell size. -# -#----------------------------------------------------------------------- -# -"RRFS_NA_3km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -112.5 - ESGgrid_LAT_CTR: 55.0 - ESGgrid_DELX: 3000.0 - ESGgrid_DELY: 3000.0 - ESGgrid_NX: 3950 - ESGgrid_NY: 2700 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 36 - LAYOUT_X: 20 # 40 - EMC operational configuration - LAYOUT_Y: 35 # 45 - EMC operational configuration - BLOCKSIZE: 28 - QUILTING: - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 144 - WRTCMP_output_grid: "rotated_latlon" - WRTCMP_cen_lon: -113.0 - WRTCMP_cen_lat: 55.0 - WRTCMP_lon_lwr_left: -61.0 - WRTCMP_lat_lwr_left: -37.0 - WRTCMP_lon_upr_rght: 61.0 - WRTCMP_lat_upr_rght: 37.0 - WRTCMP_dlon: 0.025 - WRTCMP_dlat: 0.025 - diff --git a/ush/run_srw_tests.py b/ush/run_srw_tests.py deleted file mode 100755 index 9e77be14b8..0000000000 --- a/ush/run_srw_tests.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python3 - -import os -import subprocess -import time -import argparse - -# Python class to handle the launching of a set of SRW tests -# The expectation is to have a "clean" experiment directory with only new experiments -# 
that are ready to run (e.g. no _old* experiments left around from previous tests -# This script takes only one parameter "-e" or "--exptdir" which points to the -# expt_basedir specified when the run_WE2E_tests.sh is run to set up the tests. -# The script will work sequentially through each of the test directories and -# launch the workflow for each with a call to launch_FV3LAM_wflow.sh -# After the initial launch, the checkTests method is called to monitor the -# status of each test and call the launch_FV3LAM_wflow.sh script repeatedly -# in each uncompleted workflow until all workflows are done. -class SRWTest: - def __init__(self, exptdir): - self.exptdir=exptdir - # Get a list of test directories - cmdstring="find {} -maxdepth 1 -type d | tail -n+2".format(self.exptdir) - status= subprocess.check_output(cmdstring,shell=True).strip().decode('utf-8') - # Turn the stdout from the shell command into a list - self.testDirectories = status.split("\n") - self.launchcmd = "./launch_FV3LAM_wflow.sh >& /dev/null" - # Loop through each of the test directories and launch the initial jobs in the workflow - for testD in self.testDirectories: - print("starting {} workflow".format(testD)) - os.chdir(testD) - os.system(self.launchcmd) - os.chdir(self.exptdir) - # Now start monitoring the workflows - self.checkTests() - - def checkTests(self): - while(len(self.testDirectories) > 0): - # Only continue running launch command for workflows that aren't complete - # so check for any that have failed or completed and cull them from the list - cmdstring="grep -L 'wflow_status =' */log.launch_FV3LAM_wflow | xargs dirname" - try: - status= subprocess.check_output(cmdstring,shell=True).strip().decode('utf-8') - except: - print("Tests have all completed") - return - self.testDirectories = status.split("\n") - # continue looping through directories - for testD in self.testDirectories: - os.chdir(testD) - os.system(self.launchcmd) - os.chdir(self.exptdir) - print("calling 
launch_FV3LAM_wflow.sh from {}".format(testD)) - time.sleep(5.0) - time.sleep(30.0) - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Run through a set of SRW WE2E tests until they are complete') - parser.add_argument('-e','--exptdir', help='directory where experiments have been staged', required=False,default=os.getcwd()) - args = vars(parser.parse_args()) - - test = SRWTest(args['exptdir']) - diff --git a/ush/set_FV3nml_ens_stoch_seeds.py b/ush/set_FV3nml_ens_stoch_seeds.py deleted file mode 100755 index 08ed944f46..0000000000 --- a/ush/set_FV3nml_ens_stoch_seeds.py +++ /dev/null @@ -1,202 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -import argparse -import unittest -from textwrap import dedent -from datetime import datetime - -from python_utils import ( - print_input_args, - print_info_msg, - print_err_msg_exit, - date_to_str, - mkdir_vrfy, - cp_vrfy, - cd_vrfy, - str_to_type, - import_vars, - set_env_var, - define_macos_utilities, - cfg_to_yaml_str, - load_shell_config, - flatten_dict, -) - -from set_namelist import set_namelist - - -def set_FV3nml_ens_stoch_seeds(cdate): - """ - This function, for an ensemble-enabled experiment - (i.e. for an experiment for which the workflow configuration variable - DO_ENSEMBLE has been set to "TRUE"), creates new namelist files with - unique stochastic "seed" parameters, using a base namelist file in the - ${EXPTDIR} directory as a template. These new namelist files are stored - within each member directory housed within each cycle directory. Files - of any two ensemble members differ only in their stochastic "seed" - parameter values. These namelist files are generated when this file is - called as part of the TN_RUN_FCST task. 
- - Args: - cdate - Returns: - None - """ - - print_input_args(locals()) - - # import all environment variables - import_vars() - - # - # ----------------------------------------------------------------------- - # - # For a given cycle and member, generate a namelist file with unique - # seed values. - # - # ----------------------------------------------------------------------- - # - fv3_nml_ensmem_fp = f"{os.getcwd()}{os.sep}{FV3_NML_FN}" - - ensmem_num = ENSMEM_INDX - - cdate_i = int(cdate.strftime("%Y%m%d%H")) - - settings = {} - nam_stochy_dict = {} - - if DO_SPPT: - iseed_sppt = cdate_i * 1000 + ensmem_num * 10 + 1 - nam_stochy_dict.update({"iseed_sppt": iseed_sppt}) - - if DO_SHUM: - iseed_shum = cdate_i * 1000 + ensmem_num * 10 + 2 - nam_stochy_dict.update({"iseed_shum": iseed_shum}) - - if DO_SKEB: - iseed_skeb = cdate_i * 1000 + ensmem_num * 10 + 3 - nam_stochy_dict.update({"iseed_skeb": iseed_skeb}) - - settings["nam_stochy"] = nam_stochy_dict - - if DO_SPP: - num_iseed_spp = len(ISEED_SPP) - iseed_spp = [None] * num_iseed_spp - for i in range(num_iseed_spp): - iseed_spp[i] = cdate_i * 1000 + ensmem_num * 10 + ISEED_SPP[i] - - settings["nam_sppperts"] = {"iseed_spp": iseed_spp} - else: - settings["nam_sppperts"] = {} - - if DO_LSM_SPP: - iseed_lsm_spp = cdate_i * 1000 + ensmem_num * 10 + 9 - - settings["nam_sppperts"] = {"iseed_lndp": [iseed_lsm_spp]} - - settings_str = cfg_to_yaml_str(settings) - - print_info_msg( - dedent( - f""" - The variable 'settings' specifying seeds in '{FV3_NML_FP}' - has been set as follows: - - settings =\n\n""" - ) - + settings_str, - verbose=VERBOSE, - ) - - try: - set_namelist( - ["-q", "-n", FV3_NML_FP, "-u", settings_str, "-o", fv3_nml_ensmem_fp] - ) - except: - print_err_msg_exit( - dedent( - f""" - Call to python script set_namelist.py to set the variables in the FV3 - namelist file that specify the paths to the surface climatology files - failed. 
Parameters passed to this script are: - Full path to base namelist file: - FV3_NML_FP = '{FV3_NML_FP}' - Full path to output namelist file: - fv3_nml_ensmem_fp = '{fv3_nml_ensmem_fp}' - Namelist settings specified on command line (these have highest precedence):\n - settings =\n\n""" - ) - + settings_str - ) - - -def parse_args(argv): - """Parse command line arguments""" - parser = argparse.ArgumentParser( - description="Creates stochastic seeds for an ensemble experiment." - ) - - parser.add_argument("-c", "--cdate", dest="cdate", required=True, help="Date.") - - parser.add_argument( - "-p", - "--path-to-defns", - dest="path_to_defns", - required=True, - help="Path to var_defns file.", - ) - - return parser.parse_args(argv) - - -if __name__ == "__main__": - args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) - cfg = flatten_dict(cfg) - import_vars(dictionary=cfg) - set_FV3nml_ens_stoch_seeds(str_to_type(args.cdate)) - - -class Testing(unittest.TestCase): - def test_set_FV3nml_ens_stoch_seeds(self): - set_FV3nml_ens_stoch_seeds(cdate=self.cdate) - - def setUp(self): - define_macos_utilities() - set_env_var("DEBUG", True) - set_env_var("VERBOSE", True) - self.cdate = datetime(2021, 1, 1) - USHdir = os.path.dirname(os.path.abspath(__file__)) - PARMdir = os.path.join(USHdir, "..", "parm") - EXPTDIR = os.path.join(USHdir, "test_data", "expt") - mkdir_vrfy("-p", EXPTDIR) - cp_vrfy( - os.path.join(PARMdir, "input.nml.FV3"), - os.path.join(EXPTDIR, "input.nml"), - ) - for i in range(2): - mkdir_vrfy( - "-p", - os.path.join( - EXPTDIR, - f"{date_to_str(self.cdate,format='%Y%m%d%H')}{os.sep}mem{i+1}", - ), - ) - - cd_vrfy( - f"{EXPTDIR}{os.sep}{date_to_str(self.cdate,format='%Y%m%d%H')}{os.sep}mem2" - ) - - set_env_var("USHdir", USHdir) - set_env_var("ENSMEM_INDX", 2) - set_env_var("FV3_NML_FN", "input.nml") - set_env_var("FV3_NML_FP", os.path.join(EXPTDIR, "input.nml")) - set_env_var("DO_SHUM", True) - set_env_var("DO_SKEB", True) - 
set_env_var("DO_SPPT", True) - set_env_var("DO_SPP", True) - set_env_var("DO_LSM_SPP", True) - ISEED_SPP = [4, 5, 6, 7, 8] - set_env_var("ISEED_SPP", ISEED_SPP) diff --git a/ush/set_FV3nml_sfc_climo_filenames.py b/ush/set_FV3nml_sfc_climo_filenames.py deleted file mode 100755 index 69c9e47528..0000000000 --- a/ush/set_FV3nml_sfc_climo_filenames.py +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/env python3 - -import unittest -import os -import sys -import argparse -from textwrap import dedent - -from python_utils import ( - print_input_args, - print_info_msg, - print_err_msg_exit, - check_var_valid_value, - mv_vrfy, - mkdir_vrfy, - cp_vrfy, - rm_vrfy, - import_vars, - set_env_var, - load_config_file, - load_shell_config, - flatten_dict, - define_macos_utilities, - find_pattern_in_str, - cfg_to_yaml_str, -) - -from set_namelist import set_namelist - - -def set_FV3nml_sfc_climo_filenames(): - """ - This function sets the values of the variables in - the forecast model's namelist file that specify the paths to the surface - climatology files on the FV3LAM native grid (which are either pregenerated - or created by the TN_MAKE_SFC_CLIMO task). Note that the workflow - generation scripts create symlinks to these surface climatology files - in the FIXlam directory, and the values in the namelist file that get - set by this function are relative or full paths to these links. 
- - Args: - None - Returns: - None - """ - - # import all environment variables - import_vars() - - # fixed file mapping variables - fixed_cfg = load_config_file(os.path.join(PARMdir, "fixed_files_mapping.yaml")) - IMPORTS = ["SFC_CLIMO_FIELDS", "FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING"] - import_vars(dictionary=flatten_dict(fixed_cfg), env_vars=IMPORTS) - - # The regular expression regex_search set below will be used to extract - # from the elements of the array FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING - # the name of the namelist variable to set and the corresponding surface - # climatology field from which to form the name of the surface climatology file - regex_search = "^[ ]*([^| ]+)[ ]*[|][ ]*([^| ]+)[ ]*$" - - # Set the suffix of the surface climatology files. - suffix = "tileX.nc" - - # create yaml-complaint string - settings = {} - - dummy_run_dir = os.path.join(EXPTDIR, "any_cyc") - if DO_ENSEMBLE == "TRUE": - dummy_run_dir += os.sep + "any_ensmem" - - namsfc_dict = {} - for mapping in FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING: - tup = find_pattern_in_str(regex_search, mapping) - nml_var_name = tup[0] - sfc_climo_field_name = tup[1] - - check_var_valid_value(sfc_climo_field_name, SFC_CLIMO_FIELDS) - - fp = os.path.join(FIXlam, f"{CRES}.{sfc_climo_field_name}.{suffix}") - - namsfc_dict[nml_var_name] = fp - - settings["namsfc_dict"] = namsfc_dict - settings_str = cfg_to_yaml_str(settings) - - print_info_msg( - dedent( - f""" - The variable 'settings' specifying values of the namelist variables - has been set as follows:\n - settings =\n\n""" - ) - + settings_str, - verbose=VERBOSE, - ) - - # Rename the FV3 namelist and call set_namelist - fv3_nml_base_fp = f"{FV3_NML_FP}.base" - mv_vrfy(f"{FV3_NML_FP} {fv3_nml_base_fp}") - - try: - set_namelist( - ["-q", "-n", fv3_nml_base_fp, "-u", settings_str, "-o", FV3_NML_FP] - ) - except: - print_err_msg_exit( - dedent( - f""" - Call to python script set_namelist.py to set the variables in the FV3 - namelist 
file that specify the paths to the surface climatology files - failed. Parameters passed to this script are: - Full path to base namelist file: - fv3_nml_base_fp = '{fv3_nml_base_fp}' - Full path to output namelist file: - FV3_NML_FP = '{FV3_NML_FP}' - Namelist settings specified on command line (these have highest precedence):\n - settings =\n\n""" - ) - + settings_str - ) - - rm_vrfy(f"{fv3_nml_base_fp}") - - -def parse_args(argv): - """Parse command line arguments""" - parser = argparse.ArgumentParser(description="Set surface climatology fields.") - - parser.add_argument( - "-p", - "--path-to-defns", - dest="path_to_defns", - required=True, - help="Path to var_defns file.", - ) - - return parser.parse_args(argv) - - -if __name__ == "__main__": - args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) - cfg = flatten_dict(cfg) - import_vars(dictionary=cfg) - set_FV3nml_sfc_climo_filenames() - - -class Testing(unittest.TestCase): - def test_set_FV3nml_sfc_climo_filenames(self): - set_FV3nml_sfc_climo_filenames() - - def setUp(self): - define_macos_utilities() - set_env_var("DEBUG", True) - set_env_var("VERBOSE", True) - USHdir = os.path.dirname(os.path.abspath(__file__)) - PARMdir = os.path.join(USHdir, "..", "parm") - EXPTDIR = os.path.join(USHdir, "test_data", "expt") - FIXlam = os.path.join(EXPTDIR, "fix_lam") - mkdir_vrfy("-p", FIXlam) - mkdir_vrfy("-p", EXPTDIR) - cp_vrfy( - os.path.join(PARMdir, "input.nml.FV3"), - os.path.join(EXPTDIR, "input.nml"), - ) - set_env_var("PARMdir", PARMdir) - set_env_var("EXPTDIR", EXPTDIR) - set_env_var("FIXlam", FIXlam) - set_env_var("DO_ENSEMBLE", False) - set_env_var("CRES", "C3357") - set_env_var("FV3_NML_FP", os.path.join(EXPTDIR, "input.nml")) diff --git a/ush/set_gridparams_ESGgrid.py b/ush/set_gridparams_ESGgrid.py deleted file mode 100755 index cf8ddb9ff8..0000000000 --- a/ush/set_gridparams_ESGgrid.py +++ /dev/null @@ -1,116 +0,0 @@ -#!/usr/bin/env python3 - -import os -import unittest -from 
datetime import datetime, timedelta - -from python_utils import ( - import_vars, - set_env_var, - print_input_args, - load_config_file, - flatten_dict, -) - - -def set_gridparams_ESGgrid( - lon_ctr, lat_ctr, nx, ny, halo_width, delx, dely, pazi, constants -): - """Sets the parameters for a grid that is to be generated using the "ESGgrid" - grid generation method (i.e. GRID_GEN_METHOD set to "ESGgrid"). - - Args: - lon_ctr - lat_ctr - nx - ny - halo_width - delx - dely - pazi - constants: dictionary of SRW constants - Returns: - Tuple of inputs, and 4 outputs (see return statement) - """ - - print_input_args(locals()) - - # get constants - RADIUS_EARTH = constants["RADIUS_EARTH"] - DEGS_PER_RADIAN = constants["DEGS_PER_RADIAN"] - - # - # ----------------------------------------------------------------------- - # - # For a ESGgrid-type grid, the orography filtering is performed by pass- - # ing to the orography filtering the parameters for an "equivalent" glo- - # bal uniform cubed-sphere grid. These are the parameters that a global - # uniform cubed-sphere grid needs to have in order to have a nominal - # grid cell size equal to that of the (average) cell size on the region- - # al grid. These globally-equivalent parameters include a resolution - # (in units of number of cells in each of the two horizontal directions) - # and a stretch factor. The equivalent resolution is calculated in the - # script that generates the grid, and the stretch factor needs to be set - # to 1 because we are considering an equivalent globally UNIFORM grid. - # However, it turns out that with a non-symmetric regional grid (one in - # which nx is not equal to ny), setting stretch_factor to 1 fails be- - # cause the orography filtering program is designed for a global cubed- - # sphere grid and thus assumes that nx and ny for a given tile are equal - # when stretch_factor is exactly equal to 1. - # ^^-- Why is this? 
Seems like symmetry btwn x and y should still hold when the stretch factor is not equal to 1. - # It turns out that the program will work if we set stretch_factor to a - # value that is not exactly 1. This is what we do below. - # - return { - "LON_CTR": lon_ctr, - "LAT_CTR": lat_ctr, - "NX": nx, - "NY": ny, - "PAZI": pazi, - "NHW": halo_width, - "STRETCH_FAC": 0.999, - "DEL_ANGLE_X_SG": (delx / (2.0 * RADIUS_EARTH)) * DEGS_PER_RADIAN, - "DEL_ANGLE_Y_SG": (dely / (2.0 * RADIUS_EARTH)) * DEGS_PER_RADIAN, - "NEG_NX_OF_DOM_WITH_WIDE_HALO": int(-(nx + 2 * halo_width)), - "NEG_NY_OF_DOM_WITH_WIDE_HALO": int(-(ny + 2 * halo_width)), - } - - -class Testing(unittest.TestCase): - def test_set_gridparams_ESGgrid(self): - - grid_parms = set_gridparams_ESGgrid( - lon_ctr=-97.5, - lat_ctr=38.5, - nx=1748, - ny=1038, - pazi=0.0, - halo_width=6, - delx=3000.0, - dely=3000.0, - constants=dict( - RADIUS_EARTH=6371200.0, - DEGS_PER_RADIAN=57.29577951308232087679, - ), - ) - - self.assertEqual( - list(grid_parms.values()), - [ - -97.5, - 38.5, - 1748, - 1038, - 0.0, - 6, - 0.999, - 0.013489400626196555, - 0.013489400626196555, - -1760, - -1050, - ], - ) - - def setUp(self): - set_env_var("DEBUG", False) - set_env_var("VERBOSE", False) diff --git a/ush/set_gridparams_GFDLgrid.py b/ush/set_gridparams_GFDLgrid.py deleted file mode 100755 index 33c39439f4..0000000000 --- a/ush/set_gridparams_GFDLgrid.py +++ /dev/null @@ -1,480 +0,0 @@ -#!/usr/bin/env python3 - -import os -import unittest - -from python_utils import ( - import_vars, - set_env_var, - print_input_args, - print_info_msg, - print_err_msg_exit, - load_config_file, - flatten_dict, -) - - -def prime_factors(n): - i = 2 - factors = [] - while i * i <= n: - if n % i: - i += 1 - else: - n //= i - factors.append(i) - if n > 1: - factors.append(n) - return factors - - -def set_gridparams_GFDLgrid( - lon_of_t6_ctr, - lat_of_t6_ctr, - res_of_t6g, - stretch_factor, - refine_ratio_t6g_to_t7g, - istart_of_t7_on_t6g, - iend_of_t7_on_t6g, - 
jstart_of_t7_on_t6g, - jend_of_t7_on_t6g, - verbose, - nh4, -): - """Sets the parameters for a grid that is to be generated using the "GFDLgrid" - grid generation method (i.e. GRID_GEN_METHOD set to "ESGgrid"). - - Args: - lon_of_t6_ctr - lat_of_t6_ctr - res_of_t6g - stretch_factor - refine_ratio_t6g_to_t7g - istart_of_t7_on_t6g - iend_of_t7_on_t6g - jstart_of_t7_on_t6g - jend_of_t7_on_t6g - verbose - nh4 - Returns: - Tuple of inputs and outputs (see return statement) - """ - - print_input_args(locals()) - - # - # ----------------------------------------------------------------------- - # - # To simplify the grid setup, we require that tile 7 be centered on tile - # 6. Note that this is not really a restriction because tile 6 can al- - # ways be moved so that it is centered on tile 7 [the location of tile 6 - # doesn't really matter because for a regional setup, the forecast model - # will only run on tile 7 (not on tiles 1-6)]. - # - # We now check that tile 7 is centered on tile 6 by checking (1) that - # the number of cells (on tile 6) between the left boundaries of these - # two tiles is equal to that between their right boundaries and (2) that - # the number of cells (on tile 6) between the bottom boundaries of these - # two tiles is equal to that between their top boundaries. If not, we - # print out an error message and exit. If so, we set the longitude and - # latitude of the center of tile 7 to those of tile 6 and continue. - # - # ----------------------------------------------------------------------- - # - - nx_of_t6_on_t6g = res_of_t6g - ny_of_t6_on_t6g = res_of_t6g - - num_left_margin_cells_on_t6g = istart_of_t7_on_t6g - 1 - num_right_margin_cells_on_t6g = nx_of_t6_on_t6g - iend_of_t7_on_t6g - - # This if-statement can hopefully be removed once EMC agrees to make their - # GFDLgrid type grids (tile 7) symmetric about tile 6. 
- num_bot_margin_cells_on_t6g = jstart_of_t7_on_t6g - 1 - num_top_margin_cells_on_t6g = ny_of_t6_on_t6g - jend_of_t7_on_t6g - - # This if-statement can hopefully be removed once EMC agrees to make their - # GFDLgrid type grids (tile 7) symmetric about tile 6. - if num_bot_margin_cells_on_t6g != num_top_margin_cells_on_t6g: - print_err_msg_exit( - f""" - In order for tile 7 to be centered in the y direction on tile 6, the y- - direction tile 6 cell indices at which tile 7 starts and ends (given by - jstart_of_t7_on_t6g and jend_of_t7_on_t6g, respectively) must be set - such that the number of tile 6 cells in the margin between the left - boundaries of tiles 6 and 7 (given by num_left_margin_cells_on_t6g) is - equal to that in the margin between their right boundaries (given by - num_right_margin_cells_on_t6g): - jstart_of_t7_on_t6g = {jstart_of_t7_on_t6g} - jend_of_t7_on_t6g = {jend_of_t7_on_t6g} - num_bot_margin_cells_on_t6g = {num_bot_margin_cells_on_t6g} - num_top_margin_cells_on_t6g = {num_top_margin_cells_on_t6g} - Note that the total number of cells in the y-direction on tile 6 is gi- - ven by: - ny_of_t6_on_t6g = {ny_of_t6_on_t6g} - Please reset jstart_of_t7_on_t6g and jend_of_t7_on_t6g and rerun.""" - ) - - lon_of_t7_ctr = lon_of_t6_ctr - lat_of_t7_ctr = lat_of_t6_ctr - # - # ----------------------------------------------------------------------- - # - # The grid generation script grid_gen_scr called below in turn calls the - # make_hgrid utility/executable to construct the regional grid. make_- - # hgrid accepts as arguments the index limits (i.e. starting and ending - # indices) of the regional grid on the supergrid of the regional grid's - # parent tile. The regional grid's parent tile is tile 6, and the su- - # pergrid of any given tile is defined as the grid obtained by doubling - # the number of cells in each direction on that tile's grid. 
We will - # denote these index limits by - # - # istart_of_t7_on_t6sg - # iend_of_t7_on_t6sg - # jstart_of_t7_on_t6sg - # jend_of_t7_on_t6sg - # - # The "_T6SG" suffix in these names is used to indicate that the indices - # are on the supergrid of tile 6. Recall, however, that we have as in- - # puts the index limits of the regional grid on the tile 6 grid, not its - # supergrid. These are given by - # - # istart_of_t7_on_t6g - # iend_of_t7_on_t6g - # jstart_of_t7_on_t6g - # jend_of_t7_on_t6g - # - # We can obtain the former from the latter by recalling that the super- - # grid has twice the resolution of the original grid. Thus, - # - # istart_of_t7_on_t6sg = 2*istart_of_t7_on_t6g - 1 - # iend_of_t7_on_t6sg = 2*iend_of_t7_on_t6g - # jstart_of_t7_on_t6sg = 2*jstart_of_t7_on_t6g - 1 - # jend_of_t7_on_t6sg = 2*jend_of_t7_on_t6g - # - # These are obtained assuming that grid cells on tile 6 must either be - # completely within the regional domain or completely outside of it, - # i.e. the boundary of the regional grid must coincide with gridlines - # on the tile 6 grid; it cannot cut through tile 6 cells. (Note that - # this implies that the starting indices on the tile 6 supergrid must be - # odd while the ending indices must be even; the above expressions sa- - # tisfy this requirement.) We perform these calculations next. - # - # ----------------------------------------------------------------------- - # - istart_of_t7_on_t6sg = 2 * istart_of_t7_on_t6g - 1 - iend_of_t7_on_t6sg = 2 * iend_of_t7_on_t6g - jstart_of_t7_on_t6sg = 2 * jstart_of_t7_on_t6g - 1 - jend_of_t7_on_t6sg = 2 * jend_of_t7_on_t6g - # - # ----------------------------------------------------------------------- - # - # If we simply pass to make_hgrid the index limits of the regional grid - # on the tile 6 supergrid calculated above, make_hgrid will generate a - # regional grid without a halo. 
To obtain a regional grid with a halo, - # we must pass to make_hgrid the index limits (on the tile 6 supergrid) - # of the regional grid including a halo. We will let the variables - # - # istart_of_t7_with_halo_on_t6sg - # iend_of_t7_with_halo_on_t6sg - # jstart_of_t7_with_halo_on_t6sg - # jend_of_t7_with_halo_on_t6sg - # - # denote these limits. The reason we include "_wide_halo" in these va- - # riable names is that the halo of the grid that we will first generate - # will be wider than the halos that are actually needed as inputs to the - # FV3LAM model (i.e. the 0-cell-wide, 3-cell-wide, and 4-cell-wide halos - # described above). We will generate the grids with narrower halos that - # the model needs later on by "shaving" layers of cells from this wide- - # halo grid. Next, we describe how to calculate the above indices. - # - # Let halo_width_on_t7g denote the width of the "wide" halo in units of number of - # grid cells on the regional grid (i.e. tile 7) that we'd like to have - # along all four edges of the regional domain (left, right, bottom, and - # top). To obtain the corresponding halo width in units of number of - # cells on the tile 6 grid -- which we denote by halo_width_on_t6g -- we simply di- - # vide halo_width_on_t7g by the refinement ratio, i.e. - # - # halo_width_on_t6g = halo_width_on_t7g/refine_ratio_t6g_to_t7g - # - # The corresponding halo width on the tile 6 supergrid is then given by - # - # halo_width_on_t6sg = 2*halo_width_on_t6g - # = 2*halo_width_on_t7g/refine_ratio_t6g_to_t7g - # - # Note that halo_width_on_t6sg must be an integer, but the expression for it de- - # rived above may not yield an integer. To ensure that the halo has a - # width of at least halo_width_on_t7g cells on the regional grid, we round up the - # result of the expression above for halo_width_on_t6sg, i.e. we redefine halo_width_on_t6sg - # to be - # - # halo_width_on_t6sg = ceil(2*halo_width_on_t7g/refine_ratio_t6g_to_t7g) - # - # where ceil(...) 
is the ceiling function, i.e. it rounds its floating - # point argument up to the next larger integer. Since in bash division - # of two integers returns a truncated integer and since bash has no - # built-in ceil(...) function, we perform the rounding-up operation by - # adding the denominator (of the argument of ceil(...) above) minus 1 to - # the original numerator, i.e. by redefining halo_width_on_t6sg to be - # - # halo_width_on_t6sg = (2*halo_width_on_t7g + refine_ratio_t6g_to_t7g - 1)/refine_ratio_t6g_to_t7g - # - # This trick works when dividing one positive integer by another. - # - # In order to calculate halo_width_on_t6g using the above expression, we must - # first specify halo_width_on_t7g. Next, we specify an initial value for it by - # setting it to one more than the largest-width halo that the model ac- - # tually needs, which is NH4. We then calculate halo_width_on_t6sg using the - # above expression. Note that these values of halo_width_on_t7g and halo_width_on_t6sg will - # likely not be their final values; their final values will be calcula- - # ted later below after calculating the starting and ending indices of - # the regional grid with wide halo on the tile 6 supergrid and then ad- - # justing the latter to satisfy certain conditions. - # - # ----------------------------------------------------------------------- - # - halo_width_on_t7g = nh4 + 1 - halo_width_on_t6sg = ( - 2 * halo_width_on_t7g + refine_ratio_t6g_to_t7g - 1 - ) / refine_ratio_t6g_to_t7g - # - # ----------------------------------------------------------------------- - # - # With an initial value of halo_width_on_t6sg now available, we can obtain the - # tile 6 supergrid index limits of the regional domain (including the - # wide halo) from the index limits for the regional domain without a ha- - # lo by simply subtracting halo_width_on_t6sg from the lower index limits and add- - # ing halo_width_on_t6sg to the upper index limits, i.e. 
- # - # istart_of_t7_with_halo_on_t6sg = istart_of_t7_on_t6sg - halo_width_on_t6sg - # iend_of_t7_with_halo_on_t6sg = iend_of_t7_on_t6sg + halo_width_on_t6sg - # jstart_of_t7_with_halo_on_t6sg = jstart_of_t7_on_t6sg - halo_width_on_t6sg - # jend_of_t7_with_halo_on_t6sg = jend_of_t7_on_t6sg + halo_width_on_t6sg - # - # We calculate these next. - # - # ----------------------------------------------------------------------- - # - istart_of_t7_with_halo_on_t6sg = int(istart_of_t7_on_t6sg - halo_width_on_t6sg) - iend_of_t7_with_halo_on_t6sg = int(iend_of_t7_on_t6sg + halo_width_on_t6sg) - jstart_of_t7_with_halo_on_t6sg = int(jstart_of_t7_on_t6sg - halo_width_on_t6sg) - jend_of_t7_with_halo_on_t6sg = int(jend_of_t7_on_t6sg + halo_width_on_t6sg) - # - # ----------------------------------------------------------------------- - # - # As for the regional grid without a halo, the regional grid with a wide - # halo that make_hgrid will generate must be such that grid cells on - # tile 6 either lie completely within this grid or outside of it, i.e. - # they cannot lie partially within/outside of it. This implies that the - # starting indices on the tile 6 supergrid of the grid with wide halo - # must be odd while the ending indices must be even. Thus, below, we - # subtract 1 from the starting indices if they are even (which ensures - # that there will be at least halo_width_on_t7g halo cells along the left and bot- - # tom boundaries), and we add 1 to the ending indices if they are odd - # (which ensures that there will be at least halo_width_on_t7g halo cells along the - # right and top boundaries). 
- # - # ----------------------------------------------------------------------- - # - if istart_of_t7_with_halo_on_t6sg % 2 == 0: - istart_of_t7_with_halo_on_t6sg = istart_of_t7_with_halo_on_t6sg - 1 - - if iend_of_t7_with_halo_on_t6sg % 2 == 1: - iend_of_t7_with_halo_on_t6sg = iend_of_t7_with_halo_on_t6sg + 1 - - if jstart_of_t7_with_halo_on_t6sg % 2 == 0: - jstart_of_t7_with_halo_on_t6sg = jstart_of_t7_with_halo_on_t6sg - 1 - - if jend_of_t7_with_halo_on_t6sg % 2 == 1: - jend_of_t7_with_halo_on_t6sg = jend_of_t7_with_halo_on_t6sg + 1 - # - # ----------------------------------------------------------------------- - # - # Now that the starting and ending tile 6 supergrid indices of the re- - # gional grid with the wide halo have been calculated (and adjusted), we - # recalculate the width of the wide halo on: - # - # 1) the tile 6 supergrid; - # 2) the tile 6 grid; and - # 3) the tile 7 grid. - # - # These are the final values of these quantities that are guaranteed to - # correspond to the starting and ending indices on the tile 6 supergrid. 
- # - # ----------------------------------------------------------------------- - # - print_info_msg( - f""" - Original values of the halo width on the tile 6 supergrid and on the - tile 7 grid are: - halo_width_on_t6sg = {halo_width_on_t6sg} - halo_width_on_t7g = {halo_width_on_t7g}""", - verbose=verbose, - ) - - halo_width_on_t6sg = istart_of_t7_on_t6sg - istart_of_t7_with_halo_on_t6sg - halo_width_on_t6g = halo_width_on_t6sg // 2 - halo_width_on_t7g = int(halo_width_on_t6g * refine_ratio_t6g_to_t7g) - - print_info_msg( - f""" - Values of the halo width on the tile 6 supergrid and on the tile 7 grid - AFTER adjustments are: - halo_width_on_t6sg = {halo_width_on_t6sg} - halo_width_on_t7g = {halo_width_on_t7g}""", - verbose=verbose, - ) - # - # ----------------------------------------------------------------------- - # - # Calculate the number of cells that the regional domain (without halo) - # has in each of the two horizontal directions (say x and y). We denote - # these by nx_of_t7_on_t7g and ny_of_t7_on_t7g, respectively. These - # will be needed in the "shave" steps in the grid generation task of the - # workflow. - # - # ----------------------------------------------------------------------- - # - nx_of_t7_on_t6sg = iend_of_t7_on_t6sg - istart_of_t7_on_t6sg + 1 - nx_of_t7_on_t6g = nx_of_t7_on_t6sg / 2 - nx_of_t7_on_t7g = int(nx_of_t7_on_t6g * refine_ratio_t6g_to_t7g) - - ny_of_t7_on_t6sg = jend_of_t7_on_t6sg - jstart_of_t7_on_t6sg + 1 - ny_of_t7_on_t6g = ny_of_t7_on_t6sg / 2 - ny_of_t7_on_t7g = int(ny_of_t7_on_t6g * refine_ratio_t6g_to_t7g) - # - # The following are set only for informational purposes. 
- # - nx_of_t6_on_t6sg = 2 * nx_of_t6_on_t6g - ny_of_t6_on_t6sg = 2 * ny_of_t6_on_t6g - - prime_factors_nx_of_t7_on_t7g = prime_factors(nx_of_t7_on_t7g) - prime_factors_ny_of_t7_on_t7g = prime_factors(ny_of_t7_on_t7g) - - print_info_msg( - f""" - The number of cells in the two horizontal directions (x and y) on the - parent tile's (tile 6) grid and supergrid are: - nx_of_t6_on_t6g = {nx_of_t6_on_t6g} - ny_of_t6_on_t6g = {ny_of_t6_on_t6g} - nx_of_t6_on_t6sg = {nx_of_t6_on_t6sg} - ny_of_t6_on_t6sg = {ny_of_t6_on_t6sg} - - The number of cells in the two horizontal directions on the tile 6 grid - and supergrid that the regional domain (tile 7) WITHOUT A HALO encompas- - ses are: - nx_of_t7_on_t6g = {nx_of_t7_on_t6g} - ny_of_t7_on_t6g = {ny_of_t7_on_t6g} - nx_of_t7_on_t6sg = {nx_of_t7_on_t6sg} - ny_of_t7_on_t6sg = {ny_of_t7_on_t6sg} - - The starting and ending i and j indices on the tile 6 grid used to gene- - rate this regional grid are: - istart_of_t7_on_t6g = {istart_of_t7_on_t6g} - iend_of_t7_on_t6g = {iend_of_t7_on_t6g} - jstart_of_t7_on_t6g = {jstart_of_t7_on_t6g} - jend_of_t7_on_t6g = {jend_of_t7_on_t6g} - - The corresponding starting and ending i and j indices on the tile 6 su- - pergrid are: - istart_of_t7_on_t6sg = {istart_of_t7_on_t6sg} - iend_of_t7_on_t6sg = {iend_of_t7_on_t6sg} - jstart_of_t7_on_t6sg = {jstart_of_t7_on_t6sg} - jend_of_t7_on_t6sg = {jend_of_t7_on_t6sg} - - The refinement ratio (ratio of the number of cells in tile 7 that abut - a single cell in tile 6) is: - refine_ratio_t6g_to_t7g = {refine_ratio_t6g_to_t7g} - - The number of cells in the two horizontal directions on the regional do- - main's (i.e. 
tile 7's) grid WITHOUT A HALO are: - nx_of_t7_on_t7g = {nx_of_t7_on_t7g} - ny_of_t7_on_t7g = {ny_of_t7_on_t7g} - - The prime factors of nx_of_t7_on_t7g and ny_of_t7_on_t7g are (useful for - determining an MPI task layout): - prime_factors_nx_of_t7_on_t7g: {prime_factors_nx_of_t7_on_t7g} - prime_factors_ny_of_t7_on_t7g: {prime_factors_ny_of_t7_on_t7g}""", - verbose=verbose, - ) - # - # ----------------------------------------------------------------------- - # - # For informational purposes, calculate the number of cells in each di- - # rection on the regional grid including the wide halo (of width halo_- - # width_on_t7g cells). We denote these by nx_of_t7_with_halo_on_t7g and - # ny_of_t7_with_halo_on_t7g, respectively. - # - # ----------------------------------------------------------------------- - # - nx_of_t7_with_halo_on_t6sg = ( - iend_of_t7_with_halo_on_t6sg - istart_of_t7_with_halo_on_t6sg + 1 - ) - nx_of_t7_with_halo_on_t6g = nx_of_t7_with_halo_on_t6sg / 2 - nx_of_t7_with_halo_on_t7g = nx_of_t7_with_halo_on_t6g * refine_ratio_t6g_to_t7g - - ny_of_t7_with_halo_on_t6sg = ( - jend_of_t7_with_halo_on_t6sg - jstart_of_t7_with_halo_on_t6sg + 1 - ) - ny_of_t7_with_halo_on_t6g = ny_of_t7_with_halo_on_t6sg / 2 - ny_of_t7_with_halo_on_t7g = ny_of_t7_with_halo_on_t6g * refine_ratio_t6g_to_t7g - - print_info_msg( - f""" - nx_of_t7_with_halo_on_t7g = {nx_of_t7_with_halo_on_t7g} - (istart_of_t7_with_halo_on_t6sg = {istart_of_t7_with_halo_on_t6sg}, - iend_of_t7_with_halo_on_t6sg = {iend_of_t7_with_halo_on_t6sg})""", - verbose=verbose, - ) - - print_info_msg( - f""" - ny_of_t7_with_halo_on_t7g = {ny_of_t7_with_halo_on_t7g} - (jstart_of_t7_with_halo_on_t6sg = {jstart_of_t7_with_halo_on_t6sg}, - jend_of_t7_with_halo_on_t6sg = {jend_of_t7_with_halo_on_t6sg})""", - verbose=verbose, - ) - # - # ----------------------------------------------------------------------- - # - # Return output variables. 
- # - # ----------------------------------------------------------------------- - # - return { - "LON_CTR": lon_of_t7_ctr, - "LAT_CTR": lat_of_t7_ctr, - "NX": nx_of_t7_on_t7g, - "NY": ny_of_t7_on_t7g, - "NHW": halo_width_on_t7g, - "STRETCH_FAC": stretch_factor, - "ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG": istart_of_t7_with_halo_on_t6sg, - "IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG": iend_of_t7_with_halo_on_t6sg, - "JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG": jstart_of_t7_with_halo_on_t6sg, - "JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG": jend_of_t7_with_halo_on_t6sg, - } - - -class Testing(unittest.TestCase): - def test_set_gridparams_GFDLgrid(self): - grid_params = set_gridparams_GFDLgrid( - lon_of_t6_ctr=-97.5, - lat_of_t6_ctr=38.5, - res_of_t6g=96, - stretch_factor=1.4, - refine_ratio_t6g_to_t7g=3, - istart_of_t7_on_t6g=13, - iend_of_t7_on_t6g=84, - jstart_of_t7_on_t6g=17, - jend_of_t7_on_t6g=80, - verbose=True, - nh4=4, - ) - - self.assertEqual( - list(grid_params.values()), - [-97.5, 38.5, 216, 192, 6, 1.4, 21, 172, 29, 164], - ) - - def setUp(self): - pass diff --git a/ush/set_ozone_param.py b/ush/set_ozone_param.py deleted file mode 100755 index a82199d966..0000000000 --- a/ush/set_ozone_param.py +++ /dev/null @@ -1,165 +0,0 @@ -#!/usr/bin/env python3 - -import copy -import os -import unittest -from textwrap import dedent - -from python_utils import ( - log_info, - list_to_str, - print_input_args, - load_xml_file, - has_tag_with_value, - find_pattern_in_str, -) - - -def set_ozone_param(ccpp_phys_suite_fp, link_mappings): - """Function that does the following: - (1) Determines the ozone parameterization being used by checking in the - CCPP physics suite XML. - - (2) Sets the name of the global ozone production/loss file in the FIXgsm - FIXgsm system directory to copy to the experiment's FIXam directory. - - (3) Updates the symlink for the ozone file provided in link_mappings - list to include the name of global ozone production/loss file. 
- - Args: - ccpp_phys_suite_fp: full path to CCPP physics suite - link_mappings: list of mappings between symlinks and their - target files for this experiment - Returns: - ozone_param: a string - fixgsm_ozone_fn: a path to a fix file that should be used with - this experiment - ozone_link_mappings: a list of mappings for the files needed for - this experiment - - """ - - print_input_args(locals()) - - # - # ----------------------------------------------------------------------- - # - # Get the name of the ozone parameterization being used. There are two - # possible ozone parameterizations: - # - # (1) A parameterization developed/published in 2015. Here, we refer to - # this as the 2015 parameterization. If this is being used, then we - # set the variable ozone_param to the string "ozphys_2015". - # - # (2) A parameterization developed/published sometime after 2015. Here, - # we refer to this as the after-2015 parameterization. If this is - # being used, then we set the variable ozone_param to the string - # "ozphys". - # - # We check the CCPP physics suite definition file (SDF) to determine the - # parameterization being used. If this file contains the line - # - # ozphys_2015 - # - # then the 2015 parameterization is being used. If it instead contains - # the line - # - # ozphys - # - # then the after-2015 parameterization is being used. (The SDF should - # contain exactly one of these lines; not both nor neither; we check for - # this.) 
- # - # ----------------------------------------------------------------------- - # - tree = load_xml_file(ccpp_phys_suite_fp) - ozone_param = "" - if has_tag_with_value(tree, "scheme", "ozphys_2015"): - fixgsm_ozone_fn = "ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77" - ozone_param = "ozphys_2015" - elif has_tag_with_value(tree, "scheme", "ozphys"): - fixgsm_ozone_fn = "global_o3prdlos.f77" - ozone_param = "ozphys" - else: - raise KeyError( - f"Unknown or no ozone parameterization specified in the " - "CCPP physics suite file '{ccpp_phys_suite_fp}'" - ) - # - # ----------------------------------------------------------------------- - # - # Set the element in the array CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING that - # specifies the mapping between the symlink for the ozone production/loss - # file that must be created in each cycle directory and its target in the - # FIXam directory. The name of the symlink is already in the array, but - # the target is not because it depends on the ozone parameterization that - # the physics suite uses. Since we determined the ozone parameterization - # above, we now set the target of the symlink accordingly. - - # - # ----------------------------------------------------------------------- - # - # Set the mapping between the symlink and the target file we just - # found. The link name is already in the list, but the target file - # is not. 
- # - # ----------------------------------------------------------------------- - # - - ozone_symlink = "global_o3prdlos.f77" - fixgsm_ozone_fn_is_set = False - - ozone_link_mappings = copy.deepcopy(link_mappings) - for i, mapping in enumerate(ozone_link_mappings): - symlink = mapping.split("|")[0] - if symlink.strip() == ozone_symlink: - ozone_link_mappings[i] = f"{symlink}| {fixgsm_ozone_fn}" - fixgsm_ozone_fn_is_set = True - break - - # Make sure the list has been updated - if not fixgsm_ozone_fn_is_set: - - raise Exception( - f""" - Unable to set name of the ozone production/loss file in the FIXgsm directory - in the array that specifies the mapping between the symlinks that need to - be created in the cycle directories and the files in the FIXgsm directory: - fixgsm_ozone_fn_is_set = '{fixgsm_ozone_fn_is_set}'""" - ) - - return ozone_param, fixgsm_ozone_fn, ozone_link_mappings - - -class Testing(unittest.TestCase): - def test_set_ozone_param(self): - USHdir = os.path.dirname(os.path.abspath(__file__)) - ozone_param, _, _ = set_ozone_param( - f"{USHdir}{os.sep}test_data{os.sep}suite_FV3_GSD_SAR.xml", - self.CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING, - ) - self.assertEqual("ozphys_2015", ozone_param) - - def setUp(self): - self.CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = [ - "aerosol.dat | global_climaeropac_global.txt", - "co2historicaldata_2010.txt | fix_co2_proj/global_co2historicaldata_2010.txt", - "co2historicaldata_2011.txt | fix_co2_proj/global_co2historicaldata_2011.txt", - "co2historicaldata_2012.txt | fix_co2_proj/global_co2historicaldata_2012.txt", - "co2historicaldata_2013.txt | fix_co2_proj/global_co2historicaldata_2013.txt", - "co2historicaldata_2014.txt | fix_co2_proj/global_co2historicaldata_2014.txt", - "co2historicaldata_2015.txt | fix_co2_proj/global_co2historicaldata_2015.txt", - "co2historicaldata_2016.txt | fix_co2_proj/global_co2historicaldata_2016.txt", - "co2historicaldata_2017.txt | fix_co2_proj/global_co2historicaldata_2017.txt", - 
"co2historicaldata_2018.txt | fix_co2_proj/global_co2historicaldata_2018.txt", - "co2historicaldata_2019.txt | fix_co2_proj/global_co2historicaldata_2019.txt", - "co2historicaldata_2020.txt | fix_co2_proj/global_co2historicaldata_2020.txt", - "co2historicaldata_2021.txt | fix_co2_proj/global_co2historicaldata_2021.txt", - "co2historicaldata_glob.txt | global_co2historicaldata_glob.txt", - "co2monthlycyc.txt | co2monthlycyc.txt", - "global_h2oprdlos.f77 | global_h2o_pltc.f77", - "global_zorclim.1x1.grb | global_zorclim.1x1.grb", - "sfc_emissivity_idx.txt | global_sfc_emissivity_idx.txt", - "solarconstant_noaa_an.txt | global_solarconstant_noaa_an.txt", - "global_o3prdlos.f77 | ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77", - ] diff --git a/ush/set_predef_grid_params.py b/ush/set_predef_grid_params.py deleted file mode 100755 index b5761992b7..0000000000 --- a/ush/set_predef_grid_params.py +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env python3 - -import unittest -import os -from textwrap import dedent - -from python_utils import ( - load_config_file, - flatten_dict, -) - - -def set_predef_grid_params(USHdir, grid_name, quilting): - """Sets grid parameters for the specified predefined grid - - Args: - USHdir: path to the SRW ush directory - grid_name str specifying the predefined grid name. 
- quilting: bool whether quilting should be used for output - Returns: - Dictionary of grid parameters - """ - - params_dict = load_config_file(os.path.join(USHdir, "predef_grid_params.yaml")) - try: - params_dict = params_dict[grid_name] - except KeyError: - errmsg = dedent( - f""" - PREDEF_GRID_NAME = {predef_grid_name} not found in predef_grid_params.yaml - Check your config file settings.""" - ) - raise Exception(errmsg) from None - - # We don't need the quilting section if user wants it turned off - if not quilting: - params_dict.pop("QUILTING") - else: - params_dict = flatten_dict(params_dict) - - return params_dict - - -class Testing(unittest.TestCase): - def test_set_predef_grid_params(self): - ushdir = os.path.dirname(os.path.abspath(__file__)) - fcst_config = dict( - PREDEF_GRID_NAME="RRFS_CONUS_3km", - QUILTING=False, - DT_ATMOS=36, - LAYOUT_X=18, - LAYOUT_Y=36, - BLOCKSIZE=28, - ) - params_dict = set_predef_grid_params( - ushdir, - fcst_config["PREDEF_GRID_NAME"], - fcst_config["QUILTING"], - ) - self.assertEqual(params_dict["GRID_GEN_METHOD"], "ESGgrid") - self.assertEqual(params_dict["ESGgrid_LON_CTR"], -97.5) - fcst_config = dict( - PREDEF_GRID_NAME="RRFS_CONUS_3km", - QUILTING=True, - DT_ATMOS=36, - LAYOUT_X=18, - LAYOUT_Y=36, - BLOCKSIZE=28, - ) - params_dict = set_predef_grid_params( - ushdir, - fcst_config["PREDEF_GRID_NAME"], - fcst_config["QUILTING"], - ) - self.assertEqual(params_dict["WRTCMP_nx"], 1799) diff --git a/ush/set_vx_fhr_list.sh b/ush/set_vx_fhr_list.sh deleted file mode 100755 index 3b86cf1022..0000000000 --- a/ush/set_vx_fhr_list.sh +++ /dev/null @@ -1,282 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that generates a list of forecast hours -# such that for each hour there exist a corresponding obs file. It does -# this by first generating a generic sequence of forecast hours and then -# removing from that sequence any hour for which there is no obs file. 
-# -#----------------------------------------------------------------------- -# -function set_vx_fhr_list() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. Then -# process the arguments provided to this script/function (which should -# consist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( \ - "cdate" \ - "fcst_len_hrs" \ - "field" \ - "accum_hh" \ - "base_dir" \ - "fn_template" \ - "check_hourly_files" \ - "outvarname_fhr_list" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. 
-# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Declare local variables. -# -#----------------------------------------------------------------------- -# - local crnt_tmpl \ - crnt_tmpl_esc \ - fhr \ - fhr_array \ - fhr_int \ - fhr_list \ - fhr_min \ - fhr_max \ - fn \ - fp \ - i \ - num_fcst_hrs \ - num_missing_files \ - regex_search_tmpl \ - remainder \ - skip_this_fhr -# -#----------------------------------------------------------------------- -# -# Create array containing set of forecast hours for which we will check -# for the existence of corresponding observation or forecast file. -# -#----------------------------------------------------------------------- -# - case "${field}" in - "APCP") - fhr_min="${accum_hh}" - fhr_int="${accum_hh}" - ;; - "REFC") - fhr_min="01" - fhr_int="01" - ;; - "RETOP") - fhr_min="01" - fhr_int="01" - ;; - "SFC") - fhr_min="01" - fhr_int="01" - ;; - "UPA") - fhr_min="06" - fhr_int="06" - ;; - *) - print_err_msg_exit "\ -A method for setting verification parameters has not been specified for -this field (field): - field = \"${field}\"" - ;; - esac - fhr_max="${fcst_len_hrs}" - - fhr_array=($( seq ${fhr_min} ${fhr_int} ${fhr_max} )) - print_info_msg "$VERBOSE" "\ -Initial (i.e. before filtering for missing files) set of forecast hours -is: - fhr_array = ( $( printf "\"%s\" " "${fhr_array[@]}" )) -" -# -#----------------------------------------------------------------------- -# -# Loop through all forecast hours. For each one for which a corresponding -# file exists, add the forecast hour to fhr_list. fhr_list will be a -# scalar containing a comma-separated list of forecast hours for which -# corresponding files exist. Also, use the variable num_missing_files -# to keep track of the number of files that are missing. 
-# -#----------------------------------------------------------------------- -# - fhr_list="" - num_missing_files="0" - num_fcst_hrs=${#fhr_array[@]} - for (( i=0; i<${num_fcst_hrs}; i++ )); do - - fhr_orig="${fhr_array[$i]}" - - if [ "${check_hourly_files}" = "TRUE" ]; then - fhr=$(( ${fhr_orig} - ${accum_hh} + 1 )) - num_back_hrs=${accum_hh} - else - fhr=${fhr_orig} - num_back_hrs=1 - fi - - skip_this_fhr="FALSE" - for (( j=0; j<${num_back_hrs}; j++ )); do -# -# Use the provided template to set the name of/relative path to the file -# - fn="${fn_template}" - regex_search_tmpl="(.*)(\{.*\})(.*)" - crnt_tmpl=$( printf "%s" "${fn_template}" | \ - $SED -n -r -e "s|${regex_search_tmpl}|\2|p" ) - remainder=$( printf "%s" "${fn_template}" | \ - $SED -n -r -e "s|${regex_search_tmpl}|\1\3|p" ) - while [ ! -z "${crnt_tmpl}" ]; do - - eval_METplus_timestr_tmpl \ - init_time="$cdate" \ - fhr="$fhr" \ - METplus_timestr_tmpl="${crnt_tmpl}" \ - outvarname_formatted_time="actual_value" -# -# Replace METplus time templates in fn with actual times. Note that -# when using sed, we need to escape various characters (question mark, -# closing and opening curly braces, etc) in the METplus template in -# order for the sed command below to work properly. -# - crnt_tmpl_esc=$( echo "${crnt_tmpl}" | \ - $SED -r -e "s/\?/\\\?/g" -e "s/\{/\\\{/g" -e "s/\}/\\\}/g" ) - fn=$( echo "${fn}" | \ - $SED -n -r "s|(.*)(${crnt_tmpl_esc})(.*)|\1${actual_value}\3|p" ) -# -# Set up values for the next iteration of the while-loop. -# - crnt_tmpl=$( printf "%s" "${remainder}" | \ - $SED -n -r -e "s|${regex_search_tmpl}|\2|p" ) - remainder=$( printf "%s" "${remainder}" | \ - $SED -n -r -e "s|${regex_search_tmpl}|\1\3|p" ) - - done -# -# Get the full path to the file and check if it exists. 
-# - fp="${base_dir}/${fn}" - - if [ -f "${fp}" ]; then - print_info_msg "\ -Found file (fp) for the current forecast hour (fhr; relative to the cycle -date cdate): - fhr = \"$fhr\" - cdate = \"$cdate\" - fp = \"${fp}\" -" - else - skip_this_fhr="TRUE" - num_missing_files=$(( ${num_missing_files} + 1 )) - print_info_msg "\ -The file (fp) for the current forecast hour (fhr; relative to the cycle -date cdate) is missing: - fhr = \"$fhr\" - cdate = \"$cdate\" - fp = \"${fp}\" -Excluding the current forecast hour from the list of hours passed to the -METplus configuration file. -" - break - fi - - fhr=$(( $fhr + 1 )) - - done - - if [ "${skip_this_fhr}" != "TRUE" ]; then - fhr_list="${fhr_list},${fhr_orig}" - fi - - done -# -# Remove leading comma from fhr_list. -# - fhr_list=$( echo "${fhr_list}" | $SED "s/^,//g" ) - print_info_msg "$VERBOSE" "\ -Final (i.e. after filtering for missing files) set of foreast hours is -(written as a single string): - fhr_list = \"${fhr_list}\" -" -# -#----------------------------------------------------------------------- -# -# If the number of missing files is greater than the user-specified -# variable NUM_MISSING_OBS_FILES_MAX, print out an error message and -# exit. -# -#----------------------------------------------------------------------- -# - if [ "${num_missing_files}" -gt "${NUM_MISSING_OBS_FILES_MAX}" ]; then - print_err_msg_exit "\ -The number of missing files (num_missing_files) is greater than the -maximum allowed number (NUM_MISSING_OBS_MAS): - num_missing_files = ${num_missing_files} - NUM_MISSING_OBS_FILES_MAX = ${NUM_MISSING_OBS_FILES_MAX}" - fi -# -#----------------------------------------------------------------------- -# -# Set output variables. -# -#----------------------------------------------------------------------- -# - if [ ! 
-z "${outvarname_fhr_list}" ]; then - printf -v ${outvarname_fhr_list} "%s" "${fhr_list}" - fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} diff --git a/ush/set_vx_params.sh b/ush/set_vx_params.sh deleted file mode 100755 index 61b74ff4ce..0000000000 --- a/ush/set_vx_params.sh +++ /dev/null @@ -1,276 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that sets various parameters needed when -# performing verification. The way these parameters are set depends on -# the field being verified and, if the field is accumulated precipitation, -# the accumulation period (both of which are inputs to this function). -# -# As of 20220928, the verification tasks in the SRW App workflow use the -# MET/METplus software (MET = Model Evaluation Tools) developed at the -# DTC (Developmental Testbed Center). -# -#----------------------------------------------------------------------- -# -function set_vx_params() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. Then -# process the arguments provided to this script/function (which should -# consist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( \ - "obtype" \ - "field" \ - "accum_hh" \ - "outvarname_grid_or_point" \ - "outvarname_field_is_APCPgt01h" \ - "outvarname_fieldname_in_obs_input" \ - "outvarname_fieldname_in_fcst_input" \ - "outvarname_fieldname_in_MET_output" \ - "outvarname_fieldname_in_MET_filedir_names" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Declare local variables. 
-# -#----------------------------------------------------------------------- -# - local _grid_or_point_ \ - _field_is_APCPgt01h_ \ - fieldname_in_obs_input \ - fieldname_in_fcst_input \ - fieldname_in_MET_output \ - fieldname_in_MET_filedir_names -# -#----------------------------------------------------------------------- -# -# Make sure that accum_hh is a 2-digit integer. -# -#----------------------------------------------------------------------- -# - if [[ ! "${accum_hh}" =~ ^[0-9]{2}$ ]]; then - print_err_msg_exit "\ -The accumulation (accum_hh) must be a 2-digit integer: - accum_hh = \"${accum_hh}\"" - fi -# -#----------------------------------------------------------------------- -# -# Set the parameters. Definitions: -# -# grid_or_point: -# String that is set to either "grid" or "point" depending on whether -# the field in consideration has obs that are gridded or point-based. -# -# field_is_APCPgt01h: -# Flag that specifies whether the input field and accumulation together -# represent accumulated precipitation with accumulation period greater -# than 1 hour. -# -# fieldname_in_obs_input: -# String used to search for the field in the input observation files -# read in by MET. -# -# fieldname_in_fcst_input: -# String used to search for the field in the input forecast files read -# in by MET. -# -# fieldname_in_MET_output: -# String that will be used in naming arrays defined in MET output files -# (e.g. NetCDF, stat, etc). -# -# fieldname_in_MET_filedir_names: -# String that will be used in naming directories and files (e.g. NetCDF -# files, stat files, log files, staging directories) generated by MET -# or METplus. 
-# -#----------------------------------------------------------------------- -# - _grid_or_point_="FALSE" - _field_is_APCPgt01h_="FALSE" - fieldname_in_obs_input="" - fieldname_in_fcst_input="" - fieldname_in_MET_output="" - fieldname_in_MET_filedir_names="" - - case "${obtype}" in - - "CCPA") - - _grid_or_point_="grid" - case "${field}" in - - "APCP") - fieldname_in_obs_input="${field}" - fieldname_in_fcst_input="${field}" - fieldname_in_MET_output="${field}_${accum_hh}" - fieldname_in_MET_filedir_names="${field}${accum_hh}h" - if [ "${accum_hh}" -gt "01" ]; then - _field_is_APCPgt01h_="TRUE" - fi - ;; - - *) - print_err_msg_exit "\ -A method for setting verification parameters has not been specified for -this observation type (obtype) and field (field) combination: - obtype = \"${obtype}\" - field = \"${field}\"" - ;; - - esac - ;; - - "MRMS") - - _grid_or_point_="grid" - case "${field}" in - - "REFC") - fieldname_in_obs_input="MergedReflectivityQCComposite" - fieldname_in_fcst_input="${field}" - fieldname_in_MET_output="${field}" - fieldname_in_MET_filedir_names="${field}" - ;; - - "RETOP") - fieldname_in_obs_input="EchoTop18" - fieldname_in_fcst_input="${field}" - fieldname_in_MET_output="${field}" - fieldname_in_MET_filedir_names="${field}" - ;; - - *) - print_err_msg_exit "\ -A method for setting verification parameters has not been specified for -this observation type (obtype) and field (field) combination: - obtype = \"${obtype}\" - field = \"${field}\"" - ;; - - esac - ;; - - "NDAS") - - _grid_or_point_="point" - case "${field}" in - - "SFC") - fieldname_in_obs_input="" - fieldname_in_fcst_input="" - fieldname_in_MET_output="${field}" - fieldname_in_MET_filedir_names="${field}" - ;; - - "UPA") - fieldname_in_obs_input="" - fieldname_in_fcst_input="" - fieldname_in_MET_output="${field}" - fieldname_in_MET_filedir_names="${field}" - ;; - - *) - print_err_msg_exit "\ -A method for setting verification parameters has not been specified for -this observation 
type (obtype) and field (field) combination: - obtype = \"${obtype}\" - field = \"${field}\"" - ;; - - esac - ;; - - *) - print_err_msg_exit "\ -A method for setting verification parameters has not been specified for -this observation type (obtype): - obtype = \"${obtype}\"" - ;; - - esac -# -#----------------------------------------------------------------------- -# -# Set output variables. -# -#----------------------------------------------------------------------- -# - if [ ! -z "${outvarname_grid_or_point}" ]; then - printf -v ${outvarname_grid_or_point} "%s" "${_grid_or_point_}" - fi - - if [ ! -z "${outvarname_field_is_APCPgt01h}" ]; then - printf -v ${outvarname_field_is_APCPgt01h} "%s" "${_field_is_APCPgt01h_}" - fi - - if [ ! -z "${outvarname_fieldname_in_obs_input}" ]; then - printf -v ${outvarname_fieldname_in_obs_input} "%s" "${fieldname_in_obs_input}" - fi - - if [ ! -z "${outvarname_fieldname_in_fcst_input}" ]; then - printf -v ${outvarname_fieldname_in_fcst_input} "%s" "${fieldname_in_fcst_input}" - fi - - if [ ! -z "${outvarname_fieldname_in_MET_output}" ]; then - printf -v ${outvarname_fieldname_in_MET_output} "%s" "${fieldname_in_MET_output}" - fi - - if [ ! -z "${outvarname_fieldname_in_MET_filedir_names}" ]; then - printf -v ${outvarname_fieldname_in_MET_filedir_names} "%s" "${fieldname_in_MET_filedir_names}" - fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. 
-# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} diff --git a/ush/setup.py b/ush/setup.py deleted file mode 100755 index 7752a00d0f..0000000000 --- a/ush/setup.py +++ /dev/null @@ -1,1399 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -import datetime -import traceback -import logging -from textwrap import dedent - -from python_utils import ( - log_info, - cd_vrfy, - mkdir_vrfy, - ln_vrfy, - rm_vrfy, - check_var_valid_value, - lowercase, - uppercase, - list_to_str, - check_for_preexist_dir_file, - flatten_dict, - check_structure_dict, - update_dict, - import_vars, - get_env_var, - load_config_file, - cfg_to_shell_str, - cfg_to_yaml_str, - load_ini_config, - get_ini_value, - str_to_list, - extend_yaml, -) - -from set_cycle_dates import set_cycle_dates -from set_predef_grid_params import set_predef_grid_params -from set_ozone_param import set_ozone_param -from set_gridparams_ESGgrid import set_gridparams_ESGgrid -from set_gridparams_GFDLgrid import set_gridparams_GFDLgrid -from link_fix import link_fix -from check_ruc_lsm import check_ruc_lsm -from set_thompson_mp_fix_files import set_thompson_mp_fix_files - - -def load_config_for_setup(ushdir, default_config, user_config): - """Load in the default, machine, and user configuration files into - Python dictionaries. Return the combined experiment dictionary. - - Args: - ushdir (str): Path to the ush directory for SRW - default_config (str): Path to the default config YAML - user_config (str): Path to the user-provided config YAML - - Returns: - Python dict of configuration settings from YAML files. - """ - - # Load the default config. 
- logging.debug(f"Loading config defaults file {default_config}") - cfg_d = load_config_file(default_config) - logging.debug(f"Read in the following values from config defaults file:\n") - logging.debug(cfg_d) - - # Load the user config file, then ensure all user-specified - # variables correspond to a default value. - if not os.path.exists(user_config): - raise FileNotFoundError( - f""" - User config file not found: - user_config = {user_config} - """ - ) - - try: - cfg_u = load_config_file(user_config) - logging.debug(f"Read in the following values from YAML config file {user_config}:\n") - logging.debug(cfg_u) - except: - errmsg = dedent( - f"""\n - Could not load YAML config file: {user_config} - Reference the above traceback for more information. - """ - ) - raise Exception(errmsg) - - # Make sure the keys in user config match those in the default - # config. - invalid = check_structure_dict(cfg_u, cfg_d) - if invalid: - errmsg = f"Invalid key(s) specified in {user_config}:\n" - for entry in invalid: - errmsg = errmsg + f"{entry} = {invalid[entry]}\n" - errmsg = errmsg + f"\nCheck {default_config} for allowed user-specified variables\n" - raise Exception(errmsg) - - # Mandatory variables *must* be set in the user's config; the default value is invalid - mandatory = ["user.MACHINE"] - for val in mandatory: - sect, key = val.split(".") - user_setting = cfg_u.get(sect, {}).get(key) - if user_setting is None: - raise Exception( - f"""Mandatory variable "{val}" not found in - user config file {user_config}""" - ) - - # Load the machine config file - machine = uppercase(cfg_u.get("user").get("MACHINE")) - cfg_u["user"]["MACHINE"] = uppercase(machine) - - machine_file = os.path.join(ushdir, os.pardir, "parm", "machine", f"{lowercase(machine)}.yaml") - - if not os.path.exists(machine_file): - raise FileNotFoundError( - dedent( - f""" - The machine file {machine_file} does not exist. 
- Check that you have specified the correct machine - ({machine}) in your config file {user_config}""" - ) - ) - logging.debug(f"Loading machine defaults file {machine_file}") - machine_cfg = load_config_file(machine_file) - - # Load the fixed files configuration - cfg_f = load_config_file( - os.path.join(ushdir, os.pardir, "parm", "fixed_files_mapping.yaml") - ) - - # Load the constants file - cfg_c = load_config_file(os.path.join(ushdir, "constants.yaml")) - - # Update default config with the constants, the machine config, and - # then the user_config - # Recall: update_dict updates the second dictionary with the first, - # and so, we update the default config settings in place with all - # the others. - - # Constants - update_dict(cfg_c, cfg_d) - - # Machine settings - update_dict(machine_cfg, cfg_d) - - # Fixed files - update_dict(cfg_f, cfg_d) - - # User settings (take precedence over all others) - update_dict(cfg_u, cfg_d) - - # Set "Home" directory, the top-level ufs-srweather-app directory - homeaqm = os.path.abspath(os.path.dirname(__file__) + os.sep + os.pardir) - cfg_d["user"]["HOMEaqm"] = homeaqm - - # Special logic if EXPT_BASEDIR is a relative path; see config_defaults.yaml for explanation - expt_basedir = cfg_d["workflow"]["EXPT_BASEDIR"] - if (not expt_basedir) or (expt_basedir[0] != "/"): - expt_basedir = os.path.join(homeaqm, "..", "expt_dirs", expt_basedir) - try: - expt_basedir = os.path.realpath(expt_basedir) - except: - pass - cfg_d["workflow"]["EXPT_BASEDIR"] = os.path.abspath(expt_basedir) - - extend_yaml(cfg_d) - - # Do any conversions of data types - for sect, settings in cfg_d.items(): - for k, v in settings.items(): - if not (v is None or v == ""): - cfg_d[sect][k] = str_to_list(v) - - # Mandatory variables *must* be set in the user's config or the machine file; the default value is invalid - mandatory = [ - "EXPT_SUBDIR", - "NCORES_PER_NODE", - "FIXgsm", - "FIXaer", - "FIXlut", - "FIXorg", - "FIXsfc", - ] - flat_cfg = 
flatten_dict(cfg_d) - for val in mandatory: - if not flat_cfg.get(val): - raise Exception( - dedent( - f""" - Mandatory variable "{val}" not found in: - user config file {user_config} - OR - machine file {machine_file} - """ - ) - ) - - # Check that input dates are in a date format - dates = ["DATE_FIRST_CYCL", "DATE_LAST_CYCL"] - for val in dates: - if not isinstance(cfg_d["workflow"][val], datetime.date): - raise Exception( - dedent( - f""" - Date variable {val}={cfg_d['workflow'][val]} is not in a valid date format. - - For examples of valid formats, see the Users' Guide. - """ - ) - ) - - return cfg_d - - -def set_srw_paths(ushdir, expt_config): - - """ - Generate a dictionary of directories that describe the SRW - structure, i.e., where SRW is installed, and the paths to - external repositories managed via the manage_externals tool. - - Other paths for SRW are set as defaults in config_defaults.yaml - - Args: - ushdir: (str) path to the system location of the ush/ directory - under the SRW clone - expt_config: (dict) contains the configuration settings for the - user-defined experiment - - Returns: - dictionary of config settings and system paths as keys/values - """ - - # HOMEaqm is the location of the SRW clone, one directory above ush/ - homeaqm = expt_config.get("user", {}).get("HOMEaqm") - - # Read Externals.cfg - mng_extrns_cfg_fn = os.path.join(homeaqm, "sorc", "Externals.cfg") - try: - mng_extrns_cfg_fn = os.readlink(mng_extrns_cfg_fn) - except: - pass - cfg = load_ini_config(mng_extrns_cfg_fn) - - # Get the base directory of the FV3 forecast model code. 
- external_name = expt_config.get("workflow", {}).get("FCST_MODEL") - property_name = "local_path" - - try: - ufs_wthr_mdl_dir = get_ini_value(cfg, external_name, property_name) - except KeyError: - errmsg = dedent( - f""" - Externals configuration file {mng_extrns_cfg_fn} - does not contain '{external_name}'.""" - ) - raise Exception(errmsg) from None - - # Check that the model code has been downloaded - ufs_wthr_mdl_dir = os.path.join(homeaqm, "sorc", ufs_wthr_mdl_dir) - if not os.path.exists(ufs_wthr_mdl_dir): - raise FileNotFoundError( - dedent( - f""" - The base directory in which the FV3 source code should be located - (UFS_WTHR_MDL_DIR) does not exist: - UFS_WTHR_MDL_DIR = '{ufs_wthr_mdl_dir}' - Please clone the external repository containing the code in this directory, - build the executable, and then rerun the workflow.""" - ) - ) - - return dict( - USHdir=ushdir, - UFS_WTHR_MDL_DIR=ufs_wthr_mdl_dir, - ) - - -def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): - """Function that validates user-provided configuration, and derives - a secondary set of parameters needed to configure a Rocoto-based SRW - workflow. The derived parameters use a set of required user-defined - parameters defined by either config_defaults.yaml, a user-provided - configuration file (config.yaml), or a YAML machine file. - - A set of global variable definitions is saved to the experiment - directory as a bash configure file that is sourced by scripts at run - time. - - Args: - USHdir (str): The full path of the ush/ directory where - this script is located - user_config_fn (str): The name of a user-provided config YAML - debug (bool): Enable extra output for debugging - - Returns: - None - """ - - logger = logging.getLogger(__name__) - - # print message - log_info( - f""" - ======================================================================== - Starting function setup() in \"{os.path.basename(__file__)}\"... 
- ========================================================================""" - ) - - # Create a dictionary of config options from defaults, machine, and - # user config files. - default_config_fp = os.path.join(USHdir, "config_defaults.yaml") - user_config_fp = os.path.join(USHdir, user_config_fn) - expt_config = load_config_for_setup(USHdir, default_config_fp, user_config_fp) - - # Set up some paths relative to the SRW clone - expt_config["user"].update(set_srw_paths(USHdir, expt_config)) - - # - # ----------------------------------------------------------------------- - # - # Validate the experiment configuration starting with the workflow, - # then in rough order of the tasks in the workflow - # - # ----------------------------------------------------------------------- - # - - # Workflow - workflow_config = expt_config["workflow"] - - # Generate a unique number for this workflow run. This may be used to - # get unique log file names for example - workflow_id = "id_" + str(int(datetime.datetime.now().timestamp())) - workflow_config["WORKFLOW_ID"] = workflow_id - log_info(f"""WORKFLOW ID = {workflow_id}""") - - debug = workflow_config.get("DEBUG") - if debug: - log_info( - """ - Setting VERBOSE to \"TRUE\" because DEBUG has been set to \"TRUE\"...""" - ) - workflow_config["VERBOSE"] = True - - verbose = workflow_config["VERBOSE"] - - # The forecast length (in integer hours) cannot contain more than 3 characters. - # Thus, its maximum value is 999. - fcst_len_hrs_max = 999 - fcst_len_hrs = workflow_config.get("FCST_LEN_HRS") - if fcst_len_hrs > fcst_len_hrs_max: - raise ValueError( - f""" - Forecast length is greater than maximum allowed length: - FCST_LEN_HRS = {fcst_len_hrs} - fcst_len_hrs_max = {fcst_len_hrs_max}""" - ) - - - # - # ----------------------------------------------------------------------- - # - # Set the full path to the experiment directory. Then check if it already - # exists and if so, deal with it as specified by PREEXISTING_DIR_METHOD. 
- # - # ----------------------------------------------------------------------- - # - - expt_subdir = workflow_config.get("EXPT_SUBDIR", "") - exptdir = workflow_config.get("EXPTDIR") - - # Update some paths that include EXPTDIR and EXPT_BASEDIR - extend_yaml(expt_config) - preexisting_dir_method = workflow_config.get("PREEXISTING_DIR_METHOD", "") - try: - check_for_preexist_dir_file(exptdir, preexisting_dir_method) - except ValueError: - logger.exception( - f""" - Check that the following values are valid: - EXPTDIR {exptdir} - PREEXISTING_DIR_METHOD {preexisting_dir_method} - """ - ) - raise - except FileExistsError: - errmsg = dedent( - f""" - EXPTDIR ({exptdir}) already exists, and PREEXISTING_DIR_METHOD = {preexisting_dir_method} - - To ignore this error, delete the directory, or set - PREEXISTING_DIR_METHOD = delete, or - PREEXISTING_DIR_METHOD = rename - in your config file. - """ - ) - raise FileExistsError(errmsg) from None - - # - # ----------------------------------------------------------------------- - # - # Set cron table entry for relaunching the workflow if - # USE_CRON_TO_RELAUNCH is set to TRUE. 
- # - # ----------------------------------------------------------------------- - # - if workflow_config.get("USE_CRON_TO_RELAUNCH"): - intvl_mnts = workflow_config.get("CRON_RELAUNCH_INTVL_MNTS") - launch_script_fn = workflow_config.get("WFLOW_LAUNCH_SCRIPT_FN") - launch_log_fn = workflow_config.get("WFLOW_LAUNCH_LOG_FN") - workflow_config["CRONTAB_LINE"] = ( - f"""*/{intvl_mnts} * * * * cd {exptdir} && """ - f"""./{launch_script_fn} called_from_cron="TRUE" >> ./{launch_log_fn} 2>&1""" - ) - # - # ----------------------------------------------------------------------- - # - # Check user settings against platform settings - # - # ----------------------------------------------------------------------- - # - - workflow_switches = expt_config["workflow_switches"] - run_task_make_grid = workflow_switches['RUN_TASK_MAKE_GRID'] - run_task_make_orog = workflow_switches['RUN_TASK_MAKE_OROG'] - run_task_make_sfc_climo = workflow_switches['RUN_TASK_MAKE_SFC_CLIMO'] - - # Necessary tasks are turned on - pregen_basedir = expt_config["platform"].get("DOMAIN_PREGEN_BASEDIR") - if pregen_basedir is None and not ( - run_task_make_grid and run_task_make_orog and run_task_make_sfc_climo - ): - raise Exception( - f""" - DOMAIN_PREGEN_BASEDIR must be set when any of the following - tasks are turned off: - RUN_TASK_MAKE_GRID = {run_task_make_grid} - RUN_TASK_MAKE_OROG = {run_task_make_orog} - RUN_TASK_MAKE_SFC_CLIMO = {run_task_make_sfc_climo}""" - ) - - # A batch system account is specified - if expt_config["platform"].get("WORKFLOW_MANAGER") is not None: - if not expt_config.get("user").get("ACCOUNT"): - raise Exception( - dedent( - f""" - ACCOUNT must be specified in config or machine file if using a workflow manager. 
- WORKFLOW_MANAGER = {expt_config["platform"].get("WORKFLOW_MANAGER")}\n""" - ) - ) - - # - # ----------------------------------------------------------------------- - # - # ICS and LBCS settings and validation - # - # ----------------------------------------------------------------------- - # - def get_location(xcs, fmt, expt_cfg): - ics_lbcs = expt_cfg.get("data", {}).get("ics_lbcs") - if ics_lbcs is not None: - v = ics_lbcs.get(xcs) - if not isinstance(v, dict): - return v - else: - return v.get(fmt, "") - else: - return "" - - # Get the paths to any platform-supported data streams - get_extrn_ics = expt_config.get("task_get_extrn_ics", {}) - extrn_mdl_sysbasedir_ics = get_location( - get_extrn_ics.get("EXTRN_MDL_NAME_ICS"), - get_extrn_ics.get("FV3GFS_FILE_FMT_ICS"), - expt_config, - ) - get_extrn_ics["EXTRN_MDL_SYSBASEDIR_ICS"] = extrn_mdl_sysbasedir_ics - - get_extrn_lbcs = expt_config.get("task_get_extrn_lbcs", {}) - extrn_mdl_sysbasedir_lbcs = get_location( - get_extrn_lbcs.get("EXTRN_MDL_NAME_LBCS"), - get_extrn_lbcs.get("FV3GFS_FILE_FMT_LBCS"), - expt_config, - ) - get_extrn_lbcs["EXTRN_MDL_SYSBASEDIR_LBCS"] = extrn_mdl_sysbasedir_lbcs - - # remove the data key -- it's not needed beyond this point - if "data" in expt_config: - expt_config.pop("data") - - # Check for the user-specified directories for external model files if - # USE_USER_STAGED_EXTRN_FILES is set to TRUE - task_keys = zip( - [get_extrn_ics, get_extrn_lbcs], - ["EXTRN_MDL_SOURCE_BASEDIR_ICS", "EXTRN_MDL_SOURCE_BASEDIR_LBCS"], - ) - - for task, data_key in task_keys: - use_staged_extrn_files = task.get("USE_USER_STAGED_EXTRN_FILES") - if use_staged_extrn_files: - basedir = task[data_key] - # Check for the base directory up to the first templated field. - idx = basedir.find("$") - if idx == -1: - idx = len(basedir) - - if not os.path.exists(basedir[:idx]): - raise FileNotFoundError( - f''' - The user-staged-data directory does not exist. 
- Please point to the correct path where your external - model files are stored. - {data_key} = \"{basedir}\"''' - ) - - # - # ----------------------------------------------------------------------- - # - # Forecast settings - # - # ----------------------------------------------------------------------- - # - - fcst_config = expt_config["task_run_fcst"] - grid_config = expt_config["task_make_grid"] - - # Warn if user has specified a large timestep inappropriately - hires_ccpp_suites = ["FV3_RRFS_v1beta", "FV3_WoFS_v0", "FV3_HRRR"] - if workflow_config["CCPP_PHYS_SUITE"] in hires_ccpp_suites: - dt = fcst_config.get("DT_ATMOS") - if dt: - if dt > 40: - logger.warning(dedent( - f""" - WARNING: CCPP suite {workflow_config["CCPP_PHYS_SUITE"]} requires short - time step regardless of grid resolution. The user-specified value - DT_ATMOS = {fcst_config.get("DT_ATMOS")} - may result in CFL violations or other errors! - """ - )) - - # Gather the pre-defined grid parameters, if needed - if workflow_config.get("PREDEF_GRID_NAME"): - grid_params = set_predef_grid_params( - USHdir, - workflow_config["PREDEF_GRID_NAME"], - fcst_config["QUILTING"], - ) - - # Users like to change these variables, so don't overwrite them - special_vars = ["DT_ATMOS", "LAYOUT_X", "LAYOUT_Y", "BLOCKSIZE"] - for param, value in grid_params.items(): - if param in special_vars: - param_val = fcst_config.get(param) - if param_val and isinstance(param_val, str) and "{{" not in param_val: - continue - elif isinstance(param_val, (int, float)): - continue - # DT_ATMOS needs special treatment based on CCPP suite - elif param == "DT_ATMOS": - if workflow_config["CCPP_PHYS_SUITE"] in hires_ccpp_suites and grid_params[param] > 40: - logger.warning(dedent( - f""" - WARNING: CCPP suite {workflow_config["CCPP_PHYS_SUITE"]} requires short - time step regardless of grid resolution; setting DT_ATMOS to 40.\n - This value can be overwritten in the user config file. 
- """ - )) - fcst_config[param] = 40 - else: - fcst_config[param] = value - else: - fcst_config[param] = value - elif param.startswith("WRTCMP"): - fcst_config[param] = value - elif param == "GRID_GEN_METHOD": - workflow_config[param] = value - else: - grid_config[param] = value - - # set varying forecast lengths only when fcst_len_hrs=-1 - - fcst_len_hrs = workflow_config.get("FCST_LEN_HRS") - if fcst_len_hrs == -1: - - # Check that the number of entries divides into a day - fcst_len_cycl = workflow_config.get("FCST_LEN_CYCL") - incr_cycl_freq = int(workflow_config.get("INCR_CYCL_FREQ")) - - date_first_cycl = workflow_config.get("DATE_FIRST_CYCL") - date_last_cycl = workflow_config.get("DATE_LAST_CYCL") - - if 24 / incr_cycl_freq != len(fcst_len_cycl): - - # Also allow for the possibility that the user is running - # cycles for less than a day: - num_cycles = len(set_cycle_dates( - date_first_cycl, - date_last_cycl, - incr_cycl_freq)) - - if num_cycles != len(fcst_len_cycl): - logger.error(f""" The number of entries in FCST_LEN_CYCL does - not divide evenly into a 24 hour day or the number of cycles - in your experiment! 
- FCST_LEN_CYCL = {fcst_len_cycl} - """ - ) - raise ValueError - - # check the availability of restart intervals for restart capability of forecast - do_fcst_restart = fcst_config.get("DO_FCST_RESTART") - if do_fcst_restart: - restart_interval = fcst_config.get("RESTART_INTERVAL") - restart_hrs = [] - if " " in str(restart_interval): - restart_hrs = restart_interval.split() - else: - restart_hrs.append(str(restart_interval)) - - lbc_spec_intvl_hrs = expt_config["task_get_extrn_lbcs"]["LBC_SPEC_INTVL_HRS"] - for irst in restart_hrs: - rem_rst = int(irst) % lbc_spec_intvl_hrs - if rem_rst != 0: - raise Exception( - f""" - The restart interval is not divided by LBC_SPEC_INTVL_HRS: - RESTART_INTERVAL = {irst} - LBC_SPEC_INTVL_HRS = {lbc_spec_intvl_hrs}""" - ) - - # - # ----------------------------------------------------------------------- - # - # Set parameters according to the type of horizontal grid generation - # method specified. - # - # ----------------------------------------------------------------------- - # - grid_gen_method = workflow_config["GRID_GEN_METHOD"] - if grid_gen_method == "GFDLgrid": - grid_params = set_gridparams_GFDLgrid( - lon_of_t6_ctr=grid_config["GFDLgrid_LON_T6_CTR"], - lat_of_t6_ctr=grid_config["GFDLgrid_LAT_T6_CTR"], - res_of_t6g=grid_config["GFDLgrid_NUM_CELLS"], - stretch_factor=grid_config["GFDLgrid_STRETCH_FAC"], - refine_ratio_t6g_to_t7g=grid_config["GFDLgrid_REFINE_RATIO"], - istart_of_t7_on_t6g=grid_config["GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G"], - iend_of_t7_on_t6g=grid_config["GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G"], - jstart_of_t7_on_t6g=grid_config["GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G"], - jend_of_t7_on_t6g=grid_config["GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G"], - verbose=verbose, - nh4=expt_config["constants"]["NH4"], - ) - elif grid_gen_method == "ESGgrid": - grid_params = set_gridparams_ESGgrid( - lon_ctr=grid_config["ESGgrid_LON_CTR"], - lat_ctr=grid_config["ESGgrid_LAT_CTR"], - nx=grid_config["ESGgrid_NX"], - 
ny=grid_config["ESGgrid_NY"], - pazi=grid_config["ESGgrid_PAZI"], - halo_width=grid_config["ESGgrid_WIDE_HALO_WIDTH"], - delx=grid_config["ESGgrid_DELX"], - dely=grid_config["ESGgrid_DELY"], - constants=expt_config["constants"], - ) - else: - - errmsg = dedent( - f""" - Valid values of GRID_GEN_METHOD are GFDLgrid and ESGgrid. - The value provided is: - GRID_GEN_METHOD = {grid_gen_method} - """ - ) - raise KeyError(errmsg) from None - - # Add a grid parameter section to the experiment config - expt_config["grid_params"] = grid_params - - # Check to make sure that mandatory forecast variables are set. - vlist = [ - "DT_ATMOS", - "LAYOUT_X", - "LAYOUT_Y", - "BLOCKSIZE", - ] - for val in vlist: - if not fcst_config.get(val): - raise Exception(f"\nMandatory variable '{val}' has not been set\n") - - # - # ----------------------------------------------------------------------- - # - # Set magnitude of stochastic ad-hoc schemes to -999.0 if they are not - # being used. This is required at the moment, since "do_shum/sppt/skeb" - # does not override the use of the scheme unless the magnitude is also - # specifically set to -999.0. If all "do_shum/sppt/skeb" are set to - # "false," then none will run, regardless of the magnitude values. - # - # ----------------------------------------------------------------------- - # - global_sect = expt_config["global"] - if not global_sect.get("DO_SHUM"): - global_sect["SHUM_MAG"] = -999.0 - if not global_sect.get("DO_SKEB"): - global_sect["SKEB_MAG"] = -999.0 - if not global_sect.get("DO_SPPT"): - global_sect["SPPT_MAG"] = -999.0 - # - # ----------------------------------------------------------------------- - # - # If running with SPP in MYNN PBL, MYNN SFC, GSL GWD, Thompson MP, or - # RRTMG, count the number of entries in SPP_VAR_LIST to correctly set - # N_VAR_SPP, otherwise set it to zero. 
- # - # ----------------------------------------------------------------------- - # - if global_sect.get("DO_SPP"): - global_sect["N_VAR_SPP"] = len(global_sect["SPP_VAR_LIST"]) - else: - global_sect["N_VAR_SPP"] = 0 - # - # ----------------------------------------------------------------------- - # - # If running with SPP, confirm that each SPP-related namelist value - # contains the same number of entries as N_VAR_SPP (set above to be equal - # to the number of entries in SPP_VAR_LIST). - # - # ----------------------------------------------------------------------- - # - spp_vars = [ - "SPP_MAG_LIST", - "SPP_LSCALE", - "SPP_TSCALE", - "SPP_SIGTOP1", - "SPP_SIGTOP2", - "SPP_STDDEV_CUTOFF", - "ISEED_SPP", - ] - - if global_sect.get("DO_SPP"): - for spp_var in spp_vars: - if len(global_sect[spp_var]) != global_sect["N_VAR_SPP"]: - raise Exception( - f""" - All MYNN PBL, MYNN SFC, GSL GWD, Thompson MP, or RRTMG SPP-related namelist - variables must be of equal length to SPP_VAR_LIST: - SPP_VAR_LIST (length {global_sect['N_VAR_SPP']}) - {spp_var} (length {len(global_sect[spp_var])}) - """ - ) - # - # ----------------------------------------------------------------------- - # - # If running with Noah or RUC-LSM SPP, count the number of entries in - # LSM_SPP_VAR_LIST to correctly set N_VAR_LNDP, otherwise set it to zero. - # Also set LNDP_TYPE to 2 for LSM SPP, otherwise set it to zero. Finally, - # initialize an "FHCYC_LSM_SPP" variable to 0 and set it to 999 if LSM SPP - # is turned on. This requirement is necessary since LSM SPP cannot run with - # FHCYC=0 at the moment, but FHCYC cannot be set to anything less than the - # length of the forecast either. A bug fix will be submitted to - # ufs-weather-model soon, at which point, this requirement can be removed - # from regional_workflow. 
- # - # ----------------------------------------------------------------------- - # - if global_sect.get("DO_LSM_SPP"): - global_sect["N_VAR_LNDP"] = len(global_sect["LSM_SPP_VAR_LIST"]) - global_sect["LNDP_TYPE"] = 2 - global_sect["LNDP_MODEL_TYPE"] = 2 - global_sect["FHCYC_LSM_SPP_OR_NOT"] = 999 - else: - global_sect["N_VAR_LNDP"] = 0 - global_sect["LNDP_TYPE"] = 0 - global_sect["LNDP_MODEL_TYPE"] = 0 - global_sect["FHCYC_LSM_SPP_OR_NOT"] = 0 - # - # ----------------------------------------------------------------------- - # - # If running with LSM SPP, confirm that each LSM SPP-related namelist - # value contains the same number of entries as N_VAR_LNDP (set above to - # be equal to the number of entries in LSM_SPP_VAR_LIST). - # - # ----------------------------------------------------------------------- - # - lsm_spp_vars = [ - "LSM_SPP_MAG_LIST", - "LSM_SPP_LSCALE", - "LSM_SPP_TSCALE", - ] - if global_sect.get("DO_LSM_SPP"): - for lsm_spp_var in lsm_spp_vars: - if len(global_sect[lsm_spp_var]) != global_sect["N_VAR_LNDP"]: - raise Exception( - f""" - All MYNN PBL, MYNN SFC, GSL GWD, Thompson MP, or RRTMG SPP-related namelist - variables must be of equal length to SPP_VAR_LIST: - All Noah or RUC-LSM SPP-related namelist variables (except ISEED_LSM_SPP) - must be equal of equal length to LSM_SPP_VAR_LIST: - LSM_SPP_VAR_LIST (length {global_sect['N_VAR_LNDP']}) - {lsm_spp_var} (length {len(global_sect[lsm_spp_var])} - """ - ) - - # Check whether the forecast length (FCST_LEN_HRS) is evenly divisible - # by the BC update interval (LBC_SPEC_INTVL_HRS). If so, generate an - # array of forecast hours at which the boundary values will be updated. 
- - lbc_spec_intvl_hrs = get_extrn_lbcs.get("LBC_SPEC_INTVL_HRS") - rem = fcst_len_hrs % lbc_spec_intvl_hrs - if rem != 0 and fcst_len_hrs > 0: - raise Exception( - f""" - The forecast length (FCST_LEN_HRS) is not evenly divisible by the lateral - boundary conditions update interval (LBC_SPEC_INTVL_HRS): - FCST_LEN_HRS = {fcst_len_hrs} - LBC_SPEC_INTVL_HRS = {lbc_spec_intvl_hrs} - rem = FCST_LEN_HRS%%LBC_SPEC_INTVL_HRS = {rem}""" - ) - - # - # ----------------------------------------------------------------------- - # - # Post-processing validation and settings - # - # ----------------------------------------------------------------------- - # - - # If using a custom post configuration file, make sure that it exists. - post_config = expt_config["task_run_post"] - if post_config.get("USE_CUSTOM_POST_CONFIG_FILE"): - custom_post_config_fp = post_config.get("CUSTOM_POST_CONFIG_FP") - try: - # os.path.exists returns exception if passed None, so use - # "try/except" to catch it and the non-existence of a - # provided path - if not os.path.exists(custom_post_config_fp): - raise FileNotFoundError( - dedent( - f""" - USE_CUSTOM_POST_CONFIG_FILE has been set, but the custom post configuration file - CUSTOM_POST_CONFIG_FP = {custom_post_config_fp} - could not be found.""" - ) - ) from None - except TypeError: - raise TypeError( - dedent( - f""" - USE_CUSTOM_POST_CONFIG_FILE has been set, but the custom - post configuration file path (CUSTOM_POST_CONFIG_FP) is - None. - """ - ) - ) from None - except FileNotFoundError: - raise - - # If using external CRTM fix files to allow post-processing of synthetic - # satellite products from the UPP, make sure the CRTM fix file directory exists. 
- if global_sect.get("USE_CRTM"): - crtm_dir = global_sect.get("CRTM_DIR") - try: - # os.path.exists returns exception if passed None, so use - # "try/except" to catch it and the non-existence of a - # provided path - if not os.path.exists(crtm_dir): - raise FileNotFoundError( - dedent( - f""" - USE_CRTM has been set, but the external CRTM fix file directory: - CRTM_DIR = {crtm_dir} - could not be found.""" - ) - ) from None - except TypeError: - raise TypeError( - dedent( - f""" - USE_CRTM has been set, but the external CRTM fix file - directory (CRTM_DIR) is None. - """ - ) - ) from None - except FileNotFoundError: - raise - - # If performing sub-hourly model output and post-processing, check that - # the output interval DT_SUBHOURLY_POST_MNTS (in minutes) is specified - # correctly. - if post_config.get("SUB_HOURLY_POST"): - - # Subhourly post should be set with minutes between 1 and 59 for - # real subhourly post to be performed. - dt_subhourly_post_mnts = post_config.get("DT_SUBHOURLY_POST_MNTS") - if dt_subhourly_post_mnts == 0: - logger.warning( - f""" - When performing sub-hourly post (i.e. SUB_HOURLY_POST set to \"TRUE\"), - DT_SUBHOURLY_POST_MNTS must be set to a value greater than 0; otherwise, - sub-hourly output is not really being performed: - DT_SUBHOURLY_POST_MNTS = \"{DT_SUBHOURLY_POST_MNTS}\" - Resetting SUB_HOURLY_POST to \"FALSE\". If you do not want this, you - must set DT_SUBHOURLY_POST_MNTS to something other than zero.""" - ) - post_config["SUB_HOURLY_POST"] = False - - if dt_subhourly_post_mnts < 1 or dt_subhourly_post_mnts > 59: - raise ValueError( - f''' - When SUB_HOURLY_POST is set to \"TRUE\", - DT_SUBHOURLY_POST_MNTS must be set to an integer between 1 and 59, - inclusive but: - DT_SUBHOURLY_POST_MNTS = \"{dt_subhourly_post_mnts}\"''' - ) - - # Check that DT_SUBHOURLY_POST_MNTS (after converting to seconds) is - # evenly divisible by the forecast model's main time step DT_ATMOS. 
- dt_atmos = fcst_config["DT_ATMOS"] - rem = dt_subhourly_post_mnts * 60 % dt_atmos - if rem != 0: - raise ValueError( - f""" - When SUB_HOURLY_POST is set to \"TRUE\") the post - processing interval in seconds must be evenly divisible - by the time step DT_ATMOS used in the forecast model, - i.e. the remainder must be zero. In this case, it is - not: - - DT_SUBHOURLY_POST_MNTS = \"{dt_subhourly_post_mnts}\" - DT_ATMOS = \"{dt_atmos}\" - remainder = (DT_SUBHOURLY_POST_MNTS*60) %% DT_ATMOS = {rem} - - Please reset DT_SUBHOURLY_POST_MNTS and/or DT_ATMOS so - that this remainder is zero.""" - ) - - # Make sure the post output domain is set - predef_grid_name = workflow_config.get("PREDEF_GRID_NAME") - post_output_domain_name = post_config.get("POST_OUTPUT_DOMAIN_NAME") - - if not post_output_domain_name: - if not predef_grid_name: - raise Exception( - f""" - The domain name used in naming the run_post output files - (POST_OUTPUT_DOMAIN_NAME) has not been set: - POST_OUTPUT_DOMAIN_NAME = \"{post_output_domain_name}\" - If this experiment is not using a predefined grid (i.e. if - PREDEF_GRID_NAME is set to a null string), POST_OUTPUT_DOMAIN_NAME - must be set in the configuration file (\"{user_config}\"). """ - ) - post_output_domain_name = predef_grid_name - - if not isinstance(post_output_domain_name, int): - post_output_domain_name = lowercase(post_output_domain_name) - - # Write updated value of POST_OUTPUT_DOMAIN_NAME back to dictionary - post_config["POST_OUTPUT_DOMAIN_NAME"] = post_output_domain_name - - # - # ----------------------------------------------------------------------- - # - # Set the output directory locations - # - # ----------------------------------------------------------------------- - # - - # These NCO variables need to be set based on the user's specified - # run environment. The default is set in config_defaults for nco. If - # running in community mode, we set these paths to the experiment - # directory. 
- nco_vars = [ - "opsroot_dfv", - "comroot_dfv", - "dataroot_dfv", - "dcomroot_dfv", - "comin_basedir", - "comout_basedir", - ] - - nco_config = expt_config["nco"] - - # Use env variables for NCO variables and create NCO directories - workflow_manager = expt_config["platform"].get("WORKFLOW_MANAGER") - if workflow_manager == "rocoto": - for nco_var in nco_vars: - envar = os.environ.get(nco_var) - if envar is not None: - nco_config[nco_var.upper()] = envar - - mkdir_vrfy(f' -p "{nco_config.get("OPSROOT_dfv")}"') - mkdir_vrfy(f' -p "{nco_config.get("COMROOT_dfv")}"') - mkdir_vrfy(f' -p "{nco_config.get("DATAROOT_dfv")}"') - mkdir_vrfy(f' -p "{nco_config.get("DCOMROOT_dfv")}"') - mkdir_vrfy(f' -p "{nco_config.get("LOGBASEDIR_dfv")}"') - if nco_config["DBNROOT_dfv"] and workflow_manager == "rocoto": - mkdir_vrfy(f' -p "{nco_config["DBNROOT_dfv"]}"') - - # create experiment dir - mkdir_vrfy(f' -p "{exptdir}"') - - # ----------------------------------------------------------------------- - # - # The FV3 forecast model needs the following input files in the run - # directory to start a forecast: - # - # (1) The data table file - # (2) The diagnostics table file - # (3) The field table file - # (4) The FV3 namelist file - # (5) The model configuration file - # (6) The NEMS configuration file - # (7) The CCPP physics suite definition file - # - # The workflow contains templates for the first six of these files. - # Template files are versions of these files that contain placeholder - # (i.e. dummy) values for various parameters. The experiment/workflow - # generation scripts copy these templates to appropriate locations in - # the experiment directory (either the top of the experiment directory - # or one of the cycle subdirectories) and replace the placeholders in - # these copies by actual values specified in the experiment/workflow - # configuration file (or derived from such values). The scripts then - # use the resulting "actual" files as inputs to the forecast model. 
- # - # Note that the CCPP physics suite definition file does not have a - # corresponding template file because it does not contain any values - # that need to be replaced according to the experiment/workflow - # configuration. If using CCPP, this file simply needs to be copied - # over from its location in the forecast model's directory structure - # to the experiment directory. - # - # Below, we first set the names of the templates for the first six files - # listed above. We then set the full paths to these template files. - # Note that some of these file names depend on the physics suite while - # others do not. - # - # ----------------------------------------------------------------------- - # - # Check for the CCPP_PHYSICS suite xml file - ccpp_phys_suite_in_ccpp_fp = workflow_config["CCPP_PHYS_SUITE_IN_CCPP_FP"] - if not os.path.exists(ccpp_phys_suite_in_ccpp_fp): - raise FileNotFoundError( - f""" - The CCPP suite definition file (CCPP_PHYS_SUITE_IN_CCPP_FP) does not exist - in the local clone of the ufs-weather-model: - CCPP_PHYS_SUITE_IN_CCPP_FP = '{ccpp_phys_suite_in_ccpp_fp}'""" - ) - - # Check for the field dict file - field_dict_in_uwm_fp = workflow_config["FIELD_DICT_IN_UWM_FP"] - if not os.path.exists(field_dict_in_uwm_fp): - raise FileNotFoundError( - f""" - The field dictionary file (FIELD_DICT_IN_UWM_FP) does not exist - in the local clone of the ufs-weather-model: - FIELD_DICT_IN_UWM_FP = '{field_dict_in_uwm_fp}'""" - ) - - fixed_files = expt_config["fixed_files"] - # Set the appropriate ozone production/loss file paths and symlinks - ozone_param, fixgsm_ozone_fn, ozone_link_mappings = set_ozone_param( - ccpp_phys_suite_in_ccpp_fp, - fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"], - ) - - # Reset the dummy value saved in the last list item to the ozone - # file name - fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"][-1] = fixgsm_ozone_fn - - # Reset the experiment config list with the update list - 
fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"] = ozone_link_mappings - - log_info( - f""" - The ozone parameter used for this experiment is {ozone_param}. - """ - ) - - log_info( - f""" - The list that sets the mapping between symlinks in the cycle - directory, and the files in the FIXam directory has been updated - to include the ozone production/loss file. - """, - verbose=verbose, - ) - - log_info( - f""" - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = {list_to_str(ozone_link_mappings)} - """, - verbose=verbose, - dedent_=False, - ) - - # - # ----------------------------------------------------------------------- - # - # Check that the set of tasks to run in the workflow is internally - # consistent. - # - # ----------------------------------------------------------------------- - # - - # Ensemble verification can only be run in ensemble mode - do_ensemble = global_sect["DO_ENSEMBLE"] - run_task_vx_ensgrid = workflow_switches["RUN_TASK_VX_ENSGRID"] - run_task_vx_enspoint = workflow_switches["RUN_TASK_VX_ENSPOINT"] - if (not do_ensemble) and (run_task_vx_ensgrid or run_task_vx_enspoint): - raise Exception( - f''' - Ensemble verification can not be run unless running in ensemble mode: - DO_ENSEMBLE = \"{do_ensemble}\" - RUN_TASK_VX_ENSGRID = \"{run_task_vx_ensgrid}\" - RUN_TASK_VX_ENSPOINT = \"{run_task_vx_enspoint}\"''' - ) - - # Temporary solution to link fix directory for rocoto in AQM.v7 - #homeaqm = expt_config.get("user", {}).get("HOMEaqm") - #homeaqm_fix = os.path.join(homeaqm,"fix") - #if os.path.islink(homeaqm_fix) or os.path.exists(homeaqm_fix): - # rm_vrfy("-rf", homeaqm_fix) - #fixlam = workflow_config["FIXlam"] - #mkdir_vrfy(f' -p "{fixlam}"') - - # - # ----------------------------------------------------------------------- - # NOTE: currently this is executed no matter what, should it be dependent on the logic described below?? 
- # If not running the TN_MAKE_GRID, TN_MAKE_OROG, and/or TN_MAKE_SFC_CLIMO - # tasks, create symlinks under the FIXlam directory to pregenerated grid, - # orography, and surface climatology files. - # - # ----------------------------------------------------------------------- - # - # - # Use the pregenerated domain files if the RUN_TASK_MAKE* tasks are - # turned off. Link the files, and check that they all contain the - # same resolution input. - # - run_task_make_ics = workflow_switches['RUN_TASK_MAKE_LBCS'] - run_task_make_lbcs = workflow_switches['RUN_TASK_MAKE_ICS'] - run_task_run_fcst = workflow_switches['RUN_TASK_RUN_FCST'] - run_task_makeics_or_makelbcs_or_runfcst = run_task_make_ics or \ - run_task_make_lbcs or \ - run_task_run_fcst - # Flags for creating symlinks to pre-generated grid, orography, and sfc_climo files. - # These consider dependencies of other tasks on each pre-processing task. - create_symlinks_to_pregen_files = { - "GRID": (not workflow_switches['RUN_TASK_MAKE_GRID']) and \ - (run_task_make_orog or run_task_make_sfc_climo or run_task_makeics_or_makelbcs_or_runfcst), - "OROG": (not workflow_switches['RUN_TASK_MAKE_OROG']) and \ - (run_task_make_sfc_climo or run_task_makeics_or_makelbcs_or_runfcst), - "SFC_CLIMO": (not workflow_switches['RUN_TASK_MAKE_SFC_CLIMO']) and \ - (run_task_make_ics or run_task_make_lbcs), - } - - prep_tasks = ["GRID", "OROG", "SFC_CLIMO"] - res_in_fixlam_filenames = None - for prep_task in prep_tasks: - res_in_fns = "" - # If the user doesn't want to run the given task, link the fix - # file from the staged files. - if create_symlinks_to_pregen_files[prep_task]: - sect_key = f"task_make_{prep_task.lower()}" - dir_key = f"{prep_task}_DIR" - task_dir = expt_config[sect_key].get(dir_key) - - if not task_dir: - task_dir = os.path.join(pregen_basedir, predef_grid_name) - expt_config[sect_key][dir_key] = task_dir - msg = dedent( - f""" - {dir_key} will point to a location containing pre-generated files. 
- Setting {dir_key} = {task_dir} - """ - ) - logger.warning(msg) - - if not os.path.exists(task_dir): - msg = dedent( - f""" - File directory does not exist! - {dir_key} needs {task_dir} - """ - ) - raise FileNotFoundError(msg) - - # Link the fix files and check that their resolution is consistent - res_in_fns = link_fix( - verbose=verbose, - file_group=prep_task.lower(), - source_dir=task_dir, - target_dir=workflow_config["FIXlam"], - ccpp_phys_suite=workflow_config["CCPP_PHYS_SUITE"], - constants=expt_config["constants"], - dot_or_uscore=workflow_config["DOT_OR_USCORE"], - nhw=grid_params["NHW"], - run_task=False, - sfc_climo_fields=fixed_files["SFC_CLIMO_FIELDS"], - ) - if not res_in_fixlam_filenames: - res_in_fixlam_filenames = res_in_fns - else: - if res_in_fixlam_filenames != res_in_fns: - raise Exception( - dedent( - f""" - The resolution of the pregenerated files for - {prep_task} do not match those that were alread - set: - - Resolution in {prep_task}: {res_in_fns} - Resolution expected: {res_in_fixlam_filenames} - """ - ) - ) - - if not os.path.exists(task_dir): - raise FileNotFoundError( - f''' - The directory ({dir_key}) that should contain the pregenerated - {prep_task.lower()} files does not exist: - {dir_key} = \"{task_dir}\"''' - ) - - workflow_config["RES_IN_FIXLAM_FILENAMES"] = res_in_fixlam_filenames - workflow_config["CRES"] = f"C{res_in_fixlam_filenames}" - - # Temporary solution to link fix directory for rocoto in AQM.v7 - #homeaqm = expt_config.get("user", {}).get("HOMEaqm") - #homeaqm_fix = os.path.join(homeaqm,"fix") - #if os.path.exists(homeaqm_fix): - # rm_vrfy("-rf", homeaqm_fix) - #fixaqm_sav = expt_config["platform"].get("FIXaqm_sav") - #ln_vrfy(f"""-fsn {fixaqm_sav} {homeaqm_fix}""") - - # - # ----------------------------------------------------------------------- - # - # Turn off post task if it's not consistent with the forecast's - # user-setting of WRITE_DOPOST - # - # 
----------------------------------------------------------------------- - # - if fcst_config["WRITE_DOPOST"]: - # Turn off run_post - if workflow_switches["RUN_TASK_RUN_POST"]: - logger.warning( - dedent( - f""" - Inline post is turned on, deactivating post-processing tasks: - RUN_TASK_RUN_POST = False - """ - ) - ) - workflow_switches["RUN_TASK_RUN_POST"] = False - - # Check if SUB_HOURLY_POST is on - if expt_config["task_run_post"]["SUB_HOURLY_POST"]: - raise Exception( - f""" - SUB_HOURLY_POST is NOT available with Inline Post yet.""" - ) - # - # ----------------------------------------------------------------------- - # - # Call the function that checks whether the RUC land surface model (LSM) - # is being called by the physics suite and sets the workflow variable - # SDF_USES_RUC_LSM to True or False accordingly. - # - # ----------------------------------------------------------------------- - # - workflow_config["SDF_USES_RUC_LSM"] = check_ruc_lsm( - ccpp_phys_suite_fp=ccpp_phys_suite_in_ccpp_fp - ) - # - # ----------------------------------------------------------------------- - # - # Check if the Thompson microphysics parameterization is being - # called by the physics suite and modify certain workflow arrays to - # ensure that fixed files needed by this parameterization are copied - # to the FIXam directory and appropriate symlinks to them are - # created in the run directories. Set the boolean flag - # SDF_USES_THOMPSON_MP to indicates whether Thompson MP is called by - # the physics suite. 
- # - # ----------------------------------------------------------------------- - # - - link_thompson_climo = ( - get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RAP"] - ) or (get_extrn_lbcs["EXTRN_MDL_NAME_LBCS"] not in ["HRRR", "RAP"]) - use_thompson, mapping, fix_files = set_thompson_mp_fix_files( - ccpp_phys_suite_fp=ccpp_phys_suite_in_ccpp_fp, - thompson_mp_climo_fn=workflow_config["THOMPSON_MP_CLIMO_FN"], - link_thompson_climo=link_thompson_climo, - ) - - workflow_config["SDF_USES_THOMPSON_MP"] = use_thompson - - if use_thompson: - fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"].extend(mapping) - fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"].extend(fix_files) - - log_info( - f""" - Since the Thompson microphysics parameterization is being used by this - physics suite (CCPP_PHYS_SUITE), the names of the fixed files needed by - this scheme have been appended to the array FIXgsm_FILES_TO_COPY_TO_FIXam, - and the mappings between these files and the symlinks that need to be - created in the cycle directories have been appended to the array - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING. After these modifications, the - values of these parameters are as follows: - - CCPP_PHYS_SUITE = \"{workflow_config["CCPP_PHYS_SUITE"]}\" - """ - ) - log_info( - f""" - FIXgsm_FILES_TO_COPY_TO_FIXam = - {list_to_str(fixed_files['FIXgsm_FILES_TO_COPY_TO_FIXam'])} - """ - ) - log_info( - f""" - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = - {list_to_str(fixed_files['CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING'])} - """ - ) - # - # ----------------------------------------------------------------------- - # - # Generate var_defns.sh file in the EXPTDIR. This file contains all - # the user-specified settings from expt_config. 
- # - # ----------------------------------------------------------------------- - # - - extend_yaml(expt_config) - for sect, sect_keys in expt_config.items(): - for k, v in sect_keys.items(): - expt_config[sect][k] = str_to_list(v) - extend_yaml(expt_config) - - # print content of var_defns if DEBUG=True - all_lines = cfg_to_yaml_str(expt_config) - log_info(all_lines, verbose=debug) - - global_var_defns_fp = workflow_config["GLOBAL_VAR_DEFNS_FP"] - # print info message - log_info( - f""" - Generating the global experiment variable definitions file here: - GLOBAL_VAR_DEFNS_FP = '{global_var_defns_fp}' - For more detailed information, set DEBUG to 'TRUE' in the experiment - configuration file ('{user_config_fn}').""" - ) - - with open(global_var_defns_fp, "a") as f: - f.write(cfg_to_shell_str(expt_config)) - - # - # ----------------------------------------------------------------------- - # - # Check validity of parameters in one place, here in the end. - # - # ----------------------------------------------------------------------- - # - - # loop through the flattened expt_config and check validity of params - cfg_v = load_config_file(os.path.join(USHdir, "valid_param_vals.yaml")) - for k, v in flatten_dict(expt_config).items(): - if v is None or v == "": - continue - vkey = "valid_vals_" + k - if (vkey in cfg_v) and not (v in cfg_v[vkey]): - raise Exception( - f""" - The variable {k}={v} in the user's configuration - does not have a valid value. Possible values are: - {k} = {cfg_v[vkey]}""" - ) - - return expt_config - - -# -# ----------------------------------------------------------------------- -# -# Call the function defined above. 
-# -# ----------------------------------------------------------------------- -# -if __name__ == "__main__": - USHdir = os.path.dirname(os.path.abspath(__file__)) - setup(USHdir) diff --git a/ush/update_input_nml.py b/ush/update_input_nml.py deleted file mode 100755 index 7207316298..0000000000 --- a/ush/update_input_nml.py +++ /dev/null @@ -1,162 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -import argparse -import unittest -import logging -from textwrap import dedent - -from python_utils import ( - import_vars, - print_input_args, - print_info_msg, - print_err_msg_exit, - cfg_to_yaml_str, - load_shell_config, - flatten_dict, -) - -from set_namelist import set_namelist - - -def update_input_nml(run_dir): - """Update the FV3 input.nml file in the specified run directory - - Args: - run_dir: run directory - Returns: - Boolean - """ - - print_input_args(locals()) - - # import all environment variables - import_vars() - - # - # ----------------------------------------------------------------------- - # - # Update the FV3 input.nml file in the specified run directory. - # - # ----------------------------------------------------------------------- - # - print_info_msg( - f""" - Updating the FV3 input.nml file in the specified run directory (run_dir): - run_dir = '{run_dir}'""", - verbose=VERBOSE, - ) - # - # ----------------------------------------------------------------------- - # - # Set new values of the specific parameters to be updated. 
- # - # ----------------------------------------------------------------------- - # - settings = {} - - # For restart run - if args.restart: - settings["fv_core_nml"] = { - "external_ic": False, - "make_nh": False, - "mountain": True, - "na_init": 0, - "nggps_ic": False, - "warm_start": True, - } - - settings["gfs_physics_nml"] = { - "nstf_name": [2, 0, 0, 0, 0], - } - - - settings_str = cfg_to_yaml_str(settings) - - print_info_msg( - dedent( - f""" - The variable 'settings' specifying values to be used in the FV3 'input.nml' - file for restart has been set as follows:\n - settings =\n\n""" - ) - + settings_str, - verbose=VERBOSE, - ) - # - # ----------------------------------------------------------------------- - # - # Call a python script to update the experiment's actual FV3 INPUT.NML - # file for restart. - # - # ----------------------------------------------------------------------- - # - fv3_input_nml_fp = os.path.join(run_dir, FV3_NML_FN) - - try: - set_namelist( - [ - "-q", - "-n", - fv3_input_nml_fp, - "-u", - settings_str, - "-o", - fv3_input_nml_fp, - ] - ) - except: - logging.exception( - dedent( - f""" - Call to python script set_namelist.py to generate an FV3 namelist file - failed. Parameters passed to this script are: - Full path to base namelist file: - fv3_input_nml_fp = '{fv3_input_nml_fp}' - Full path to output namelist file: - fv3_input_nml_fp = '{fv3_input_nml_fp}' - Namelist settings specified on command line:\n - settings =\n\n""" - ) - + settings_str - ) - return False - - return True - - -def parse_args(argv): - """Parse command line arguments""" - parser = argparse.ArgumentParser(description="Update FV3 input.nml file for restart.") - - parser.add_argument( - "-r", "--run_dir", - dest="run_dir", - required=True, - help="Run directory." 
- ) - - parser.add_argument( - "-p", "--path-to-defns", - dest="path_to_defns", - required=True, - help="Path to var_defns file.", - ) - - parser.add_argument( - "--restart", - action='store_true', - help='Update for restart') - - return parser.parse_args(argv) - - -if __name__ == "__main__": - args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) - cfg = flatten_dict(cfg) - import_vars(dictionary=cfg) - update_input_nml( - run_dir=args.run_dir, - ) diff --git a/ush/valid_param_vals.yaml b/ush/valid_param_vals.yaml deleted file mode 100755 index c929a2d798..0000000000 --- a/ush/valid_param_vals.yaml +++ /dev/null @@ -1,108 +0,0 @@ -# -# Define valid values for various global experiment/workflow variables. -# -valid_vals_VERBOSE: [True, False] -valid_vals_DEBUG: [True, False] -valid_vals_MACHINE: ["HERA", "WCOSS2", "ORION", "JET", "ODIN", "CHEYENNE", "STAMPEDE", "LINUX", "MACOS", "NOAACLOUD", "SINGULARITY", "GAEA"] -valid_vals_SCHED: ["slurm", "pbspro", "lsf", "lsfcray", "none"] -valid_vals_FCST_MODEL: ["ufs-weather-model"] -valid_vals_WORKFLOW_MANAGER: ["rocoto", "ecflow", "none"] -valid_vals_PREDEF_GRID_NAME: [ -"RRFS_CONUS_25km", -"RRFS_CONUS_13km", -"RRFS_CONUS_3km", -"RRFS_CONUScompact_25km", -"RRFS_CONUScompact_13km", -"RRFS_CONUScompact_3km", -"RRFS_SUBCONUS_3km", -"RRFS_AK_13km", -"RRFS_AK_3km", -"CONUS_25km_GFDLgrid", -"CONUS_3km_GFDLgrid", -"EMC_AK", -"EMC_HI", -"EMC_PR", -"EMC_GU", -"GSL_HAFSV0.A_25km", -"GSL_HAFSV0.A_13km", -"GSL_HAFSV0.A_3km", -"GSD_HRRR_AK_50km", -"GSD_HRRR_25km", -"AQM_NA_13km", -"RRFS_NA_13km", -"RRFS_NA_3km", -"SUBCONUS_Ind_3km", -"WoFS_3km" -] -valid_vals_CCPP_PHYS_SUITE: [ -"FV3_GFS_2017_gfdlmp", -"FV3_GFS_2017_gfdlmp_regional", -"FV3_GFS_v15p2", -"FV3_GFS_v15_thompson_mynn_lam3km", -"FV3_GFS_v16", -"FV3_GFS_v17_p8", -"FV3_RRFS_v1beta", -"FV3_WoFS_v0", -"FV3_HRRR" -] -valid_vals_GFDLgrid_NUM_CELLS: [48, 96, 192, 384, 768, 1152, 3072] -valid_vals_EXTRN_MDL_NAME_ICS: ["GSMGFS", "FV3GFS", "GEFS", "GDAS", 
"RAP", "HRRR", "NAM"] -valid_vals_EXTRN_MDL_NAME_LBCS: ["GSMGFS", "FV3GFS", "GEFS", "GDAS", "RAP", "HRRR", "NAM"] -valid_vals_USE_USER_STAGED_EXTRN_FILES: [True, False] -valid_vals_FV3GFS_FILE_FMT_ICS: ["nemsio", "grib2", "netcdf"] -valid_vals_FV3GFS_FILE_FMT_LBCS: ["nemsio", "grib2", "netcdf"] -valid_vals_GRID_GEN_METHOD: ["GFDLgrid", "ESGgrid"] -valid_vals_PREEXISTING_DIR_METHOD: ["delete", "rename", "quit"] -valid_vals_GTYPE: ["regional"] -valid_vals_WRTCMP_output_grid: ["rotated_latlon", "lambert_conformal", "regional_latlon"] -valid_vals_RUN_TASK_MAKE_GRID: [True, False] -valid_vals_RUN_TASK_MAKE_OROG: [True, False] -valid_vals_RUN_TASK_MAKE_SFC_CLIMO: [True, False] -valid_vals_RUN_TASK_RUN_POST: [True, False] -valid_vals_RUN_TASK_RUN_PRDGEN: [True, False] -valid_vals_DO_PARALLEL_PRDGEN: [True, False] -valid_vals_WRITE_DOPOST: [True, False] -valid_vals_RUN_TASK_VX_GRIDSTAT: [True, False] -valid_vals_RUN_TASK_VX_POINTSTAT: [True, False] -valid_vals_RUN_TASK_VX_ENSGRID: [True, False] -valid_vals_RUN_TASK_VX_ENSPOINT: [True, False] -valid_vals_QUILTING: [True, False] -valid_vals_PRINT_ESMF: [True, False] -valid_vals_USE_CRON_TO_RELAUNCH: [True, False] -valid_vals_DOT_OR_USCORE: [".", "_"] -valid_vals_NOMADS: [True, False] -valid_vals_NOMADS_file_type: ["GRIB2", "grib2", "NEMSIO", "nemsio"] -valid_vals_DO_ENSEMBLE: [True, False] -valid_vals_USE_CUSTOM_POST_CONFIG_FILE: [True, False] -valid_vals_USE_CRTM: [True, False] -valid_vals_DO_SHUM: [True, False] -valid_vals_DO_SPPT: [True, False] -valid_vals_DO_SPP: [True, False] -valid_vals_DO_LSM_SPP: [True, False] -valid_vals_DO_SKEB: [True, False] -valid_vals_USE_ZMTNBLCK: [True, False] -valid_vals_USE_FVCOM: [True, False] -valid_vals_FVCOM_WCSTART: ["warm", "WARM", "cold", "COLD"] -valid_vals_COMPILER: ["intel", "gnu"] -valid_vals_SUB_HOURLY_POST: [True, False] -valid_vals_DT_SUBHOURLY_POST_MNTS: [0, 1, 2, 3, 4, 5, 6, 10, 12, 15, 20, 30] -valid_vals_USE_MERRA_CLIMO: [True, False] -valid_vals_CPL_AQM: [True, False] 
-valid_vals_RUN_TASK_AQM_ICS: [True, False] -valid_vals_RUN_TASK_AQM_LBCS: [True, False] -valid_vals_RUN_TASK_NEXUS_GFS_SFC: [True, False] -valid_vals_RUN_TASK_NEXUS_EMISSION: [True, False] -valid_vals_RUN_TASK_FIRE_EMISSION: [True, False] -valid_vals_RUN_TASK_POINT_SOURCE: [True, False] -valid_vals_RUN_TASK_PRE_POST_STAT: [True, False] -valid_vals_RUN_TASK_POST_STAT_O3: [True, False] -valid_vals_RUN_TASK_POST_STAT_PM25: [True, False] -valid_vals_RUN_TASK_BIAS_CORRECTION_O3: [True, False] -valid_vals_RUN_TASK_BIAS_CORRECTION_PM25: [True, False] -valid_vals_DO_AQM_DUST: [True, False] -valid_vals_DO_AQM_CANOPY: [True, False] -valid_vals_DO_AQM_PRODUCT: [True, False] -valid_vals_DO_AQM_CHEM_LBCS: [True, False] -valid_vals_DO_AQM_GEFS_LBCS: [True, False] -valid_vals_DO_AQM_SAVE_AIRNOW_HIST: [True, False] -valid_vals_COLDSTART: [True, False] From 9d1d029f5be6bfc5709ac8080c5df9c2a95ca89d Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Mon, 20 Nov 2023 02:54:34 +0000 Subject: [PATCH 05/24] update ecflow job card for forecast job --- ecf/scripts/forecast/jaqm_forecast.ecf | 4 ---- 1 file changed, 4 deletions(-) diff --git a/ecf/scripts/forecast/jaqm_forecast.ecf b/ecf/scripts/forecast/jaqm_forecast.ecf index 49f03d3ef1..4c1071707e 100644 --- a/ecf/scripts/forecast/jaqm_forecast.ecf +++ b/ecf/scripts/forecast/jaqm_forecast.ecf @@ -22,12 +22,9 @@ module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} module load jasper/${jasper_ver} -module load zlib/${zlib_ver} -module load libpng/${libpng_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load fms/${fms_ver} -module load bacio/${bacio_ver} module load crtm/${crtm_ver} module load g2/${g2_ver} module load g2tmpl/${g2tmpl_ver} @@ -38,7 +35,6 @@ module load pio/${pio_ver} module load esmf/${esmf_ver} module load libjpeg/${libjpeg_ver} module load python/${python_ver} -module load prod_util/${prod_util_ver} module list From 
b3f14c29e4deda783c750506e6157dc874d92079 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Mon, 20 Nov 2023 03:05:05 +0000 Subject: [PATCH 06/24] update make_ics, make_lbcs, job_preamble --- ecf/scripts/prep/jaqm_lbcs.ecf | 1 - scripts/exaqm_make_ics.sh | 2 +- scripts/exaqm_make_lbcs.sh | 2 +- ush/job_preamble.sh | 2 +- 4 files changed, 3 insertions(+), 4 deletions(-) diff --git a/ecf/scripts/prep/jaqm_lbcs.ecf b/ecf/scripts/prep/jaqm_lbcs.ecf index df4d7ed97c..eb85c3b55c 100644 --- a/ecf/scripts/prep/jaqm_lbcs.ecf +++ b/ecf/scripts/prep/jaqm_lbcs.ecf @@ -24,7 +24,6 @@ module load jasper/${jasper_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load nemsio/${nemsio_ver} -#module load nemsiogfs/${nemsiogfs_ver} module load udunits/${udunits_ver} module load gsl/${gsl_ver} module load nco/${nco_ver} diff --git a/scripts/exaqm_make_ics.sh b/scripts/exaqm_make_ics.sh index 88ec2fdb99..1e2a768304 100755 --- a/scripts/exaqm_make_ics.sh +++ b/scripts/exaqm_make_ics.sh @@ -150,7 +150,7 @@ if [ "${RUN_TASK_GET_EXTRN_ICS}" = "FALSE" ]; then EXTRN_DEFNS="${NET}.${cycle}.${EXTRN_MDL_NAME}.ICS.${EXTRN_MDL_VAR_DEFNS_FN}.sh" cmd=" - python3 -u ${USHaqm}/retrieve_data.py \ + ${USHaqm}/retrieve_data.py \ --debug \ --symlink \ --file_set ${file_set} \ diff --git a/scripts/exaqm_make_lbcs.sh b/scripts/exaqm_make_lbcs.sh index c44e8fd325..35bf91f006 100755 --- a/scripts/exaqm_make_lbcs.sh +++ b/scripts/exaqm_make_lbcs.sh @@ -152,7 +152,7 @@ if [ "${RUN_TASK_GET_EXTRN_LBCS}" = "FALSE" ]; then EXTRN_DEFNS="${NET}.${cycle}.${EXTRN_MDL_NAME}.LBCS.${EXTRN_MDL_VAR_DEFNS_FN}.sh" cmd=" - python3 -u ${USHaqm}/retrieve_data.py \ + ${USHaqm}/retrieve_data.py \ --debug \ --symlink \ --file_set ${file_set} \ diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh index 082564705c..6b092c56b9 100755 --- a/ush/job_preamble.sh +++ b/ush/job_preamble.sh @@ -25,7 +25,7 @@ export NET="${NET:-${NET_dfv}}" export RUN="${RUN:-${RUN_dfv}}" export 
model_ver="${model_ver:-${model_ver_dfv}}" export COMROOT="${COMROOT:-${COMROOT_dfv}}" -export LOGBASEDIR="${LOGBASEDIR:-${LOGBASEDIR_dfv}}" +#export LOGBASEDIR="${LOGBASEDIR:-${LOGBASEDIR_dfv}}" export KEEPDATA="${KEEPDATA:-${KEEPDATA_dfv}}" export MAILTO="${MAILTO:-${MAILTO_dfv}}" From d6963f354ebb00d5f46744bc1591834f15c4992a Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Mon, 20 Nov 2023 16:07:32 +0000 Subject: [PATCH 07/24] update make_ics.ecf and delete nexus_emission_00-06.ecf --- ecf/scripts/nexus/jaqm_nexus_emission_00.ecf | 1 - ecf/scripts/nexus/jaqm_nexus_emission_01.ecf | 1 - ecf/scripts/nexus/jaqm_nexus_emission_02.ecf | 1 - ecf/scripts/nexus/jaqm_nexus_emission_03.ecf | 1 - ecf/scripts/nexus/jaqm_nexus_emission_04.ecf | 1 - ecf/scripts/nexus/jaqm_nexus_emission_05.ecf | 1 - ecf/scripts/prep/jaqm_make_ics.ecf | 6 ------ 7 files changed, 12 deletions(-) delete mode 120000 ecf/scripts/nexus/jaqm_nexus_emission_00.ecf delete mode 120000 ecf/scripts/nexus/jaqm_nexus_emission_01.ecf delete mode 120000 ecf/scripts/nexus/jaqm_nexus_emission_02.ecf delete mode 120000 ecf/scripts/nexus/jaqm_nexus_emission_03.ecf delete mode 120000 ecf/scripts/nexus/jaqm_nexus_emission_04.ecf delete mode 120000 ecf/scripts/nexus/jaqm_nexus_emission_05.ecf diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_00.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_00.ecf deleted file mode 120000 index 8868b7f417..0000000000 --- a/ecf/scripts/nexus/jaqm_nexus_emission_00.ecf +++ /dev/null @@ -1 +0,0 @@ -jaqm_nexus_emission_master.ecf \ No newline at end of file diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_01.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_01.ecf deleted file mode 120000 index 8868b7f417..0000000000 --- a/ecf/scripts/nexus/jaqm_nexus_emission_01.ecf +++ /dev/null @@ -1 +0,0 @@ -jaqm_nexus_emission_master.ecf \ No newline at end of file diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_02.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_02.ecf deleted file mode 120000 index 
8868b7f417..0000000000 --- a/ecf/scripts/nexus/jaqm_nexus_emission_02.ecf +++ /dev/null @@ -1 +0,0 @@ -jaqm_nexus_emission_master.ecf \ No newline at end of file diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_03.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_03.ecf deleted file mode 120000 index 8868b7f417..0000000000 --- a/ecf/scripts/nexus/jaqm_nexus_emission_03.ecf +++ /dev/null @@ -1 +0,0 @@ -jaqm_nexus_emission_master.ecf \ No newline at end of file diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_04.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_04.ecf deleted file mode 120000 index 8868b7f417..0000000000 --- a/ecf/scripts/nexus/jaqm_nexus_emission_04.ecf +++ /dev/null @@ -1 +0,0 @@ -jaqm_nexus_emission_master.ecf \ No newline at end of file diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_05.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_05.ecf deleted file mode 120000 index 8868b7f417..0000000000 --- a/ecf/scripts/nexus/jaqm_nexus_emission_05.ecf +++ /dev/null @@ -1 +0,0 @@ -jaqm_nexus_emission_master.ecf \ No newline at end of file diff --git a/ecf/scripts/prep/jaqm_make_ics.ecf b/ecf/scripts/prep/jaqm_make_ics.ecf index 36d6472522..2332fc0c93 100644 --- a/ecf/scripts/prep/jaqm_make_ics.ecf +++ b/ecf/scripts/prep/jaqm_make_ics.ecf @@ -33,12 +33,6 @@ module list ############################################################ ${HOMEaqm}/jobs/JAQM_MAKE_ICS -if [ $? 
-ne 0 ]; then - ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" - ecflow_client --abort - exit -fi - %include %manual From ab2815da42892be05d645e0562b1c63259abb0ba Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Tue, 21 Nov 2023 03:57:38 +0000 Subject: [PATCH 08/24] upate J-jobs and delete unused module files --- ecf/scripts/forecast/jaqm_forecast.ecf | 1 - .../nexus/jaqm_nexus_emission_master.ecf | 1 - ecf/scripts/nexus/jaqm_nexus_post_split.ecf | 1 - ecf/scripts/post/jaqm_post_master.ecf | 1 - ecf/scripts/prep/jaqm_ics.ecf | 1 - ecf/scripts/prep/jaqm_lbcs.ecf | 1 - ecf/scripts/prep/jaqm_make_ics.ecf | 1 - ecf/scripts/prep/jaqm_make_lbcs.ecf | 1 - .../product/jaqm_bias_correction_o3.ecf | 1 - .../product/jaqm_bias_correction_pm25.ecf | 1 - ecf/scripts/product/jaqm_post_stat_o3.ecf | 1 - ecf/scripts/product/jaqm_post_stat_pm25.ecf | 1 - ecf/scripts/product/jaqm_pre_post_stat.ecf | 1 - .../pts_fire_emis/jaqm_fire_emission.ecf | 8 ---- .../pts_fire_emis/jaqm_point_source.ecf | 1 - jobs/JAQM_BIAS_CORRECTION_O3 | 6 +-- jobs/JAQM_BIAS_CORRECTION_PM25 | 5 +-- jobs/JAQM_FIRE_EMISSION | 5 +-- jobs/JAQM_FORECAST | 4 +- jobs/JAQM_ICS | 5 +-- jobs/JAQM_LBCS | 5 +-- jobs/JAQM_MAKE_ICS | 5 +-- jobs/JAQM_MAKE_LBCS | 5 +-- jobs/JAQM_NEXUS_EMISSION | 5 +-- jobs/JAQM_NEXUS_POST_SPLIT | 5 +-- jobs/JAQM_POINT_SOURCE | 5 +-- jobs/JAQM_POST | 26 +++++------ jobs/JAQM_POST_STAT_O3 | 5 +-- jobs/JAQM_POST_STAT_PM25 | 5 +-- jobs/JAQM_PRE_POST_STAT | 5 +-- modulefiles/srw_common.lua | 31 ------------- modulefiles/srw_common_spack.lua | 30 ------------- modulefiles/wflow_wcoss2.lua | 13 ------ parm/machine/wcoss2.yaml | 43 ------------------- ush/job_preamble.sh | 1 + ush/module_wcoss2 | 3 -- ush/preamble.sh | 2 +- versions/run.ver | 3 -- 38 files changed, 29 insertions(+), 215 deletions(-) delete mode 100644 modulefiles/srw_common.lua delete mode 100644 modulefiles/srw_common_spack.lua delete mode 100644 modulefiles/wflow_wcoss2.lua delete mode 100644 
parm/machine/wcoss2.yaml delete mode 100755 ush/module_wcoss2 diff --git a/ecf/scripts/forecast/jaqm_forecast.ecf b/ecf/scripts/forecast/jaqm_forecast.ecf index 4c1071707e..313f01743a 100644 --- a/ecf/scripts/forecast/jaqm_forecast.ecf +++ b/ecf/scripts/forecast/jaqm_forecast.ecf @@ -16,7 +16,6 @@ export cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf index c241c9c2e3..8134ed996b 100644 --- a/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf +++ b/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf @@ -16,7 +16,6 @@ export cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} diff --git a/ecf/scripts/nexus/jaqm_nexus_post_split.ecf b/ecf/scripts/nexus/jaqm_nexus_post_split.ecf index b34477c7da..a57f5e2a54 100644 --- a/ecf/scripts/nexus/jaqm_nexus_post_split.ecf +++ b/ecf/scripts/nexus/jaqm_nexus_post_split.ecf @@ -15,7 +15,6 @@ export cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} diff --git a/ecf/scripts/post/jaqm_post_master.ecf b/ecf/scripts/post/jaqm_post_master.ecf index 65c7796cc8..fad228c918 100644 --- a/ecf/scripts/post/jaqm_post_master.ecf +++ b/ecf/scripts/post/jaqm_post_master.ecf @@ -16,7 +16,6 @@ export 
cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} diff --git a/ecf/scripts/prep/jaqm_ics.ecf b/ecf/scripts/prep/jaqm_ics.ecf index cdcac8c50a..b214131336 100644 --- a/ecf/scripts/prep/jaqm_ics.ecf +++ b/ecf/scripts/prep/jaqm_ics.ecf @@ -15,7 +15,6 @@ export cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} diff --git a/ecf/scripts/prep/jaqm_lbcs.ecf b/ecf/scripts/prep/jaqm_lbcs.ecf index eb85c3b55c..6feff0fb68 100644 --- a/ecf/scripts/prep/jaqm_lbcs.ecf +++ b/ecf/scripts/prep/jaqm_lbcs.ecf @@ -15,7 +15,6 @@ export cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} diff --git a/ecf/scripts/prep/jaqm_make_ics.ecf b/ecf/scripts/prep/jaqm_make_ics.ecf index 2332fc0c93..844eb3b69f 100644 --- a/ecf/scripts/prep/jaqm_make_ics.ecf +++ b/ecf/scripts/prep/jaqm_make_ics.ecf @@ -16,7 +16,6 @@ export cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} diff --git a/ecf/scripts/prep/jaqm_make_lbcs.ecf b/ecf/scripts/prep/jaqm_make_lbcs.ecf index 2eb2f0c7c9..ebbc16af47 100644 --- 
a/ecf/scripts/prep/jaqm_make_lbcs.ecf +++ b/ecf/scripts/prep/jaqm_make_lbcs.ecf @@ -16,7 +16,6 @@ export cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} diff --git a/ecf/scripts/product/jaqm_bias_correction_o3.ecf b/ecf/scripts/product/jaqm_bias_correction_o3.ecf index 971839e83c..564108daa8 100644 --- a/ecf/scripts/product/jaqm_bias_correction_o3.ecf +++ b/ecf/scripts/product/jaqm_bias_correction_o3.ecf @@ -15,7 +15,6 @@ export cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} diff --git a/ecf/scripts/product/jaqm_bias_correction_pm25.ecf b/ecf/scripts/product/jaqm_bias_correction_pm25.ecf index 249d71646e..410a32868e 100644 --- a/ecf/scripts/product/jaqm_bias_correction_pm25.ecf +++ b/ecf/scripts/product/jaqm_bias_correction_pm25.ecf @@ -15,7 +15,6 @@ export cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} diff --git a/ecf/scripts/product/jaqm_post_stat_o3.ecf b/ecf/scripts/product/jaqm_post_stat_o3.ecf index 7fcfdefb1d..276307184d 100644 --- a/ecf/scripts/product/jaqm_post_stat_o3.ecf +++ b/ecf/scripts/product/jaqm_post_stat_o3.ecf @@ -15,7 +15,6 @@ export cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load 
PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} diff --git a/ecf/scripts/product/jaqm_post_stat_pm25.ecf b/ecf/scripts/product/jaqm_post_stat_pm25.ecf index 6166e62193..21652ced73 100644 --- a/ecf/scripts/product/jaqm_post_stat_pm25.ecf +++ b/ecf/scripts/product/jaqm_post_stat_pm25.ecf @@ -15,7 +15,6 @@ export cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} diff --git a/ecf/scripts/product/jaqm_pre_post_stat.ecf b/ecf/scripts/product/jaqm_pre_post_stat.ecf index c57be052e6..4b3d2834a2 100644 --- a/ecf/scripts/product/jaqm_pre_post_stat.ecf +++ b/ecf/scripts/product/jaqm_pre_post_stat.ecf @@ -15,7 +15,6 @@ export cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} diff --git a/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf b/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf index 73555c3a23..ab5b3f38e2 100644 --- a/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf +++ b/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf @@ -15,7 +15,6 @@ export cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} @@ -29,13 +28,6 @@ module load python/${python_ver} module list -#export subcyc=0 -#export pid=${pid:-$$} -#export outid=${outid:-"LL$job"} -#export 
DATA=${DATA:-${DATAROOT}/${jobid:?}} -#. ${HOMEaqm}/parm/config/var_defns.sh -#. ${HOMEaqm}/ush/source_util_funcs.sh - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf b/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf index 0886281956..d8bf54ec47 100644 --- a/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf +++ b/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf @@ -15,7 +15,6 @@ export cyc="%CYC%" ############################################################ # Load modules ############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} module load intel/${intel_ver} module load craype/${craype_ver} module load cray-mpich/${cray_mpich_ver} diff --git a/jobs/JAQM_BIAS_CORRECTION_O3 b/jobs/JAQM_BIAS_CORRECTION_O3 index 8d9a45d646..e5cfd1b8b3 100755 --- a/jobs/JAQM_BIAS_CORRECTION_O3 +++ b/jobs/JAQM_BIAS_CORRECTION_O3 @@ -105,11 +105,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_bias_correction_o3.sh export err=$?; err_chk # -# -if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf $DATA -fi + # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_BIAS_CORRECTION_PM25 b/jobs/JAQM_BIAS_CORRECTION_PM25 index d98f56fa2b..95d8ddfd31 100755 --- a/jobs/JAQM_BIAS_CORRECTION_PM25 +++ b/jobs/JAQM_BIAS_CORRECTION_PM25 @@ -104,10 +104,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_bias_correction_pm25.sh export err=$?; err_chk # -if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf $DATA -fi + # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_FIRE_EMISSION b/jobs/JAQM_FIRE_EMISSION index 1251670476..a9bad08e8b 100755 --- a/jobs/JAQM_FIRE_EMISSION +++ b/jobs/JAQM_FIRE_EMISSION @@ -121,10 +121,7 @@ startmsg 
${HOMEaqm}/scripts/exaqm_fire_emission.sh export err=$?; err_chk # -if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf $DATA -fi + # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_FORECAST b/jobs/JAQM_FORECAST index e17b13c26c..dffcc3c322 100755 --- a/jobs/JAQM_FORECAST +++ b/jobs/JAQM_FORECAST @@ -94,9 +94,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_forecast.sh export err=$?; err_chk # -if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} -fi + # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_ICS b/jobs/JAQM_ICS index 1cb54e7b9c..0c0810adca 100755 --- a/jobs/JAQM_ICS +++ b/jobs/JAQM_ICS @@ -99,10 +99,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_ics.sh export err=$?; err_chk # -if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf $DATA -fi + # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_LBCS b/jobs/JAQM_LBCS index 8e5515a4aa..fe0fdae697 100755 --- a/jobs/JAQM_LBCS +++ b/jobs/JAQM_LBCS @@ -102,10 +102,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_lbcs.sh export err=$?; err_chk # -if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf $DATA -fi + # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_MAKE_ICS b/jobs/JAQM_MAKE_ICS index df8cf5a99c..9a933389ab 100755 --- a/jobs/JAQM_MAKE_ICS +++ b/jobs/JAQM_MAKE_ICS @@ -118,10 +118,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_make_ics.sh export err=$?; err_chk # -if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf $DATA -fi + # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_MAKE_LBCS b/jobs/JAQM_MAKE_LBCS index 566296b26d..3efb751e63 100755 --- a/jobs/JAQM_MAKE_LBCS +++ 
b/jobs/JAQM_MAKE_LBCS @@ -122,10 +122,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_make_lbcs.sh export err=$?; err_chk # -if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf $DATA -fi + # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_NEXUS_EMISSION b/jobs/JAQM_NEXUS_EMISSION index d9b4e4e5f6..d4a4eca6de 100755 --- a/jobs/JAQM_NEXUS_EMISSION +++ b/jobs/JAQM_NEXUS_EMISSION @@ -119,10 +119,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_nexus_emission.sh export err=$?; err_chk # -if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf $DATA -fi + # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_NEXUS_POST_SPLIT b/jobs/JAQM_NEXUS_POST_SPLIT index e48efea9ac..29a1a4a490 100755 --- a/jobs/JAQM_NEXUS_POST_SPLIT +++ b/jobs/JAQM_NEXUS_POST_SPLIT @@ -108,10 +108,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_nexus_post_split.sh export err=$?; err_chk # -if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf $DATA -fi + # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_POINT_SOURCE b/jobs/JAQM_POINT_SOURCE index d205fee77a..731ed53d30 100755 --- a/jobs/JAQM_POINT_SOURCE +++ b/jobs/JAQM_POINT_SOURCE @@ -95,10 +95,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_point_source.sh export err=$?; err_chk # -if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf $DATA -fi + # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_POST b/jobs/JAQM_POST index a7f3e15e5c..f89fa8ae5a 100755 --- a/jobs/JAQM_POST +++ b/jobs/JAQM_POST @@ -146,19 +146,19 @@ if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then fi # fcst_len_hrs=$( printf "%03d" "${FCST_LEN_HRS}" ) -if [ "${fhr}" = "${fcst_len_hrs}" ]; then - if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf 
$DATA - share_pid="${WORKFLOW_ID}_${PDY}${cyc}" - rm -rf *${share_pid} - fi -else - if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf $DATA - fi -fi +#if [ "${fhr}" = "${fcst_len_hrs}" ]; then +# if [ "${KEEPDATA}" = "FALSE" ]; then +# cd ${DATAROOT} +# rm -rf $DATA +# share_pid="${WORKFLOW_ID}_${PDY}${cyc}" +# rm -rf *${share_pid} +# fi +#else +# if [ "${KEEPDATA}" = "FALSE" ]; then +# cd ${DATAROOT} +# rm -rf $DATA +# fi +#fi # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_POST_STAT_O3 b/jobs/JAQM_POST_STAT_O3 index 5074f3ac2e..13fe59275c 100755 --- a/jobs/JAQM_POST_STAT_O3 +++ b/jobs/JAQM_POST_STAT_O3 @@ -95,10 +95,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_post_stat_o3.sh export err=$?; err_chk # -if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf $DATA -fi + # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_POST_STAT_PM25 b/jobs/JAQM_POST_STAT_PM25 index bdbdbdd2dd..c0b8fb627b 100755 --- a/jobs/JAQM_POST_STAT_PM25 +++ b/jobs/JAQM_POST_STAT_PM25 @@ -96,10 +96,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_post_stat_pm25.sh export err=$?; err_chk # -if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf $DATA -fi + # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_PRE_POST_STAT b/jobs/JAQM_PRE_POST_STAT index 2b3b03585b..724039e2ac 100755 --- a/jobs/JAQM_PRE_POST_STAT +++ b/jobs/JAQM_PRE_POST_STAT @@ -87,10 +87,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_pre_post_stat.sh export err=$?; err_chk # -if [ "${KEEPDATA}" = "FALSE" ]; then - cd ${DATAROOT} - rm -rf $DATA -fi + # Print exit message print_info_msg " ======================================================================== diff --git a/modulefiles/srw_common.lua b/modulefiles/srw_common.lua deleted file mode 100644 index 
9eb529294c..0000000000 --- a/modulefiles/srw_common.lua +++ /dev/null @@ -1,31 +0,0 @@ -load("jasper/2.0.25") -load("zlib/1.2.11") -load_any("png/1.6.35", "libpng/1.6.37") - -load_any("netcdf/4.7.4", "netcdf-c/4.7.4") -load_any("netcdf/4.7.4", "netcdf-fortran/4.5.4") -load_any("pio/2.5.7", "parallelio/2.5.2") -load_any("esmf/8.3.0b09", "esmf/8.2.0") -load("fms/2022.04") - -load("bufr/11.7.0") -load("bacio/2.4.1") -load("crtm/2.4.0") -load("g2/3.4.5") -load("g2tmpl/1.10.2") -load("ip/3.3.3") -load("sp/2.3.3") -load("w3emc/2.9.2") - -load_any("gftl-shared/v1.5.0", "gftl-shared/1.5.0") -load_any("yafyaml/v0.5.1", "yafyaml/0.5.1") -load_any("mapl/2.22.0-esmf-8.3.0b09", "mapl/2.11.0-esmf-8.2.0") - -load("nemsio/2.5.4") -load("sfcio/1.4.1") -load("sigio/2.3.2") -load("w3nco/2.4.1") -load("wrf_io/1.2.0") - -load("ncio/1.1.2") -load("wgrib2/2.0.8") diff --git a/modulefiles/srw_common_spack.lua b/modulefiles/srw_common_spack.lua deleted file mode 100644 index 8e0a607a9d..0000000000 --- a/modulefiles/srw_common_spack.lua +++ /dev/null @@ -1,30 +0,0 @@ -load("jasper/2.0.25") -load("zlib/1.2.11") -load("libpng/1.6.37") -load("netcdf-c/4.7.4") -load("netcdf-fortran/4.5.4") -load("pio/2.5.3") -load("esmf/8.3.0b09") -load("fms/2022.01") - -load("bufr/11.7.0") -load("bacio/2.4.1") -load("crtm/2.3.0") -load("g2/3.4.5") -load("g2tmpl/1.10.0") -load("ip/3.3.3") -load("sp/2.3.3") -load("w3emc/2.9.2") - -load("gftl-shared/1.5.0") -load("yafyaml/0.5.1") -load("mapl/2.22.0-esmf-8.3.0b09-esmf-8.3.0") - -load("nemsio/2.5.4") -load("sfcio/1.4.1") -load("sigio/2.3.2") -load("w3nco/2.4.1") -load("wrf_io/1.2.0") - -load("ncio/1.1.2") -load("wgrib2/2.0.8") diff --git a/modulefiles/wflow_wcoss2.lua b/modulefiles/wflow_wcoss2.lua deleted file mode 100644 index a061b93323..0000000000 --- a/modulefiles/wflow_wcoss2.lua +++ /dev/null @@ -1,13 +0,0 @@ -help([[ -This module loads python environement for running the UFS SRW App on -the NOAA operational machine WCOSS2 (Cactus/Dogwood)" -]]) - 
-whatis([===[Loads libraries needed for running the UFS SRW App on WCOSS2 ]===]) - -load(pathJoin("intel", os.getenv("intel_ver"))) -load(pathJoin("python", os.getenv("python_ver"))) - -prepend_path("MODULEPATH","/apps/ops/test/nco/modulefiles") -load(pathJoin("core/rocoto", os.getenv("rocoto_ver"))) - diff --git a/parm/machine/wcoss2.yaml b/parm/machine/wcoss2.yaml deleted file mode 100644 index f67b202b71..0000000000 --- a/parm/machine/wcoss2.yaml +++ /dev/null @@ -1,43 +0,0 @@ -platform: - WORKFLOW_MANAGER: rocoto - NCORES_PER_NODE: 128 - SCHED: pbspro - FIXaqm_sav: /lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/fix - METPLUS_PATH: /apps/ops/para/libs/intel/19.1.3.304/metplus/4.1.1 - MET_BIN_EXEC: bin - MET_INSTALL_DIR: /apps/ops/para/libs/intel/19.1.3.304/met/10.1.1 - DOMAIN_PREGEN_BASEDIR: ${FIXaqm}/ufs/FV3LAM_pregen - QUEUE_DEFAULT: dev - QUEUE_FCST: dev - QUEUE_HPSS: dev_transfer - RUN_CMD_FCST: mpiexec -n ${PE_MEMBER01} -ppn ${PPN_RUN_FCST} --cpu-bind core -depth ${OMP_NUM_THREADS_RUN_FCST} - RUN_CMD_POST: mpiexec -n ${nprocs} - RUN_CMD_PRDGEN: mpiexec -n ${nprocs} --cpu-bind core cfp - RUN_CMD_SERIAL: time - RUN_CMD_UTILS: mpiexec -n ${nprocs} - RUN_CMD_AQM: mpiexec -n ${nprocs} -ppn ${ppn_run_aqm} --cpu-bind core -depth ${omp_num_threads_run_aqm} - RUN_CMD_AQMLBC: mpiexec -n ${NUMTS} - SCHED_NATIVE_CMD: -l place=excl - PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' - FIXaer: /lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/fix/fix_aer - FIXgsm: /lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/fix/fix_am - FIXlut: /lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/fix/fix_lut - FIXorg: /lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/fix/fix_orog - FIXsfc: /lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/fix/fix_sfc_climo - FIXshp: /lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/fix/ufs/NaturalEarth - EXTRN_MDL_DATA_STORES: hpss aws nomads -data: - ics_lbcs: - GSMGFS: compath.py ${envir}/gsmgfs/${gsmgfs_ver}/gsmgfs.${PDYext} - FV3GFS: compath.py 
${envir}/gfs/${gfs_ver}/gfs.${PDYext}/${cycext}/atmos - RAP: compath.py ${envir}/rap/${rap_ver}/rap.${PDYext} - NAM: compath.py ${envir}/nam/${nam_ver}/nam.${PDYext} - HRRR: compath.py ${envir}/hrrr/${hrrr_ver}/hrrr.${PDYext}/conus -cpl_aqm_parm: - COMINgefs: /lfs/h1/ops/prod/com/gefs/v12.3 - DCOMINairnow: /lfs/h1/ops/prod/dcom - COMINemis: /lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/emissions - COMINemispt: /lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/emissions/nei2016v1-pt - DCOMINfire: /lfs/h1/ops/prod/dcom - COMINbicor: /lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/bias_correction/aqmv7.0.88b - COMOUTbicor: /lfs/h2/emc/physics/noscrub/UFS_SRW_App/aqm.v7/bias_correction/aqmv7.0.88b diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh index 6b092c56b9..0fb80f7f45 100755 --- a/ush/job_preamble.sh +++ b/ush/job_preamble.sh @@ -1,5 +1,6 @@ #!/bin/bash +set -ax # #----------------------------------------------------------------------- # If requested to share data with next task, override jobid diff --git a/ush/module_wcoss2 b/ush/module_wcoss2 deleted file mode 100755 index 137684856b..0000000000 --- a/ush/module_wcoss2 +++ /dev/null @@ -1,3 +0,0 @@ -source ../versions/run.ver -module use ../modulefiles -module load wflow_wcoss2 diff --git a/ush/preamble.sh b/ush/preamble.sh index 0572905d55..c9b2069b1b 100755 --- a/ush/preamble.sh +++ b/ush/preamble.sh @@ -22,7 +22,7 @@ # #----------------------------------------------------------------------- # -set +x +set -x # #----------------------------------------------------------------------- diff --git a/versions/run.ver b/versions/run.ver index e1d4a30fea..2be391ac76 100644 --- a/versions/run.ver +++ b/versions/run.ver @@ -1,10 +1,8 @@ #!/bin/bash -export PrgEnv_intel_ver=8.1.0 export intel_ver=19.1.3.304 export craype_ver=2.7.13 export cray_mpich_ver=8.1.12 -export cmake_ver=3.20.2 export jasper_ver=2.0.25 export zlib_ver=1.2.11 export libpng_ver=1.6.37 @@ -20,7 +18,6 @@ export sp_ver=2.3.3 export w3emc_ver=2.9.2 
export pio_ver=2.5.10 export esmf_ver=8.4.1 -export w3nco_ver=2.4.1 export libjpeg_ver=9c export prod_util_ver=2.0.14 From a5d882d4e1c8d15e0abecb0fec7cc24f028b77b0 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Tue, 21 Nov 2023 15:52:52 +0000 Subject: [PATCH 09/24] delete ~//wmo for 00z and 12z cycle --- jobs/JAQM_BIAS_CORRECTION_O3 | 6 +++++- jobs/JAQM_BIAS_CORRECTION_PM25 | 6 +++++- jobs/JAQM_POST_STAT_O3 | 6 +++++- jobs/JAQM_POST_STAT_PM25 | 7 +++++++ 4 files changed, 22 insertions(+), 3 deletions(-) diff --git a/jobs/JAQM_BIAS_CORRECTION_O3 b/jobs/JAQM_BIAS_CORRECTION_O3 index e5cfd1b8b3..1d765e8527 100755 --- a/jobs/JAQM_BIAS_CORRECTION_O3 +++ b/jobs/JAQM_BIAS_CORRECTION_O3 @@ -87,7 +87,11 @@ This is the J-job script for the task that runs BIAS-CORRECTION-O3. # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_BIAS_CORRECTION_O3}" -mkdir -p ${PCOM} +if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then + if [ ! -d "${PCOM}" ]; then + mkdir -p "${PCOM}" + fi +fi export PARMaqm_utils="${PARMaqm_utils:-${HOMEaqm}/sorc/AQM-utils/parm}" diff --git a/jobs/JAQM_BIAS_CORRECTION_PM25 b/jobs/JAQM_BIAS_CORRECTION_PM25 index 95d8ddfd31..f561dc605b 100755 --- a/jobs/JAQM_BIAS_CORRECTION_PM25 +++ b/jobs/JAQM_BIAS_CORRECTION_PM25 @@ -87,7 +87,11 @@ This is the J-job script for the task that runs BIAS-CORRECTION-PM25. # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_BIAS_CORRECTION_PM25}" -mkdir -p ${PCOM} +if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then + if [ ! -d "${PCOM}" ]; then + mkdir -p "${PCOM}" + fi +fi export PARMaqm_utils="${PARMaqm_utils:-${HOMEaqm}/sorc/AQM-utils/parm}" diff --git a/jobs/JAQM_POST_STAT_O3 b/jobs/JAQM_POST_STAT_O3 index 13fe59275c..4057b1c31d 100755 --- a/jobs/JAQM_POST_STAT_O3 +++ b/jobs/JAQM_POST_STAT_O3 @@ -82,7 +82,11 @@ This is the J-job script for the task that runs POST-STAT-O3. # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POST_STAT_O3}" -mkdir -p ${PCOM} +if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then + if [ ! 
-d "${PCOM}" ]; then + mkdir -p "${PCOM}" + fi +fi export PARMaqm_utils="${PARMaqm_utils:-${HOMEaqm}/sorc/AQM-utils/parm}" # diff --git a/jobs/JAQM_POST_STAT_PM25 b/jobs/JAQM_POST_STAT_PM25 index c0b8fb627b..1de3adc97a 100755 --- a/jobs/JAQM_POST_STAT_PM25 +++ b/jobs/JAQM_POST_STAT_PM25 @@ -85,6 +85,13 @@ DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POST_STAT_PM25}" mkdir -p ${PCOM} +if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then + if [ ! -d "${PCOM}" ]; then + mkdir -p "${PCOM}" + fi +fi + + export PARMaqm_utils="${PARMaqm_utils:-${HOMEaqm}/sorc/AQM-utils/parm}" # #----------------------------------------------------------------------- From b2d9a6a10bc5dd64d8fbc29d3feb6d63c19ea527 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Wed, 22 Nov 2023 00:28:56 +0000 Subject: [PATCH 10/24] update data for J-job scripts --- jobs/JAQM_DATA_CLEANUP | 1 - jobs/JAQM_FORECAST | 9 +- scripts/exaqm_bias_correction_o3.sh | 8 +- scripts/exaqm_bias_correction_pm25.sh | 6 +- scripts/exaqm_forecast.sh | 14 +- scripts/exaqm_post.sh | 2 +- scripts/exaqm_post.sh_orig | 326 ++++++++++++++++++++++++++ scripts/exaqm_post_stat_o3.sh | 5 +- scripts/exaqm_post_stat_pm25.sh | 9 +- ush/source_util_funcs.sh | 2 +- 10 files changed, 348 insertions(+), 34 deletions(-) create mode 100755 scripts/exaqm_post.sh_orig diff --git a/jobs/JAQM_DATA_CLEANUP b/jobs/JAQM_DATA_CLEANUP index 5505559b0d..281a01433f 100755 --- a/jobs/JAQM_DATA_CLEANUP +++ b/jobs/JAQM_DATA_CLEANUP @@ -24,7 +24,6 @@ cd $DATA ############################################## # Run setpdy and initialize PDY variables ############################################## -export cycle="t${cyc}z" export CDATE=${PDY}${cyc} setpdy.sh diff --git a/jobs/JAQM_FORECAST b/jobs/JAQM_FORECAST index dffcc3c322..686c88553c 100755 --- a/jobs/JAQM_FORECAST +++ b/jobs/JAQM_FORECAST @@ -15,9 +15,14 @@ export subcyc=0 . ${PARMaqm}/config/var_defns.sh . 
$USHaqm/source_util_funcs.sh + +export job=JAQM_FORECAST +export share_pid=${share_pid:-${PDY}${cyc}} +export pid=$share_pid +export jobid=${job}.${pid} + export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} - mkdir -p ${DATA} cd ${DATA} @@ -79,7 +84,7 @@ the specified cycle. # Create the INPUT and RESTART directories under the run directory. #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" + mkdir -p ${DATA}/INPUT mkdir -p ${DATA}/RESTART diff --git a/scripts/exaqm_bias_correction_o3.sh b/scripts/exaqm_bias_correction_o3.sh index 5f73cb08bd..542202c9a4 100755 --- a/scripts/exaqm_bias_correction_o3.sh +++ b/scripts/exaqm_bias_correction_o3.sh @@ -348,7 +348,7 @@ EOF1 cp ${DATA}/${NET}.${cycle}.max_*hr_o3_bc.*.grib2 ${COMOUT} - if [ "$SENDDBN" = "TRUE" ]; then + if [ "$SENDDBN" = "YES" ]; then ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_1hr_o3_bc.227.grib2 ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_8hr_o3_bc.227.grib2 fi @@ -376,7 +376,7 @@ EOF1 cp awpaqm.${cycle}.*o3-max-bc.227.grib2 ${PCOM} # Distribute Data - if [ "${SENDDBN_NTC}" = "TRUE" ] ; then + if [ "${SENDDBN_NTC}" = "YES" ] ; then ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.1ho3-max-bc.227.grib2 ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.8ho3-max-bc.227.grib2 fi @@ -423,7 +423,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then cp ${NET}.${cycle}.ave_8hr_o3_bc.227.grib2 ${COMOUT} fi -if [ "${SENDDBN}" = "TRUE" ] ; then +if [ "${SENDDBN}" = "YES" ] ; then ${DBNROOT}/bin/dbn_alert MODEL AQM_CONC ${job} ${COMOUT}/${NET}.${cycle}.ave_1hr_o3_bc.227.grib2 if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then ${DBNROOT}/bin/dbn_alert MODEL AQM_CONC ${job} ${COMOUT}/${NET}.${cycle}.ave_8hr_o3_bc.227.grib2 @@ -475,7 +475,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = 
"12" ]; then cp awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 ${PCOM} # Distribute Data - if [ "${SENDDBN}" = "TRUE" ]; then + if [ "${SENDDBN}" = "YES" ]; then ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.${hr}ho3-bc.227.grib2 ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 fi diff --git a/scripts/exaqm_bias_correction_pm25.sh b/scripts/exaqm_bias_correction_pm25.sh index 2edc049008..d1a8efc0c0 100755 --- a/scripts/exaqm_bias_correction_pm25.sh +++ b/scripts/exaqm_bias_correction_pm25.sh @@ -338,7 +338,7 @@ EOF1 cp ${NET}.${cycle}.max_1hr_pm25_bc.227.grib2 ${COMOUT} cp ${NET}.${cycle}.ave_24hr_pm25_bc.227.grib2 ${COMOUT} - if [ "${SENDDBN}" = "TRUE" ]; then + if [ "${SENDDBN}" = "YES" ]; then ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_1hr_pm25_bc.227.grib2 ${DBNROOT}/bin/dbn_alert MODEL AQM_PM ${job} ${COMOUT}/${NET}.${cycle}.ave_24hr_pm25_bc.227.grib2 fi @@ -356,7 +356,7 @@ wgrib2 tmpfile_pm25_bc -set_grib_type c3b -new_grid_winds earth -new_grid ${grid cp tmpfile_pm25_bc ${COMOUT}/${NET}.${cycle}.ave_1hr_pm25_bc.${id_domain}.grib2 cp ${NET}.${cycle}.grib2_pm25_bc.227 ${COMOUT}/${NET}.${cycle}.ave_1hr_pm25_bc.227.grib2 -if [ "${SENDDBN}" = "TRUE" ]; then +if [ "${SENDDBN}" = "YES" ]; then ${DBNROOT}/bin/dbn_alert MODEL AQM_PM ${job} ${COMOUT}/${NET}.${cycle}.ave_1hr_pm25_bc.227.grib2 fi @@ -424,7 +424,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then cp awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 ${PCOM} # Distribute Data - if [ "${SENDDBN_NTC}" = "TRUE" ] ; then + if [ "${SENDDBN_NTC}" = "YES" ] ; then ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.1hpm25-bc.227.grib2 ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 diff --git 
a/scripts/exaqm_forecast.sh b/scripts/exaqm_forecast.sh index f79bb05863..01ebd92943 100755 --- a/scripts/exaqm_forecast.sh +++ b/scripts/exaqm_forecast.sh @@ -131,16 +131,6 @@ symlink="grid_spec.nc" create_symlink_to_file target="$target" symlink="$symlink" \ relative="${relative_link_flag}" -## Symlink to halo-3 grid file with "halo3" stripped from name. -#target="${FIXlam}/${CRES}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.halo${NH3}.nc" -#if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "TRUE" ] && \ -# [ "${GRID_GEN_METHOD}" = "GFDLgrid" ] && \ -# [ "${GFDLgrid_USE_NUM_CELLS_IN_FILENAMES}" = "FALSE" ]; then -# symlink="C${GFDLgrid_NUM_CELLS}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.nc" -#else -# symlink="${CRES}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.nc" -#fi - # Symlink to halo-3 grid file with "halo3" stripped from name. mosaic_fn="grid_spec.nc" grid_fn=$( get_charvar_from_netcdf "${mosaic_fn}" "gridfiles" ) @@ -155,7 +145,7 @@ create_symlink_to_file target="$target" symlink="$symlink" \ # If this link is not created, then the code hangs with an error message # like this: # -# check netcdf status= 2 +# check netcdf status= 2 # NetCDF error No such file or directory # Stopped # @@ -707,7 +697,7 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then mv ${DATA}/${post_orig_fn} ${post_renamed_fn} # DBN alert - if [ $SENDDBN = "TRUE" ]; then + if [ $SENDDBN = "YES" ]; then $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn} fi done diff --git a/scripts/exaqm_post.sh b/scripts/exaqm_post.sh index ffda82a371..b6555a8ebb 100755 --- a/scripts/exaqm_post.sh +++ b/scripts/exaqm_post.sh @@ -277,7 +277,7 @@ for fid in "${fids[@]}"; do mv ${DATA_FHR}/${post_orig_fn} ${post_renamed_fn} # DBN alert - if [ $SENDDBN = "TRUE" ]; then + if [ $SENDDBN = "YES" ]; then $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn} fi done diff --git a/scripts/exaqm_post.sh_orig b/scripts/exaqm_post.sh_orig new file mode 100755 index 0000000000..b43b0a2b01 --- /dev/null +++ 
b/scripts/exaqm_post.sh_orig @@ -0,0 +1,326 @@ +#!/bin/bash + +set -xe + +msg="JOB $job HAS BEGUN" +postmsg "$msg" + +export pgm=aqm_post + +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. +# +#----------------------------------------------------------------------- +# +. $USHaqm/source_util_funcs.sh +source_config_for_task "task_run_post" ${GLOBAL_VAR_DEFNS_FP} +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# +{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " +======================================================================== +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + +This is the ex-script for the task that runs the post-processor (UPP) on +the output files corresponding to a specified forecast hour. +========================================================================" +# +#----------------------------------------------------------------------- +# +# Set OpenMP variables. 
+# +#----------------------------------------------------------------------- +# +export KMP_AFFINITY=${KMP_AFFINITY_RUN_POST} +export OMP_NUM_THREADS=${OMP_NUM_THREADS_RUN_POST} +export OMP_STACKSIZE=${OMP_STACKSIZE_RUN_POST} +# +#----------------------------------------------------------------------- +# +# Load modules. +# +#----------------------------------------------------------------------- +# +eval ${PRE_TASK_CMDS} + +nprocs=$(( NNODES_RUN_POST*PPN_RUN_POST )) +if [ -z "${RUN_CMD_POST:-}" ] ; then + print_err_msg_exit "\ + Run command was not set in machine file. \ + Please set RUN_CMD_POST for your platform" +else + print_info_msg "$VERBOSE" " + All executables will be submitted with command \'${RUN_CMD_POST}\'." +fi +# +#----------------------------------------------------------------------- +# +# Remove any files from previous runs and stage necessary files in the +# temporary work directory specified by DATA_FHR. +# +#----------------------------------------------------------------------- +# +rm -f fort.* +cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat +if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then + post_config_fp="${CUSTOM_POST_CONFIG_FP}" + print_info_msg " +==================================================================== +Copying the user-defined post flat file specified by CUSTOM_POST_CONFIG_FP +to the temporary work directory (DATA_FHR): + CUSTOM_POST_CONFIG_FP = \"${CUSTOM_POST_CONFIG_FP}\" + DATA_FHR = \"${DATA_FHR}\" +====================================================================" +else + if [ "${CPL_AQM}" = "TRUE" ]; then + post_config_fp="${PARMdir}/upp/postxconfig-NT-AQM.txt" + else + post_config_fp="${PARMdir}/upp/postxconfig-NT-fv3lam.txt" + fi + print_info_msg " +==================================================================== +Copying the default post flat file specified by post_config_fp to the +temporary work directory (DATA_FHR): + post_config_fp = \"${post_config_fp}\" + DATA_FHR = \"${DATA_FHR}\" 
+====================================================================" +fi +cp ${post_config_fp} ./postxconfig-NT.txt +cp ${PARMdir}/upp/params_grib2_tbl_new . +if [ ${USE_CRTM} = "TRUE" ]; then + cp ${CRTM_DIR}/fix/EmisCoeff/IR_Water/Big_Endian/Nalli.IRwater.EmisCoeff.bin ./ + cp ${CRTM_DIR}/fix/EmisCoeff/MW_Water/Big_Endian/FAST*.bin ./ + cp ${CRTM_DIR}/fix/EmisCoeff/IR_Land/SEcategory/Big_Endian/NPOESS.IRland.EmisCoeff.bin ./ + cp ${CRTM_DIR}/fix/EmisCoeff/IR_Snow/SEcategory/Big_Endian/NPOESS.IRsnow.EmisCoeff.bin ./ + cp ${CRTM_DIR}/fix/EmisCoeff/IR_Ice/SEcategory/Big_Endian/NPOESS.IRice.EmisCoeff.bin ./ + cp ${CRTM_DIR}/fix/AerosolCoeff/Big_Endian/AerosolCoeff.bin ./ + cp ${CRTM_DIR}/fix/CloudCoeff/Big_Endian/CloudCoeff.bin ./ + cp ${CRTM_DIR}/fix/SpcCoeff/Big_Endian/*.bin ./ + cp ${CRTM_DIR}/fix/TauCoeff/ODPS/Big_Endian/*.bin ./ + print_info_msg " +==================================================================== +Copying the external CRTM fix files from CRTM_DIR to the temporary +work directory (DATA_FHR): + CRTM_DIR = \"${CRTM_DIR}\" + DATA_FHR = \"${DATA_FHR}\" +====================================================================" +fi +# +#----------------------------------------------------------------------- +# +# Get the cycle date and hour (in formats of yyyymmdd and hh, respectively) +# from CDATE. +# +#----------------------------------------------------------------------- +# +yyyymmdd=${PDY} +hh=${cyc} +# +#----------------------------------------------------------------------- +# +# Create the namelist file (itag) containing arguments to pass to the post- +# processor's executable. +# +#----------------------------------------------------------------------- +# +# Set the variable (mnts_secs_str) that determines the suffix in the names +# of the forecast model's write-component output files that specifies the +# minutes and seconds of the corresponding output forecast time. 
+# +# Note that if the forecast model is instructed to output at some hourly +# interval (via the output_fh parameter in the MODEL_CONFIG_FN file, +# with nsout set to a non-positive value), then the write-component +# output file names will not contain any suffix for the minutes and seconds. +# For this reason, when SUB_HOURLY_POST is not set to "TRUE", mnts_sec_str +# must be set to a null string. +# +mnts_secs_str="" +if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then + if [ ${fhr}${fmn} = "00000" ]; then + mnts_secs_str=":"$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${DT_ATMOS} seconds" "+%M:%S" ) + else + mnts_secs_str=":${fmn}:00" + fi +fi +# +# Set the names of the forecast model's write-component output files. +# +if [ "${WORKFLOW_MANAGER}" = "ecflow" ]; then + DATAFCST=$DATAROOT/${RUN}_forecast${dot_ensmem/./_}_${cyc}.${share_pid} + if [ ! -d ${DATAFCST} ]; then + echo "Fatal error DATAFCST not found in production mode" + exit 7 + fi +else + DATAFCST=$DATAROOT/run_fcst${dot_ensmem/./_}.${share_pid} +fi + +if [ "${CPL_AQM}" = "TRUE" ]; then + dyn_file="${COMIN}/${cyc}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}${mnts_secs_str}.nc" + phy_file="${COMIN}/${cyc}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}${mnts_secs_str}.nc" +else + dyn_file="${DATAFCST}/dynf${fhr}${mnts_secs_str}.nc" + phy_file="${DATAFCST}/phyf${fhr}${mnts_secs_str}.nc" +fi +# +# Set parameters that specify the actual time (not forecast time) of the +# output. +# +post_time=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${fhr} hours + ${fmn} minutes" "+%Y%m%d%H%M" ) +post_yyyy=${post_time:0:4} +post_mm=${post_time:4:2} +post_dd=${post_time:6:2} +post_hh=${post_time:8:2} +post_mn=${post_time:10:2} +# +# Create the input namelist file to the post-processor executable. 
+# +if [ "${CPL_AQM}" = "TRUE" ]; then + post_itag_add="aqf_on=.true.," +else + post_itag_add="" +fi +cat > itag <> $pgmout 2>errfile +export err=$?; err_chk +# +#----------------------------------------------------------------------- +# +# Move and rename the output files from the work directory to their final +# location in COMOUT. Also, create symlinks in COMOUT to the +# grib2 files that are needed by the data services group. Then delete +# the work directory. +# +#----------------------------------------------------------------------- +# +# Set variables needed in constructing the names of the grib2 files +# generated by UPP. +# +len_fhr=${#fhr} +if [ ${len_fhr} -eq 2 ]; then + post_fhr=${fhr} +elif [ ${len_fhr} -eq 3 ]; then + if [ "${fhr:0:1}" = "0" ]; then + post_fhr="${fhr:1}" + else + post_fhr="${fhr}" + fi +else + print_err_msg_exit "\ +The \${fhr} variable contains too few or too many characters: + fhr = \"$fhr\"" +fi + +post_mn_or_null="" +dot_post_mn_or_null="" +if [ "${post_mn}" != "00" ]; then + post_mn_or_null="${post_mn}" + dot_post_mn_or_null=".${post_mn}" +fi + +post_fn_suffix="GrbF${post_fhr}${dot_post_mn_or_null}" +post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.grib2" +# +# For convenience, change location to COMOUT (where the final output +# from UPP will be located). Then loop through the two files that UPP +# generates (i.e. "...prslev..." and "...natlev..." files) and move, +# rename, and create symlinks to them. 
+# +cd "${COMOUT}" +basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M ) +symlink_suffix="${dot_ensmem}.${basetime}f${fhr}${post_mn}" +if [ "${CPL_AQM}" = "TRUE" ]; then + fids=( "cmaq" ) +else + fids=( "prslev" "natlev" ) +fi +for fid in "${fids[@]}"; do + FID=$(echo_uppercase $fid) + post_orig_fn="${FID}.${post_fn_suffix}" + post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}" + mv ${DATA_FHR}/${post_orig_fn} ${post_renamed_fn} + + # DBN alert + if [ $SENDDBN = "TRUE" ]; then + $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn} + fi +done + +rm -rf ${DATA_FHR} + +# +#----------------------------------------------------------------------- +# +# Print message indicating successful completion of script. +# +#----------------------------------------------------------------------- +# +print_info_msg " +======================================================================== +Post-processing for forecast hour $fhr completed successfully. + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. 
+# +#----------------------------------------------------------------------- +# +{ restore_shell_opts; } > /dev/null 2>&1 + diff --git a/scripts/exaqm_post_stat_o3.sh b/scripts/exaqm_post_stat_o3.sh index e348751bf6..c94e223da0 100755 --- a/scripts/exaqm_post_stat_o3.sh +++ b/scripts/exaqm_post_stat_o3.sh @@ -130,7 +130,6 @@ while [ ${fhr} -le ${FCST_LEN_HRS} ]; do done grid227="lambert:265.0000:25.0000:25.0000 226.5410:1473:5079.000 12.1900:1025:5079.000" -#grid148="lambert:263.0000:33.0000:45.0000 239.3720:442:12000.000 21.8210:265:12000.000" grid196="mercator:20.0000 198.4750:321:2500.000:206.1310 18.0730:255:2500.000:23.0880" grid198="nps:210.0000:60.0000 181.4290:825:5953.000 40.5300:553:5953.000" @@ -246,7 +245,7 @@ EOF1 wgrib2 ${NET}.${cycle}.max_1hr_o3.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} ${NET}.${cycle}.max_1hr_o3.${grid}.grib2 cp ${DATA}/${NET}.${cycle}.max_*hr_o3.*.grib2 ${COMOUT} - if [ "$SENDDBN" = "TRUE" ]; then + if [ "$SENDDBN" = "YES" ]; then ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_1hr_o3.${grid}.grib2 ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_8hr_o3.${grid}.grib2 fi @@ -270,7 +269,7 @@ EOF1 done cp awpaqm.${cycle}.*o3-max.${grid}.grib2 ${PCOM} - if [ "${SENDDBN_NTC}" = "TRUE" ]; then + if [ "${SENDDBN_NTC}" = "YES" ]; then ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.1ho3-max.${grid}.grib2 ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.8ho3-max.${grid}.grib2 fi diff --git a/scripts/exaqm_post_stat_pm25.sh b/scripts/exaqm_post_stat_pm25.sh index 6e67bab1b9..a553ee7664 100755 --- a/scripts/exaqm_post_stat_pm25.sh +++ b/scripts/exaqm_post_stat_pm25.sh @@ -149,11 +149,6 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then # Post Files to PCOM cp awpaqm.${cycle}.1hpm25.${grid}.grib2 ${PCOM} - # Distribute Data -# if [ "${SENDDBN_NTC}" = "TRUE" ] ; then -# 
${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.1hpm25.${grid}.grib2 -# ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 -# fi done fi @@ -272,12 +267,12 @@ EOF1 # Distribute Data ############################## - if [ "${SENDDBN_NTC}" = "TRUE" ] ; then + if [ "${SENDDBN_NTC}" = "YES" ] ; then ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.1hpm25.${grid}.grib2 ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${PCOM}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 fi - if [ "$SENDDBN" = "TRUE" ]; then + if [ "$SENDDBN" = "YES" ]; then ${DBNROOT}/bin/dbn_alert MODEL AQM_PM ${job} ${PCOM}/awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${PCOM}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 fi diff --git a/ush/source_util_funcs.sh b/ush/source_util_funcs.sh index fa097de34d..6e44090e03 100755 --- a/ush/source_util_funcs.sh +++ b/ush/source_util_funcs.sh @@ -49,7 +49,7 @@ function source_util_funcs() { # #----------------------------------------------------------------------- # - . ${bashutils_dir}/define_macos_utilities.sh + . 
${bashutils_dir}/define_macos_utilities.sh # #----------------------------------------------------------------------- # From c4e4c14751eb2ff8c8554760a46d5e82fe1675a8 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Fri, 24 Nov 2023 19:55:18 +0000 Subject: [PATCH 11/24] get rid of preamble.sh and DATE_UTIL, refine run.ver and build.ver --- .../nexus/jaqm_nexus_emission_master.ecf | 2 +- ecf/scripts/nexus/jaqm_nexus_post_split.ecf | 1 - ecf/scripts/prep/jaqm_ics.ecf | 1 - ecf/scripts/prep/jaqm_make_ics.ecf | 1 - ecf/scripts/prep/jaqm_make_lbcs.ecf | 1 - .../pts_fire_emis/jaqm_fire_emission.ecf | 2 - .../pts_fire_emis/jaqm_point_source.ecf | 1 - jobs/JAQM_BIAS_CORRECTION_O3 | 10 +- jobs/JAQM_BIAS_CORRECTION_PM25 | 9 +- jobs/JAQM_FIRE_EMISSION | 25 +- jobs/JAQM_FORECAST | 5 - jobs/JAQM_ICS | 7 - jobs/JAQM_LBCS | 8 - jobs/JAQM_MAKE_ICS | 10 +- jobs/JAQM_MAKE_LBCS | 11 +- jobs/JAQM_NEXUS_EMISSION | 10 +- jobs/JAQM_NEXUS_POST_SPLIT | 8 - jobs/JAQM_POINT_SOURCE | 9 +- jobs/JAQM_POST | 9 +- jobs/JAQM_POST_STAT_O3 | 7 - jobs/JAQM_POST_STAT_PM25 | 7 - jobs/JAQM_PRE_POST_STAT | 7 - parm/config/var_defns.sh.nco.static | 2 +- scripts/exaqm_fire_emission.sh | 21 +- scripts/exaqm_forecast.sh | 56 +--- scripts/exaqm_lbcs.sh | 2 +- scripts/exaqm_make_lbcs.sh | 2 +- scripts/exaqm_nexus_emission.sh | 11 +- scripts/exaqm_nexus_post_split.sh | 4 +- scripts/exaqm_post.sh | 17 +- sorc/Externals.cfg | 5 +- ush/bash_utils/define_macos_utilities.sh | 12 - ush/bash_utils/eval_METplus_timestr_tmpl.sh | 265 ------------------ ush/source_util_funcs.sh | 4 +- ush/update_input_nml.py | 162 +++++++++++ versions/build.ver | 1 - versions/run.ver | 11 +- 37 files changed, 199 insertions(+), 527 deletions(-) delete mode 100644 ush/bash_utils/eval_METplus_timestr_tmpl.sh create mode 100755 ush/update_input_nml.py diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf index 8134ed996b..91ce48bacd 100644 --- 
a/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf +++ b/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf @@ -18,7 +18,7 @@ export cyc="%CYC%" ############################################################ module load intel/${intel_ver} module load craype/${craype_ver} -module load cray-mpich/${cray_mpich_ver} +#module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} diff --git a/ecf/scripts/nexus/jaqm_nexus_post_split.ecf b/ecf/scripts/nexus/jaqm_nexus_post_split.ecf index a57f5e2a54..0501620eea 100644 --- a/ecf/scripts/nexus/jaqm_nexus_post_split.ecf +++ b/ecf/scripts/nexus/jaqm_nexus_post_split.ecf @@ -17,7 +17,6 @@ export cyc="%CYC%" ############################################################ module load intel/${intel_ver} module load craype/${craype_ver} -module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} diff --git a/ecf/scripts/prep/jaqm_ics.ecf b/ecf/scripts/prep/jaqm_ics.ecf index b214131336..ad28218b11 100644 --- a/ecf/scripts/prep/jaqm_ics.ecf +++ b/ecf/scripts/prep/jaqm_ics.ecf @@ -17,7 +17,6 @@ export cyc="%CYC%" ############################################################ module load intel/${intel_ver} module load craype/${craype_ver} -module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} diff --git a/ecf/scripts/prep/jaqm_make_ics.ecf b/ecf/scripts/prep/jaqm_make_ics.ecf index 844eb3b69f..46a04c8869 100644 --- a/ecf/scripts/prep/jaqm_make_ics.ecf +++ b/ecf/scripts/prep/jaqm_make_ics.ecf @@ -18,7 +18,6 @@ export cyc="%CYC%" ############################################################ module load intel/${intel_ver} module load craype/${craype_ver} -module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} module load libjpeg/${libjpeg_ver} module load hdf5/${hdf5_ver} 
diff --git a/ecf/scripts/prep/jaqm_make_lbcs.ecf b/ecf/scripts/prep/jaqm_make_lbcs.ecf index ebbc16af47..53a172e05a 100644 --- a/ecf/scripts/prep/jaqm_make_lbcs.ecf +++ b/ecf/scripts/prep/jaqm_make_lbcs.ecf @@ -18,7 +18,6 @@ export cyc="%CYC%" ############################################################ module load intel/${intel_ver} module load craype/${craype_ver} -module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} module load libjpeg/${libjpeg_ver} module load hdf5/${hdf5_ver} diff --git a/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf b/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf index ab5b3f38e2..221fb47d4e 100644 --- a/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf +++ b/ecf/scripts/pts_fire_emis/jaqm_fire_emission.ecf @@ -17,8 +17,6 @@ export cyc="%CYC%" ############################################################ module load intel/${intel_ver} module load craype/${craype_ver} -module load cray-mpich/${cray_mpich_ver} -module load cray-pals/${cray_pals_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load udunits/${udunits_ver} diff --git a/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf b/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf index d8bf54ec47..ef7feca984 100644 --- a/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf +++ b/ecf/scripts/pts_fire_emis/jaqm_point_source.ecf @@ -17,7 +17,6 @@ export cyc="%CYC%" ############################################################ module load intel/${intel_ver} module load craype/${craype_ver} -module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} module load python/${python_ver} diff --git a/jobs/JAQM_BIAS_CORRECTION_O3 b/jobs/JAQM_BIAS_CORRECTION_O3 index 1d765e8527..48659f2390 100755 --- a/jobs/JAQM_BIAS_CORRECTION_O3 +++ b/jobs/JAQM_BIAS_CORRECTION_O3 @@ -50,13 +50,7 @@ export DCOMINairnow="${DCOMINairnow:-${DCOMROOT}}" export COMINbicor=${COMINbicor:-$(compath.py ${NET}/${model_ver}})} export 
COMOUTbicor=${COMOUTbicor:-$(compath.py -o ${NET}/${aqm_ver}})} -# -#----------------------------------------------------------------------- -# Save current shell options (in a global array). Then set new options -# for this script/function. -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 + # #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located @@ -99,10 +93,8 @@ TMP_STDAY=`${NDATE} -8760 ${PDY}${cyc} | cut -c1-8` # 1 year back export BC_STDAY=${BC_STDAY:-${TMP_STDAY}} # #----------------------------------------------------------------------- -# # Call the ex-script for this J-job and pass to it the necessary varia- # bles. -# #----------------------------------------------------------------------- # startmsg diff --git a/jobs/JAQM_BIAS_CORRECTION_PM25 b/jobs/JAQM_BIAS_CORRECTION_PM25 index f561dc605b..42eb07409f 100755 --- a/jobs/JAQM_BIAS_CORRECTION_PM25 +++ b/jobs/JAQM_BIAS_CORRECTION_PM25 @@ -51,14 +51,7 @@ export PCOM=${PCOM:-${COMOUT}/wmo} export COMINbicor=${COMINbicor:-$(compath.py ${NET}/${model_ver}})} export COMOUTbicor=${COMOUTbicor:-$(compath.py -o ${NET}/${aqm_ver}})} -# -#----------------------------------------------------------------------- -# Save current shell options (in a global array). Then set new options -# for this script/function. -#----------------------------------------------------------------------- -# -{ save_shell_opts; . 
$USHaqm/preamble.sh; } > /dev/null 2>&1 -# + #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in diff --git a/jobs/JAQM_FIRE_EMISSION b/jobs/JAQM_FIRE_EMISSION index a9bad08e8b..e7ab0ce4a5 100755 --- a/jobs/JAQM_FIRE_EMISSION +++ b/jobs/JAQM_FIRE_EMISSION @@ -43,14 +43,6 @@ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" -# -#----------------------------------------------------------------------- -# Save current shell options (in a global array). Then set new options -# for this script/function. -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 -# #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in @@ -82,23 +74,8 @@ emission data files from disk, or HPSS. export TIME_OFFSET_HRS=${AQM_FIRE_FILE_OFFSET_HRS:-0} yyyymmdd=${PDY} hh=${cyc} -export FIRE_FILE_CDATE=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - ${TIME_OFFSET_HRS} hours" "+%Y%m%d%H" ) +export FIRE_FILE_CDATE=`$NDATE -${TIME_OFFSET_HRS} $PDY$cyc` -# -#----------------------------------------------------------------------- -# Check whether FIRE EMISSION data files are available on the specified -# cycle date and time on HPSS (FIRE_FILE_CDATE). 
-#----------------------------------------------------------------------- -# -CDATE_min="2022101500" -if [ "$FIRE_FILE_CDATE" -lt "$CDATE_min" ]; then - print_info_msg " -======================================================================== -RAVE fire emission data are not available on HPSS for this date. -CDATE: \"${FIRE_FILE_CDATE}\" -CDATE_min: \"${CDATE_min}\" -========================================================================" -fi # #----------------------------------------------------------------------- # Set the run directory diff --git a/jobs/JAQM_FORECAST b/jobs/JAQM_FORECAST index 686c88553c..ad12376c5e 100755 --- a/jobs/JAQM_FORECAST +++ b/jobs/JAQM_FORECAST @@ -51,11 +51,6 @@ export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})} export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" -#----------------------------------------------------------------------- -# Save current shell options (in a global array). Then set new options -# for this script/function. -#----------------------------------------------------------------------- -{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in diff --git a/jobs/JAQM_ICS b/jobs/JAQM_ICS index 0c0810adca..1d7f862031 100755 --- a/jobs/JAQM_ICS +++ b/jobs/JAQM_ICS @@ -45,13 +45,6 @@ export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})} export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" -#----------------------------------------------------------------------- -# Save current shell options (in a global array). 
Then set new options -# for this script/function. -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 -# #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in diff --git a/jobs/JAQM_LBCS b/jobs/JAQM_LBCS index fe0fdae697..fe093bdca1 100755 --- a/jobs/JAQM_LBCS +++ b/jobs/JAQM_LBCS @@ -47,14 +47,6 @@ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc export COMINgefs="${COMINgefs:-$(compath.py ${envir}/gefs/${gefs_ver})}" export FIXaqmchem_lbcs="${FIXaqmchem_lbcs:-${HOMEaqm}/fix/chem_lbcs}" -# -#----------------------------------------------------------------------- -# Save current shell options (in a global array). Then set new options -# for this script/function. -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 -# #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in diff --git a/jobs/JAQM_MAKE_ICS b/jobs/JAQM_MAKE_ICS index 9a933389ab..10ab5b9c13 100755 --- a/jobs/JAQM_MAKE_ICS +++ b/jobs/JAQM_MAKE_ICS @@ -45,14 +45,6 @@ export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})} export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" -# -#----------------------------------------------------------------------- -# Save current shell options (in a global array). Then set new options -# for this script/function. -#----------------------------------------------------------------------- -# -{ save_shell_opts; . 
$USHaqm/preamble.sh; } > /dev/null 2>&1 -# #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in @@ -106,7 +98,7 @@ if [ "${RUN_TASK_GET_EXTRN_ICS}" = "FALSE" ]; then GFS_FILE_FMT=${FV3GFS_FILE_FMT_ICS} yyyymmdd=${PDY} hh=${cyc} - export EXTRN_MDL_CDATE=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - ${TIME_OFFSET_HRS} hours" "+%Y%m%d%H" ) + export EXTRN_MDL_CDATE=`$NDATE -${TIME_OFFSET_HRS} $PDY$cyc` export EXTRN_MDL_STAGING_DIR="${EXTRN_MDL_STAGING_DIR:-${DATA}}" fi # diff --git a/jobs/JAQM_MAKE_LBCS b/jobs/JAQM_MAKE_LBCS index 3efb751e63..92a61b8b84 100755 --- a/jobs/JAQM_MAKE_LBCS +++ b/jobs/JAQM_MAKE_LBCS @@ -48,15 +48,6 @@ export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . 
$USHaqm/preamble.sh; } > /dev/null 2>&1 -# #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in @@ -110,7 +101,7 @@ if [ "${RUN_TASK_GET_EXTRN_LBCS}" = "FALSE" ]; then GFS_FILE_FMT=${FV3GFS_FILE_FMT_LBCS} yyyymmdd=${PDY} hh=${cyc} - export EXTRN_MDL_CDATE=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - ${TIME_OFFSET_HRS} hours" "+%Y%m%d%H" ) + export EXTRN_MDL_CDATE=`$NDATE -${TIME_OFFSET_HRS} $PDY$cyc` export EXTRN_MDL_STAGING_DIR="${EXTRN_MDL_STAGING_DIR:-${DATA}}" fi # diff --git a/jobs/JAQM_NEXUS_EMISSION b/jobs/JAQM_NEXUS_EMISSION index d4a4eca6de..7811105058 100755 --- a/jobs/JAQM_NEXUS_EMISSION +++ b/jobs/JAQM_NEXUS_EMISSION @@ -52,14 +52,6 @@ export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})} export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" -# -#----------------------------------------------------------------------- -# Save current shell options (in a global array). Then set new options -# for this script/function. -#----------------------------------------------------------------------- -# -{ save_shell_opts; . 
$USHaqm/preamble.sh; } > /dev/null 2>&1 -# #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in @@ -106,7 +98,7 @@ if [ "${RUN_TASK_NEXUS_GFS_SFC}" = "FALSE" ]; then export TIME_OFFSET_HRS=${NEXUS_GFS_SFC_OFFSET_HRS:-0} yyyymmdd=${PDY} hh=${cyc} - export GFS_SFC_CDATE=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - ${TIME_OFFSET_HRS} hours" "+%Y%m%d%H" ) + export GFS_SFC_CDATE=`$NDATE -${TIME_OFFSET_HRS} $PDY$cyc` SLASH_ENSMEM_SUBDIR=${SLASH_ENSMEM_SUBDIR:-""} export GFS_SFC_STAGING_DIR="${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_GFS_SFC}" fi diff --git a/jobs/JAQM_NEXUS_POST_SPLIT b/jobs/JAQM_NEXUS_POST_SPLIT index 29a1a4a490..5b45769392 100755 --- a/jobs/JAQM_NEXUS_POST_SPLIT +++ b/jobs/JAQM_NEXUS_POST_SPLIT @@ -52,14 +52,6 @@ export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})} export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" -# -#----------------------------------------------------------------------- -# Save current shell options (in a global array). Then set new options -# for this script/function. -#----------------------------------------------------------------------- -# -{ save_shell_opts; . 
$USHaqm/preamble.sh; } > /dev/null 2>&1 -# #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in diff --git a/jobs/JAQM_POINT_SOURCE b/jobs/JAQM_POINT_SOURCE index 731ed53d30..f8955b745c 100755 --- a/jobs/JAQM_POINT_SOURCE +++ b/jobs/JAQM_POINT_SOURCE @@ -42,18 +42,11 @@ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" -# -#----------------------------------------------------------------------- -# Save current shell options (in a global array). Then set new options -# for this script/function. -#----------------------------------------------------------------------- -{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 + #----------------------------------------------------------------------- -# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). 
-# #----------------------------------------------------------------------- scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) diff --git a/jobs/JAQM_POST b/jobs/JAQM_POST index f89fa8ae5a..fafc5a2908 100755 --- a/jobs/JAQM_POST +++ b/jobs/JAQM_POST @@ -48,14 +48,7 @@ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" -# -#----------------------------------------------------------------------- -# Save current shell options (in a global array). Then set new options -# for this script/function. -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 -# + #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in diff --git a/jobs/JAQM_POST_STAT_O3 b/jobs/JAQM_POST_STAT_O3 index 4057b1c31d..5d5a04f878 100755 --- a/jobs/JAQM_POST_STAT_O3 +++ b/jobs/JAQM_POST_STAT_O3 @@ -46,13 +46,6 @@ export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})} export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" export PCOM=${PCOM:-${COMOUT}/wmo} -#----------------------------------------------------------------------- -# Save current shell options (in a global array). Then set new options -# for this script/function. -#----------------------------------------------------------------------- -# -{ save_shell_opts; . 
$USHaqm/preamble.sh; } > /dev/null 2>&1 -# #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in diff --git a/jobs/JAQM_POST_STAT_PM25 b/jobs/JAQM_POST_STAT_PM25 index 1de3adc97a..c1e7e28c29 100755 --- a/jobs/JAQM_POST_STAT_PM25 +++ b/jobs/JAQM_POST_STAT_PM25 @@ -47,13 +47,6 @@ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc export PCOM=${PCOM:-${COMOUT}/wmo} -#----------------------------------------------------------------------- -# Save current shell options (in a global array). Then set new options -# for this script/function. -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 -# #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in diff --git a/jobs/JAQM_PRE_POST_STAT b/jobs/JAQM_PRE_POST_STAT index 724039e2ac..e17e4a6240 100755 --- a/jobs/JAQM_PRE_POST_STAT +++ b/jobs/JAQM_PRE_POST_STAT @@ -42,13 +42,6 @@ export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})} export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" -#----------------------------------------------------------------------- -# Save current shell options (in a global array). Then set new options -# for this script/function. -#----------------------------------------------------------------------- -# -{ save_shell_opts; . 
$USHaqm/preamble.sh; } > /dev/null 2>&1 -# #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in diff --git a/parm/config/var_defns.sh.nco.static b/parm/config/var_defns.sh.nco.static index 96008d64dd..bc35e99763 100644 --- a/parm/config/var_defns.sh.nco.static +++ b/parm/config/var_defns.sh.nco.static @@ -950,7 +950,7 @@ AQM_GEFS_FILE_CYC='' COMINemis='@COMaqm@/emissions' FIXaqmconfig='@HOMEaqm@/fix/aqm/epa/data' FIXaqmfire='@HOMEaqm@/fix/fire' -FIXaqmfire='@HOMEaqm@/fix/bio' +FIXaqmbio='@HOMEaqm@/fix/bio' FIXaqmdust='@HOMEaqm@/fix/FENGSHA' FIXaqmcanopy='@HOMEaqm@/fix/canopy' FIXaqmchem_lbcs='@HOMEaqm@/fix/chem_lbcs' diff --git a/scripts/exaqm_fire_emission.sh b/scripts/exaqm_fire_emission.sh index b82c18e5b3..4613cc48e0 100755 --- a/scripts/exaqm_fire_emission.sh +++ b/scripts/exaqm_fire_emission.sh @@ -62,7 +62,7 @@ data files. 
yyyymmdd=${FIRE_FILE_CDATE:0:8} hh=${FIRE_FILE_CDATE:8:2} -CDATE_mh1=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - 1 hours" "+%Y%m%d%H" ) +CDATE_mh1=`$NDATE -1 ${yyyymmdd}${hh}` yyyymmdd_mh1=${CDATE_mh1:0:8} hh_mh1=${CDATE_mh1:8:2} @@ -81,12 +81,12 @@ if [ -e "${DCOMINfire}/${aqm_fire_file_fn}" ]; then else # Copy raw data for ihr in {0..23}; do - download_time=$( $DATE_UTIL --utc --date "${yyyymmdd_mh1} ${hh_mh1} UTC - $ihr hours" "+%Y%m%d%H" ) + download_time=`$NDATE -$ihr ${yyyymmdd_mh1}${hh_mh1}` FILE_curr=Hourly_Emissions_13km_${download_time}00_${download_time}00.nc FILE_13km=RAVE-HrlyEmiss-13km_v1r3_blend_s${download_time}00000_e${download_time}59590_c*.nc yyyymmdd_dn=${download_time:0:8} hh_dn=${download_time:8:2} - missing_download_time=$( $DATE_UTIL --utc --date "${yyyymmdd_dn} ${hh_dn} UTC - 24 hours" "+%Y%m%d%H" ) + missing_download_time=`$NDATE -24 ${yyyymmdd_dn}${hh_dn}` yyyymmdd_dn_md1=${missing_download_time:0:8} FILE_13km_md1=RAVE-HrlyEmiss-13km_v1r3_blend_s${missing_download_time}00000_e${missing_download_time}59590_c*.nc if [ -s `ls ${DCOMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}` ] && [ $(stat -c %s `ls ${DCOMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}`) -gt 4000000 ]; then @@ -155,21 +155,6 @@ else # Copy the final fire emission file to STAGING_DIR cp "${DATA}/${aqm_fire_file_fn}" "${FIRE_EMISSION_STAGING_DIR}" - # Archive the final fire emission file to disk and HPSS - if [ "${DO_AQM_SAVE_FIRE}" = "TRUE" ]; then - cp "${DATA}/${aqm_fire_file_fn}" ${DCOMINfire} - - hsi_log_fn="log.hsi_put.${yyyymmdd}_${hh}" - hsi put ${aqm_fire_file_fn} : ${AQM_FIRE_ARCHV_DIR}/${aqm_fire_file_fn} >& ${hsi_log_fn} - export err=$? - if [ $err -ne 0 ]; then - message_txt="htar file writing operation (\"hsi put ...\") failed. 
Check the log -file hsi_log_fn in the DATA directory for details: - DATA = \"${DATA}\" - hsi_log_fn = \"${hsi_log_fn}\"" - err_exit "${message_txt}" - fi - fi fi # cd ${FIRE_EMISSION_STAGING_DIR} diff --git a/scripts/exaqm_forecast.sh b/scripts/exaqm_forecast.sh index 01ebd92943..22ddbae9a7 100755 --- a/scripts/exaqm_forecast.sh +++ b/scripts/exaqm_forecast.sh @@ -501,7 +501,8 @@ for the current cycle's (cdate) run directory (DATA) failed: num_restart_hrs=${#restart_hrs[*]} for (( ih_rst=${num_restart_hrs}-1; ih_rst>=0; ih_rst-- )); do - cdate_restart_hr=$( $DATE_UTIL --utc --date "${PDY} ${cyc} UTC + ${restart_hrs[ih_rst]} hours" "+%Y%m%d%H" ) + #jp cdate_restart_hr=$( $DATE_UTIL --utc --date "${PDY} ${cyc} UTC + ${restart_hrs[ih_rst]} hours" "+%Y%m%d%H" ) + cdate_restart_hr=`$NDATE +${restart_hrs[ih_rst]} ${PDY}${cyc}` rst_yyyymmdd="${cdate_restart_hr:0:8}" rst_hh="${cdate_restart_hr:8:2}" @@ -654,59 +655,6 @@ export err=$?; err_chk # #----------------------------------------------------------------------- # -# If doing inline post, create the directory in which the post-processing -# output will be stored (postprd_dir). 
-# -#----------------------------------------------------------------------- -# -if [ ${WRITE_DOPOST} = "TRUE" ]; then - - yyyymmdd=${PDY} - hh=${cyc} - fmn="00" - - mkdir -p "${COMOUT}" - - cd ${COMOUT} - - for fhr in $(seq -f "%03g" 0 ${FCST_LEN_HRS}); do - - if [ ${fhr:0:1} = "0" ]; then - fhr_d=${fhr:1:2} - else - fhr_d=${fhr} - fi - - post_time=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${fhr_d} hours + ${fmn} minutes" "+%Y%m%d%H%M" ) - post_mn=${post_time:10:2} - post_mn_or_null="" - post_fn_suffix="GrbF${fhr_d}" - post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.grib2" - - if [ "${CPL_AQM}" = "TRUE" ]; then - fids=( "cmaq" ) - else - fids=( "prslev" "natlev" ) - fi - - for fid in "${fids[@]}"; do - FID=$(echo_uppercase $fid) - post_orig_fn="${FID}.${post_fn_suffix}" - post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}" - - mv ${DATA}/${post_orig_fn} ${post_renamed_fn} - - # DBN alert - if [ $SENDDBN = "YES" ]; then - $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn} - fi - done - done - -fi -# -#----------------------------------------------------------------------- -# # Copy RESTART directory to COMIN only for NCO mode and AQM. # Copy AQM output product file to COMOUT only for NCO mode. # Copy dyn and phy files to COMIN only for AQM. 
diff --git a/scripts/exaqm_lbcs.sh b/scripts/exaqm_lbcs.sh index 39ade384ce..66c42c33ce 100755 --- a/scripts/exaqm_lbcs.sh +++ b/scripts/exaqm_lbcs.sh @@ -85,7 +85,7 @@ fi # #----------------------------------------------------------------------- # -CDATE_MOD=$( $DATE_UTIL --utc --date "${PDY} ${cyc} UTC - ${EXTRN_MDL_LBCS_OFFSET_HRS} hours" "+%Y%m%d%H" ) +CDATE_MOD=`$NDATE -${EXTRN_MDL_LBCS_OFFSET_HRS} ${PDY}${cyc}` yyyymmdd=${CDATE_MOD:0:8} mm="${CDATE_MOD:4:2}" hh="${CDATE_MOD:8:2}" diff --git a/scripts/exaqm_make_lbcs.sh b/scripts/exaqm_make_lbcs.sh index 35bf91f006..f818d75e6f 100755 --- a/scripts/exaqm_make_lbcs.sh +++ b/scripts/exaqm_make_lbcs.sh @@ -466,7 +466,7 @@ FORTRAN namelist file has not specified for this external LBC model (EXTRN_MDL_N dd="${EXTRN_MDL_CDATE:6:2}" hh="${EXTRN_MDL_CDATE:8:2}" - cdate_crnt_fhr=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${fhr} hours" "+%Y%m%d%H" ) + cdate_crnt_fhr=`$NDATE +${fhr} ${yyyymmdd}${hh}` # # Get the month, day, and hour corresponding to the current forecast time # of the the external model. 
diff --git a/scripts/exaqm_nexus_emission.sh b/scripts/exaqm_nexus_emission.sh index f44d6622cd..9b776fa6e5 100755 --- a/scripts/exaqm_nexus_emission.sh +++ b/scripts/exaqm_nexus_emission.sh @@ -195,20 +195,21 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then fi if [ "${NUM_SPLIT_NEXUS}" = "01" ]; then - start_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC" "+%Y%m%d%H" ) - end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${FCST_LEN_HRS} hours" "+%Y%m%d%H" ) + start_date=${yyyymmdd}${hh} + end_date=`$NDATE +${FCST_LEN_HRS} ${yyyymmdd}${hh}` else len_per_split=$(( FCST_LEN_HRS / NUM_SPLIT_NEXUS )) nsptp=$(( nspt+1 )) # Compute start and end dates for nexus split option start_del_hr=$(( len_per_split * nspt )) - start_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${start_del_hr} hours " "+%Y%m%d%H" ) + start_date=`$NDATE +${start_del_hr} ${yyyymmdd}${hh}` if [ "${nsptp}" = "${NUM_SPLIT_NEXUS}" ];then - end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + $(expr $FCST_LEN_HRS + 1) hours" "+%Y%m%d%H" ) + end_date=`$NDATE +$(expr $FCST_LEN_HRS + 1) ${yyyymmdd}${hh}` else end_del_hr=$(( len_per_split * nsptp )) - end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + $(expr $end_del_hr + 1) hours" "+%Y%m%d%H" ) + end_del_hr1=$(( $end_del_hr + 1 )) + end_date=`$NDATE +${end_del_hr1} ${yyyymmdd}${hh}` fi fi # diff --git a/scripts/exaqm_nexus_post_split.sh b/scripts/exaqm_nexus_post_split.sh index 93186a5615..43b5e0ffb5 100755 --- a/scripts/exaqm_nexus_post_split.sh +++ b/scripts/exaqm_nexus_post_split.sh @@ -70,8 +70,8 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then CYCLE_IDX=$(( ${cyc_mod} / ${INCR_CYCL_FREQ} )) FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi -start_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC" "+%Y%m%d%H" ) -end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${FCST_LEN_HRS} hours" "+%Y%m%d%H" ) +start_date=${yyyymmdd}${hh} +end_date=`$NDATE +${FCST_LEN_HRS} ${yyyymmdd}${hh}` # 
#----------------------------------------------------------------------- diff --git a/scripts/exaqm_post.sh b/scripts/exaqm_post.sh index b6555a8ebb..4922248c2a 100755 --- a/scripts/exaqm_post.sh +++ b/scripts/exaqm_post.sh @@ -162,27 +162,22 @@ hh=${cyc} # must be set to a null string. # mnts_secs_str="" -if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then - if [ ${fhr}${fmn} = "00000" ]; then - mnts_secs_str=":"$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${DT_ATMOS} seconds" "+%M:%S" ) - else - mnts_secs_str=":${fmn}:00" - fi -fi - dyn_file="${COMIN}/${cyc}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}${mnts_secs_str}.nc" - phy_file="${COMIN}/${cyc}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}${mnts_secs_str}.nc" +dyn_file="${COMIN}/${cyc}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}${mnts_secs_str}.nc" +phy_file="${COMIN}/${cyc}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}${mnts_secs_str}.nc" # # Set parameters that specify the actual time (not forecast time) of the # output. # -post_time=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${fhr} hours + ${fmn} minutes" "+%Y%m%d%H%M" ) +fmn='00' +post_time=`$NDATE +${fhr} ${yyyymmdd}${hh}`$fmn post_yyyy=${post_time:0:4} post_mm=${post_time:4:2} post_dd=${post_time:6:2} post_hh=${post_time:8:2} post_mn=${post_time:10:2} + # # Create the input namelist file to the post-processor executable. # @@ -263,7 +258,7 @@ post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.gri # rename, and create symlinks to them. # cd "${COMOUT}" -basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M ) +basetime=$yyyymmdd$hh symlink_suffix="${dot_ensmem}.${basetime}f${fhr}${post_mn}" if [ "${CPL_AQM}" = "TRUE" ]; then fids=( "cmaq" ) diff --git a/sorc/Externals.cfg b/sorc/Externals.cfg index 6a5553bc47..40f8465cbe 100644 --- a/sorc/Externals.cfg +++ b/sorc/Externals.cfg @@ -12,7 +12,8 @@ protocol = git repo_url = https://github.com/ufs-community/ufs-weather-model # Specify either a branch name or a hash but not both. 
#branch = production/AQM.v7 -hash = 4cf17f7 +#hash = 4cf17f7 +hash = 80d0fe8 local_path = ufs-weather-model required = True @@ -39,7 +40,7 @@ protocol = git repo_url = https://github.com/NOAA-EMC/AQM-utils # Specify either a branch name or a hash but not both. #branch = develop -hash = d05a358 +hash = b4d37a8 local_path = AQM-utils required = True diff --git a/ush/bash_utils/define_macos_utilities.sh b/ush/bash_utils/define_macos_utilities.sh index d89780944d..fe66d93eef 100644 --- a/ush/bash_utils/define_macos_utilities.sh +++ b/ush/bash_utils/define_macos_utilities.sh @@ -26,19 +26,7 @@ Aborting. exit 1 } - if [[ $(uname -s) == Darwin ]]; then - export READLINK=greadlink - command -v $READLINK >/dev/null 2>&1 || darwinerror $READLINK - export SED=gsed - command -v $SED >/dev/null 2>&1 || darwinerror $SED - export DATE_UTIL=gdate - command -v $DATE_UTIL >/dev/null 2>&1 || darwinerror $DATE_UTIL - export LN_UTIL=gln - command -v $LN_UTIL >/dev/null 2>&1 || darwinerror $LN_UTIL - else export READLINK=readlink export SED=sed export DATE_UTIL=date export LN_UTIL=ln - fi - diff --git a/ush/bash_utils/eval_METplus_timestr_tmpl.sh b/ush/bash_utils/eval_METplus_timestr_tmpl.sh deleted file mode 100644 index 0f5db76153..0000000000 --- a/ush/bash_utils/eval_METplus_timestr_tmpl.sh +++ /dev/null @@ -1,265 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that evaluates a METplus time-string -# template. -# -#----------------------------------------------------------------------- -# -function eval_METplus_timestr_tmpl() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; . 
${USHdir}/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. Then -# process the arguments provided to this script/function (which should -# consist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( \ - "init_time" \ - "fhr" \ - "METplus_timestr_tmpl" \ - "outvarname_formatted_time" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args "valid_args" -# -#----------------------------------------------------------------------- -# -# Declare local variables. 
-# -#----------------------------------------------------------------------- -# - local fmt \ - formatted_time \ - hh_init \ - init_time_str \ - lead_hrs \ - len \ - mn_init \ - METplus_time_fmt \ - METplus_time_shift \ - METplus_time_type \ - regex_search \ - ss_init \ - valid_time_str \ - yyyymmdd_init -# -#----------------------------------------------------------------------- -# -# Run checks on input arguments. -# -#----------------------------------------------------------------------- -# - if [ -z "${METplus_timestr_tmpl}" ]; then - print_err_msg_exit "\ -The specified METplus time string template (METplus_timestr_tmpl) cannot be empty: - METplus_timestr_tmpl = \"${METplus_timestr_tmpl}\"" - fi - - len=${#init_time} - if [[ ${init_time} =~ ^[0-9]+$ ]]; then - if [ "$len" -ne 10 ] && [ "$len" -ne 12 ] && [ "$len" -ne 14 ]; then - print_err_msg_exit "\ -The specified initial time string (init_time) must contain exactly 10, -12, or 14 integers (but contains $len): - init_time = \"${init_time}\"" - fi - else - print_err_msg_exit "\ -The specified initial time string (init_time) must consist of only -integers and cannot be empty: - init_time = \"${init_time}\"" - fi - - if ! [[ $fhr =~ ^[0-9]+$ ]]; then - print_err_msg_exit "\ -The specified forecast hour (fhr) must consist of only integers and -cannot be empty: - fhr = \"${fhr}\"" - fi -# -#----------------------------------------------------------------------- -# -# Set strings for the initial and valid times that can be passed to the -# "date" utility for evaluation. 
-# -#----------------------------------------------------------------------- -# - yyyymmdd_init=${init_time:0:8} - hh_init=${init_time:8:2} - - mn_init="00" - if [ "$len" -gt "10" ]; then - mn_init=${init_time:10:2} - fi - - ss_init="00" - if [ "$len" -gt "12" ]; then - ss_init=${init_time:12:2} - fi - - init_time_str=$( printf "%s" "${yyyymmdd_init} + ${hh_init} hours + ${mn_init} minutes + ${ss_init} seconds" ) - valid_time_str=$( printf "%s" "${init_time_str} + ${fhr} hours" ) -# -#----------------------------------------------------------------------- -# -# Parse the input METplus time string template. -# -#----------------------------------------------------------------------- -# - regex_search="^\{(init|valid|lead)(\?)(fmt=)([^\?]*)(\?)?(shift=)?([^\?]*)?\}" - METplus_time_type=$( \ - printf "%s" "${METplus_timestr_tmpl}" | $SED -n -r -e "s/${regex_search}/\1/p" ) - METplus_time_fmt=$( \ - printf "%s" "${METplus_timestr_tmpl}" | $SED -n -r -e "s/${regex_search}/\4/p" ) - METplus_time_shift=$( \ - printf "%s" "${METplus_timestr_tmpl}" | $SED -n -r -e "s/${regex_search}/\7/p" ) -# -#----------------------------------------------------------------------- -# -# Get strings for the time format and time shift that can be passed to -# the "date" utility or the "printf" command. -# -#----------------------------------------------------------------------- -# - case "${METplus_time_fmt}" in - "%Y%m%d%H"|"%Y%m%d"|"%H%M%S"|"%H") - fmt="${METplus_time_fmt}" - ;; - "%HHH") -# -# Print format assumes that the argument to printf (i.e. the number to -# print out) may be a float. If we instead assume an integer and use -# "%03d" as the format, the printf function below will fail if the argument -# happens to be a float. The "%03.0f" format will work for both a float -# and an integer argument (and will truncate the float and print out a -# 3-digit integer). 
-# - fmt="%03.0f" - ;; - *) - print_err_msg_exit "\ -Unsupported METplus time format: - METplus_time_fmt = \"${METplus_time_fmt}\" -METplus time string template passed to this function is: - METplus_timestr_tmpl = \"${METplus_timestr_tmpl}\"" - ;; - esac - - time_shift_str=$(( ${METplus_time_shift} + 0 ))" seconds" -# -#----------------------------------------------------------------------- -# -# Set the formatted time string. -# -#----------------------------------------------------------------------- -# - case "${METplus_time_type}" in - "init") - formatted_time=$( ${DATE_UTIL} --date="${init_time_str} + ${time_shift_str}" +"${fmt}" ) - ;; - "valid") - formatted_time=$( ${DATE_UTIL} --date="${valid_time_str} + ${time_shift_str}" +"${fmt}" ) - ;; - "lead") - lead_secs=$(( $( ${DATE_UTIL} --date="${valid_time_str} + ${time_shift_str}" +"%s" ) \ - - $( ${DATE_UTIL} --date="${init_time_str}" +"%s" ) )) - lead_hrs=$( bc -l <<< "${lead_secs}/${SECS_PER_HOUR}" ) -# -# Check to make sure lead_hrs is an integer. -# - lead_hrs_trunc=$( bc <<< "${lead_secs}/${SECS_PER_HOUR}" ) - lead_hrs_rem=$( bc -l <<< "${lead_hrs} - ${lead_hrs_trunc}" ) - if [ "${lead_hrs_rem}" != "0" ]; then - print_err_msg_exit "\ -The lead in hours (lead_hrs) must be an integer but isn't: - lead_hrs = ${lead_hrs} -The lead in seconds (lead_secs) is: - lead_secs = ${lead_secs} -The remainder (lead_hrs_rem) after dividing the lead_secs by SECS_PER_HOUR -= ${SECS_PER_HOUR} is: - lead_hrs_rem = ${lead_hrs_rem}" - fi -# -# Get the lead in the proper format. 
-# - formatted_time=$( printf "${fmt}" "${lead_hrs}" ) - ;; - *) - print_err_msg_exit "\ -Unsupported METplus time type: - METplus_time_type = \"${METplus_time_type}\" -METplus time string template passed to this function is: - METplus_timestr_tmpl = \"${METplus_timestr_tmpl}\"" - ;; - esac - - if [ -z "${formatted_time}" ]; then - print_err_msg_exit "\ -The specified METplus time string template (METplus_timestr_tmpl) could -not be evaluated for the given initial time (init_time) and forecast -hour (fhr): - METplus_timestr_tmpl = \"${METplus_timestr_tmpl}\" - init_time = \"${init_time}\" - fhr = \"${fhr}\"" - fi -# -#----------------------------------------------------------------------- -# -# Set output variables. -# -#----------------------------------------------------------------------- -# - if [ ! -z "${outvarname_formatted_time}" ]; then - printf -v ${outvarname_formatted_time} "%s" "${formatted_time}" - fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} diff --git a/ush/source_util_funcs.sh b/ush/source_util_funcs.sh index 6e44090e03..9442deec60 100755 --- a/ush/source_util_funcs.sh +++ b/ush/source_util_funcs.sh @@ -49,7 +49,7 @@ function source_util_funcs() { # #----------------------------------------------------------------------- # - . ${bashutils_dir}/define_macos_utilities.sh + . ${bashutils_dir}/define_macos_utilities.sh # #----------------------------------------------------------------------- # @@ -229,7 +229,7 @@ function source_util_funcs() { # #----------------------------------------------------------------------- # - . ${bashutils_dir}/eval_METplus_timestr_tmpl.sh +#jp . 
${bashutils_dir}/eval_METplus_timestr_tmpl.sh # #----------------------------------------------------------------------- # diff --git a/ush/update_input_nml.py b/ush/update_input_nml.py new file mode 100755 index 0000000000..7207316298 --- /dev/null +++ b/ush/update_input_nml.py @@ -0,0 +1,162 @@ +#!/usr/bin/env python3 + +import os +import sys +import argparse +import unittest +import logging +from textwrap import dedent + +from python_utils import ( + import_vars, + print_input_args, + print_info_msg, + print_err_msg_exit, + cfg_to_yaml_str, + load_shell_config, + flatten_dict, +) + +from set_namelist import set_namelist + + +def update_input_nml(run_dir): + """Update the FV3 input.nml file in the specified run directory + + Args: + run_dir: run directory + Returns: + Boolean + """ + + print_input_args(locals()) + + # import all environment variables + import_vars() + + # + # ----------------------------------------------------------------------- + # + # Update the FV3 input.nml file in the specified run directory. + # + # ----------------------------------------------------------------------- + # + print_info_msg( + f""" + Updating the FV3 input.nml file in the specified run directory (run_dir): + run_dir = '{run_dir}'""", + verbose=VERBOSE, + ) + # + # ----------------------------------------------------------------------- + # + # Set new values of the specific parameters to be updated. 
+ # + # ----------------------------------------------------------------------- + # + settings = {} + + # For restart run + if args.restart: + settings["fv_core_nml"] = { + "external_ic": False, + "make_nh": False, + "mountain": True, + "na_init": 0, + "nggps_ic": False, + "warm_start": True, + } + + settings["gfs_physics_nml"] = { + "nstf_name": [2, 0, 0, 0, 0], + } + + + settings_str = cfg_to_yaml_str(settings) + + print_info_msg( + dedent( + f""" + The variable 'settings' specifying values to be used in the FV3 'input.nml' + file for restart has been set as follows:\n + settings =\n\n""" + ) + + settings_str, + verbose=VERBOSE, + ) + # + # ----------------------------------------------------------------------- + # + # Call a python script to update the experiment's actual FV3 INPUT.NML + # file for restart. + # + # ----------------------------------------------------------------------- + # + fv3_input_nml_fp = os.path.join(run_dir, FV3_NML_FN) + + try: + set_namelist( + [ + "-q", + "-n", + fv3_input_nml_fp, + "-u", + settings_str, + "-o", + fv3_input_nml_fp, + ] + ) + except: + logging.exception( + dedent( + f""" + Call to python script set_namelist.py to generate an FV3 namelist file + failed. Parameters passed to this script are: + Full path to base namelist file: + fv3_input_nml_fp = '{fv3_input_nml_fp}' + Full path to output namelist file: + fv3_input_nml_fp = '{fv3_input_nml_fp}' + Namelist settings specified on command line:\n + settings =\n\n""" + ) + + settings_str + ) + return False + + return True + + +def parse_args(argv): + """Parse command line arguments""" + parser = argparse.ArgumentParser(description="Update FV3 input.nml file for restart.") + + parser.add_argument( + "-r", "--run_dir", + dest="run_dir", + required=True, + help="Run directory." 
+ ) + + parser.add_argument( + "-p", "--path-to-defns", + dest="path_to_defns", + required=True, + help="Path to var_defns file.", + ) + + parser.add_argument( + "--restart", + action='store_true', + help='Update for restart') + + return parser.parse_args(argv) + + +if __name__ == "__main__": + args = parse_args(sys.argv[1:]) + cfg = load_shell_config(args.path_to_defns) + cfg = flatten_dict(cfg) + import_vars(dictionary=cfg) + update_input_nml( + run_dir=args.run_dir, + ) diff --git a/versions/build.ver b/versions/build.ver index 90f7d56f20..c2ce8dc81e 100644 --- a/versions/build.ver +++ b/versions/build.ver @@ -34,5 +34,4 @@ export sigio_ver=2.3.2 export sfcio_ver=1.4.1 export wrf_io_ver=1.2.0 export wgrib2_ver=2.0.8_wmo -export bufr_ver=11.7.0 export nemsiogfs_ver=2.5.3 diff --git a/versions/run.ver b/versions/run.ver index 2be391ac76..38cb4825d6 100644 --- a/versions/run.ver +++ b/versions/run.ver @@ -4,12 +4,9 @@ export intel_ver=19.1.3.304 export craype_ver=2.7.13 export cray_mpich_ver=8.1.12 export jasper_ver=2.0.25 -export zlib_ver=1.2.11 -export libpng_ver=1.6.37 export hdf5_ver=1.10.6 export netcdf_ver=4.7.4 export fms_ver=2022.04 -export bacio_ver=2.4.1 export crtm_ver=2.4.0 export g2_ver=3.4.5 export g2tmpl_ver=1.10.2 @@ -25,19 +22,13 @@ export prod_util_ver=2.0.14 export cray_pals_ver=1.2.2 export nemsio_ver=2.5.2 -export sigio_ver=2.3.2 -export sfcio_ver=1.4.1 -export wrf_io_ver=1.2.0 -export bufr_ver=11.7.0 -export nemsiogfs_ver=2.5.3 export python_ver=3.8.6 -export rocoto_ver=1.3.5 export envvar_ver=1.0 export gfs_ver=v16.3 export gefs_ver=v12.3 -export aqm_ver=v7.0.0 +export aqm_ver=v7.0.1 export udunits_ver=2.2.28 export gsl_ver=2.7 From 3b9c77f01fc1c24fdd9e3703d0126f375af9ecbf Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Fri, 24 Nov 2023 22:30:10 +0000 Subject: [PATCH 12/24] updaet J-job scripts with COMaqm --- jobs/JAQM_BIAS_CORRECTION_O3 | 16 +++++++--------- jobs/JAQM_BIAS_CORRECTION_PM25 | 18 +++++++++--------- 
jobs/JAQM_FIRE_EMISSION | 8 ++++---- jobs/JAQM_FORECAST | 10 ++++------ jobs/JAQM_ICS | 9 ++++----- jobs/JAQM_LBCS | 8 ++++---- jobs/JAQM_MAKE_ICS | 9 ++++----- jobs/JAQM_MAKE_LBCS | 9 ++++----- jobs/JAQM_NEXUS_EMISSION | 13 ++++++------- jobs/JAQM_NEXUS_POST_SPLIT | 12 ++++++------ jobs/JAQM_POINT_SOURCE | 12 ++++++------ jobs/JAQM_POST | 9 ++++----- jobs/JAQM_POST_STAT_O3 | 14 +++++--------- jobs/JAQM_POST_STAT_PM25 | 12 +++++------- jobs/JAQM_PRE_POST_STAT | 10 ++++------ 15 files changed, 76 insertions(+), 93 deletions(-) diff --git a/jobs/JAQM_BIAS_CORRECTION_O3 b/jobs/JAQM_BIAS_CORRECTION_O3 index 48659f2390..3f7c547685 100755 --- a/jobs/JAQM_BIAS_CORRECTION_O3 +++ b/jobs/JAQM_BIAS_CORRECTION_O3 @@ -17,7 +17,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh -export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} @@ -40,18 +39,16 @@ export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" -export PCOM=${PCOM:-${COMOUT}/wmo} export DCOMINairnow="${DCOMINairnow:-${DCOMROOT}}" -export COMINbicor=${COMINbicor:-$(compath.py ${NET}/${model_ver}})} -export COMOUTbicor=${COMOUTbicor:-$(compath.py -o ${NET}/${aqm_ver}})} +export COMINbicor=${COMINbicor:-${COMaqm}} +export COMOUTbicor=${COMOUTbicor:-${COMaqm}} -# 
#----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in @@ -82,6 +79,7 @@ This is the J-job script for the task that runs BIAS-CORRECTION-O3. DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_BIAS_CORRECTION_O3}" if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then + export PCOM=${PCOM:-${COMOUT}/wmo} if [ ! -d "${PCOM}" ]; then mkdir -p "${PCOM}" fi diff --git a/jobs/JAQM_BIAS_CORRECTION_PM25 b/jobs/JAQM_BIAS_CORRECTION_PM25 index 42eb07409f..9b4a200d87 100755 --- a/jobs/JAQM_BIAS_CORRECTION_PM25 +++ b/jobs/JAQM_BIAS_CORRECTION_PM25 @@ -17,7 +17,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh -export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} @@ -41,16 +40,16 @@ export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" -export DCOMINairnow="${DCOMINairnow:-${DCOMROOT}}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" -export PCOM=${PCOM:-${COMOUT}/wmo} +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" + +export DCOMINairnow="${DCOMINairnow:-${DCOMROOT}}" -export COMINbicor=${COMINbicor:-$(compath.py ${NET}/${model_ver}})} -export COMOUTbicor=${COMOUTbicor:-$(compath.py -o ${NET}/${aqm_ver}})} +export COMINbicor=${COMINbicor:-${COMaqm}} +export 
COMOUTbicor=${COMOUTbicor:-${COMaqm}} #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located @@ -81,6 +80,7 @@ This is the J-job script for the task that runs BIAS-CORRECTION-PM25. DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_BIAS_CORRECTION_PM25}" if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then + export PCOM=${PCOM:-${COMOUT}/wmo} if [ ! -d "${PCOM}" ]; then mkdir -p "${PCOM}" fi diff --git a/jobs/JAQM_FIRE_EMISSION b/jobs/JAQM_FIRE_EMISSION index e7ab0ce4a5..f3dbc89257 100755 --- a/jobs/JAQM_FIRE_EMISSION +++ b/jobs/JAQM_FIRE_EMISSION @@ -17,7 +17,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh -export DATAROOT=${DATAROOT:-${DATAROOT_dfv}} export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} @@ -39,9 +38,10 @@ export FIXaqmfire="${FIXaqmfire:-${HOMEaqm}/fix/fire}" export DCOMROOT=${DCOMROOT:-/lfs/h1/ops/prod/dcom} export DCOMINfire="${DCOMINfire:-${DCOMROOT}/${PDY}/rave}" -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located diff --git a/jobs/JAQM_FORECAST b/jobs/JAQM_FORECAST index ad12376c5e..fc2e1e8173 100755 --- a/jobs/JAQM_FORECAST +++ b/jobs/JAQM_FORECAST @@ -21,7 +21,6 @@ export share_pid=${share_pid:-${PDY}${cyc}} export pid=$share_pid export jobid=${job}.${pid} -export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} cd ${DATA} @@ -45,11 +44,10 @@ export 
PARMaqm=${PARMaqm:-${HOMEaqm}/parm} export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} -export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located diff --git a/jobs/JAQM_ICS b/jobs/JAQM_ICS index 1d7f862031..1c252a2e9a 100755 --- a/jobs/JAQM_ICS +++ b/jobs/JAQM_ICS @@ -20,7 +20,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . 
$USHaqm/source_util_funcs.sh -export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} @@ -40,10 +39,10 @@ export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located diff --git a/jobs/JAQM_LBCS b/jobs/JAQM_LBCS index fe093bdca1..6280d61f1a 100755 --- a/jobs/JAQM_LBCS +++ b/jobs/JAQM_LBCS @@ -20,7 +20,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . 
$USHaqm/source_util_funcs.sh -export DATAROOT=${DATAROOT:-${DATAROOT_dfv}} export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} @@ -40,9 +39,10 @@ export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" export COMINgefs="${COMINgefs:-$(compath.py ${envir}/gefs/${gefs_ver})}" export FIXaqmchem_lbcs="${FIXaqmchem_lbcs:-${HOMEaqm}/fix/chem_lbcs}" diff --git a/jobs/JAQM_MAKE_ICS b/jobs/JAQM_MAKE_ICS index 10ab5b9c13..d4d2894f05 100755 --- a/jobs/JAQM_MAKE_ICS +++ b/jobs/JAQM_MAKE_ICS @@ -20,7 +20,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . 
${USHaqm}/source_util_funcs.sh -export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} @@ -40,10 +39,10 @@ export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located diff --git a/jobs/JAQM_MAKE_LBCS b/jobs/JAQM_MAKE_LBCS index 92a61b8b84..46c079d46a 100755 --- a/jobs/JAQM_MAKE_LBCS +++ b/jobs/JAQM_MAKE_LBCS @@ -21,7 +21,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . 
$USHaqm/source_util_funcs.sh -export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} @@ -41,12 +40,12 @@ export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located diff --git a/jobs/JAQM_NEXUS_EMISSION b/jobs/JAQM_NEXUS_EMISSION index 7811105058..e776281e39 100755 --- a/jobs/JAQM_NEXUS_EMISSION +++ b/jobs/JAQM_NEXUS_EMISSION @@ -16,10 +16,8 @@ export pid=${pid:-$$} export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh - . 
$USHaqm/source_util_funcs.sh -export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} @@ -45,12 +43,13 @@ export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} -export COMINemis="${COMINemis:-${COMIN}/emission}" +export COMINemis="${COMINemis:-${COMaqm}/emissions}" export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located diff --git a/jobs/JAQM_NEXUS_POST_SPLIT b/jobs/JAQM_NEXUS_POST_SPLIT index 5b45769392..f44618004f 100755 --- a/jobs/JAQM_NEXUS_POST_SPLIT +++ b/jobs/JAQM_NEXUS_POST_SPLIT @@ -19,7 +19,6 @@ export outid=${outid:-"LL$job"} . 
$USHaqm/source_util_funcs.sh -export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} @@ -45,12 +44,13 @@ export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} -export COMINemis="${COMINemis:-${COMIN}/emission}" export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" +export COMINemis="${COMINemis:-${COMaqm}/emissions}" + +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located diff --git a/jobs/JAQM_POINT_SOURCE b/jobs/JAQM_POINT_SOURCE index f8955b745c..6b60079bbb 100755 --- a/jobs/JAQM_POINT_SOURCE +++ b/jobs/JAQM_POINT_SOURCE @@ -17,7 +17,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh -export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} @@ -31,17 +30,18 @@ setpdy.sh . 
$USHaqm/job_preamble.sh -export COMINemispt="${COMINemispt:-${COMIN}/emission/pt}" export EXECaqm=${EXECaqm:-${HOMEaqm}/exec} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" +export COMINemispt="${COMINemispt:-${COMaqm}/emissions/nei2016v1-pt}" + +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located diff --git a/jobs/JAQM_POST b/jobs/JAQM_POST index fafc5a2908..7a120fbcd5 100755 --- a/jobs/JAQM_POST +++ b/jobs/JAQM_POST @@ -19,7 +19,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . 
$USHaqm/source_util_funcs.sh -export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} @@ -44,10 +43,10 @@ export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located diff --git a/jobs/JAQM_POST_STAT_O3 b/jobs/JAQM_POST_STAT_O3 index 5d5a04f878..e652d7b216 100755 --- a/jobs/JAQM_POST_STAT_O3 +++ b/jobs/JAQM_POST_STAT_O3 @@ -12,10 +12,8 @@ export pid=${pid:-$$} export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh - . 
$USHaqm/source_util_funcs.sh -export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} @@ -38,13 +36,10 @@ export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} -export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" - -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" -export PCOM=${PCOM:-${COMOUT}/wmo} +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located @@ -76,6 +71,7 @@ This is the J-job script for the task that runs POST-STAT-O3. DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POST_STAT_O3}" if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then + export PCOM=${PCOM:-${COMOUT}/wmo} if [ ! -d "${PCOM}" ]; then mkdir -p "${PCOM}" fi diff --git a/jobs/JAQM_POST_STAT_PM25 b/jobs/JAQM_POST_STAT_PM25 index c1e7e28c29..a8fe4bfe3f 100755 --- a/jobs/JAQM_POST_STAT_PM25 +++ b/jobs/JAQM_POST_STAT_PM25 @@ -17,7 +17,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . 
$USHaqm/source_util_funcs.sh -export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} @@ -40,12 +39,10 @@ export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" - -export PCOM=${PCOM:-${COMOUT}/wmo} +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located @@ -79,6 +76,7 @@ DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POST_STAT_PM25}" mkdir -p ${PCOM} if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then + export PCOM=${PCOM:-${COMOUT}/wmo} if [ ! -d "${PCOM}" ]; then mkdir -p "${PCOM}" fi diff --git a/jobs/JAQM_PRE_POST_STAT b/jobs/JAQM_PRE_POST_STAT index e17e4a6240..abe54f6dc9 100755 --- a/jobs/JAQM_PRE_POST_STAT +++ b/jobs/JAQM_PRE_POST_STAT @@ -17,7 +17,6 @@ export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . 
$USHaqm/source_util_funcs.sh -export DATAROOT="${DATAROOT:-${DATAROOT_dfv}}" export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} @@ -36,11 +35,10 @@ export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} export FIXaqm=${FIXaqm:-${HOMEaqm}/fix} export UTILaqm=${UTILaqm:-${HOMEaqm}/util} -export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" -export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" -export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" -export COMINm2="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm2})}" -export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" +export COMIN="${COMIN:-${COMaqm}/${RUN}.${PDY}}" +export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" +export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" +export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located From 5b0ad090067c988d06ee12f5dae2083dcabf7554 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Fri, 24 Nov 2023 23:47:44 +0000 Subject: [PATCH 13/24] clean up J-job scripts --- jobs/JAQM_BIAS_CORRECTION_O3 | 2 -- jobs/JAQM_BIAS_CORRECTION_PM25 | 2 -- jobs/JAQM_FIRE_EMISSION | 2 -- jobs/JAQM_FORECAST | 2 -- jobs/JAQM_ICS | 2 -- jobs/JAQM_LBCS | 2 -- jobs/JAQM_MAKE_ICS | 2 -- jobs/JAQM_MAKE_LBCS | 3 --- jobs/JAQM_NEXUS_EMISSION | 2 -- jobs/JAQM_NEXUS_POST_SPLIT | 2 -- jobs/JAQM_POINT_SOURCE | 2 -- jobs/JAQM_POST | 4 ---- jobs/JAQM_POST_STAT_PM25 | 2 -- jobs/JAQM_PRE_POST_STAT | 2 -- 14 files changed, 31 deletions(-) diff --git a/jobs/JAQM_BIAS_CORRECTION_O3 b/jobs/JAQM_BIAS_CORRECTION_O3 index 3f7c547685..f69e6b95de 100755 --- a/jobs/JAQM_BIAS_CORRECTION_O3 +++ b/jobs/JAQM_BIAS_CORRECTION_O3 @@ -4,8 +4,6 @@ date export PS4=' $SECONDS + ' set -xue -#----------------------------------------------------------------------- -# Source the 
variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} diff --git a/jobs/JAQM_BIAS_CORRECTION_PM25 b/jobs/JAQM_BIAS_CORRECTION_PM25 index 9b4a200d87..f81fcbbc4b 100755 --- a/jobs/JAQM_BIAS_CORRECTION_PM25 +++ b/jobs/JAQM_BIAS_CORRECTION_PM25 @@ -4,8 +4,6 @@ date export PS4=' $SECONDS + ' set -xue -#----------------------------------------------------------------------- -# Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} diff --git a/jobs/JAQM_FIRE_EMISSION b/jobs/JAQM_FIRE_EMISSION index f3dbc89257..83a95c741b 100755 --- a/jobs/JAQM_FIRE_EMISSION +++ b/jobs/JAQM_FIRE_EMISSION @@ -4,8 +4,6 @@ date export PS4=' $SECONDS + ' set -xue -#----------------------------------------------------------------------- -# Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} diff --git a/jobs/JAQM_FORECAST b/jobs/JAQM_FORECAST index fc2e1e8173..1009ed3a55 100755 --- a/jobs/JAQM_FORECAST +++ b/jobs/JAQM_FORECAST @@ -4,8 +4,6 @@ date export PS4=' $SECONDS + ' set -xue -#----------------------------------------------------------------------- -# Source the variable definitions file and the bash utility functions. 
#----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} diff --git a/jobs/JAQM_ICS b/jobs/JAQM_ICS index 1c252a2e9a..f2f558f3f6 100755 --- a/jobs/JAQM_ICS +++ b/jobs/JAQM_ICS @@ -4,8 +4,6 @@ date export PS4=' $SECONDS + ' set -xue -#----------------------------------------------------------------------- -# Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${EXECaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} diff --git a/jobs/JAQM_LBCS b/jobs/JAQM_LBCS index 6280d61f1a..9fe0450f3e 100755 --- a/jobs/JAQM_LBCS +++ b/jobs/JAQM_LBCS @@ -4,8 +4,6 @@ date export PS4=' $SECONDS + ' set -xue -#----------------------------------------------------------------------- -# Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${EXECaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} diff --git a/jobs/JAQM_MAKE_ICS b/jobs/JAQM_MAKE_ICS index d4d2894f05..281fe1af8c 100755 --- a/jobs/JAQM_MAKE_ICS +++ b/jobs/JAQM_MAKE_ICS @@ -4,8 +4,6 @@ date export PS4=' $SECONDS + ' set -xue -#----------------------------------------------------------------------- -# Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${EXECaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} diff --git a/jobs/JAQM_MAKE_LBCS b/jobs/JAQM_MAKE_LBCS index 46c079d46a..f841a4b5b1 100755 --- a/jobs/JAQM_MAKE_LBCS +++ b/jobs/JAQM_MAKE_LBCS @@ -4,9 +4,6 @@ date export PS4=' $SECONDS + ' set -xue -# -#----------------------------------------------------------------------- -# Source the variable definitions file and the bash utility functions. 
#----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} diff --git a/jobs/JAQM_NEXUS_EMISSION b/jobs/JAQM_NEXUS_EMISSION index e776281e39..4a9119ae11 100755 --- a/jobs/JAQM_NEXUS_EMISSION +++ b/jobs/JAQM_NEXUS_EMISSION @@ -4,8 +4,6 @@ date export PS4=' $SECONDS + ' set -xue -#----------------------------------------------------------------------- -# Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} diff --git a/jobs/JAQM_NEXUS_POST_SPLIT b/jobs/JAQM_NEXUS_POST_SPLIT index f44618004f..5d096a14ec 100755 --- a/jobs/JAQM_NEXUS_POST_SPLIT +++ b/jobs/JAQM_NEXUS_POST_SPLIT @@ -4,8 +4,6 @@ date export PS4=' $SECONDS + ' set -xue -#----------------------------------------------------------------------- -# Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} diff --git a/jobs/JAQM_POINT_SOURCE b/jobs/JAQM_POINT_SOURCE index 6b60079bbb..cd2549777b 100755 --- a/jobs/JAQM_POINT_SOURCE +++ b/jobs/JAQM_POINT_SOURCE @@ -4,8 +4,6 @@ date export PS4=' $SECONDS + ' set -xue -#----------------------------------------------------------------------- -# Source the variable definitions file and the bash utility functions. 
#----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} diff --git a/jobs/JAQM_POST b/jobs/JAQM_POST index 7a120fbcd5..2c80e79dca 100755 --- a/jobs/JAQM_POST +++ b/jobs/JAQM_POST @@ -4,10 +4,6 @@ date export PS4=' $SECONDS + ' set -xue -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} diff --git a/jobs/JAQM_POST_STAT_PM25 b/jobs/JAQM_POST_STAT_PM25 index a8fe4bfe3f..d7b9143532 100755 --- a/jobs/JAQM_POST_STAT_PM25 +++ b/jobs/JAQM_POST_STAT_PM25 @@ -4,8 +4,6 @@ date export PS4=' $SECONDS + ' set -xue -#----------------------------------------------------------------------- -# Source the variable definitions file and the bash utility functions. #----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} diff --git a/jobs/JAQM_PRE_POST_STAT b/jobs/JAQM_PRE_POST_STAT index abe54f6dc9..813ba359d4 100755 --- a/jobs/JAQM_PRE_POST_STAT +++ b/jobs/JAQM_PRE_POST_STAT @@ -4,8 +4,6 @@ date export PS4=' $SECONDS + ' set -xue -#----------------------------------------------------------------------- -# Source the variable definitions file and the bash utility functions. 
#----------------------------------------------------------------------- export USHaqm=${USHaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} From 29bf087af3615d4d9e654a04f715790a8dbb2d1b Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Sat, 25 Nov 2023 02:25:37 +0000 Subject: [PATCH 14/24] clean J-job scripts and delete rocoto related information --- .../nexus/jaqm_nexus_emission_master.ecf | 1 - jobs/JAQM_FORECAST | 1 - jobs/JAQM_MAKE_ICS | 4 - jobs/JAQM_MAKE_LBCS | 3 - jobs/JAQM_NEXUS_POST_SPLIT | 1 - jobs/JAQM_POST | 24 +- scripts/exaqm_forecast.sh | 7 - scripts/exaqm_post.sh_orig | 326 ------------------ scripts/exaqm_pre_post_stat.sh | 3 - 9 files changed, 1 insertion(+), 369 deletions(-) delete mode 100755 scripts/exaqm_post.sh_orig diff --git a/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf b/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf index 91ce48bacd..f5999f4a7a 100644 --- a/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf +++ b/ecf/scripts/nexus/jaqm_nexus_emission_master.ecf @@ -18,7 +18,6 @@ export cyc="%CYC%" ############################################################ module load intel/${intel_ver} module load craype/${craype_ver} -#module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} diff --git a/jobs/JAQM_FORECAST b/jobs/JAQM_FORECAST index 1009ed3a55..662613b50c 100755 --- a/jobs/JAQM_FORECAST +++ b/jobs/JAQM_FORECAST @@ -13,7 +13,6 @@ export subcyc=0 . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh - export job=JAQM_FORECAST export share_pid=${share_pid:-${PDY}${cyc}} export pid=$share_pid diff --git a/jobs/JAQM_MAKE_ICS b/jobs/JAQM_MAKE_ICS index 281fe1af8c..9cf5f50213 100755 --- a/jobs/JAQM_MAKE_ICS +++ b/jobs/JAQM_MAKE_ICS @@ -68,19 +68,15 @@ for the FV3 (in NetCDF format). 
========================================================================" # #----------------------------------------------------------------------- -# # Set the name of and create the directory in which the output from this # script will be placed (if that directory doesn't already exist). -# #----------------------------------------------------------------------- # export INPUT_DATA="${INPUT_DATA:-${COMIN}/${cyc}}" mkdir -p ${INPUT_DATA} # #----------------------------------------------------------------------- -# # Set the run directory -# #----------------------------------------------------------------------- # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_MAKE_ICS}" diff --git a/jobs/JAQM_MAKE_LBCS b/jobs/JAQM_MAKE_LBCS index f841a4b5b1..1d54f6307e 100755 --- a/jobs/JAQM_MAKE_LBCS +++ b/jobs/JAQM_MAKE_LBCS @@ -43,7 +43,6 @@ export COMINm1="${COMIN:-${COMaqm}/${RUN}.${PDYm1}}" export COMINm2="${COMIN:-${COMaqm}/${RUN}.${PDYm2}}" export COMOUT="${COMOUT:-${COMaqm}/${RUN}.${PDY}/${cyc}}" - #----------------------------------------------------------------------- # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in @@ -70,10 +69,8 @@ hour zero). ========================================================================" # #----------------------------------------------------------------------- -# # Set the name of and create the directory in which the output from this # script will be placed (if it doesn't already exist). -# #----------------------------------------------------------------------- # export INPUT_DATA="${INPUT_DATA:-${COMIN}/${cyc}}" diff --git a/jobs/JAQM_NEXUS_POST_SPLIT b/jobs/JAQM_NEXUS_POST_SPLIT index 5d096a14ec..2a98204365 100755 --- a/jobs/JAQM_NEXUS_POST_SPLIT +++ b/jobs/JAQM_NEXUS_POST_SPLIT @@ -14,7 +14,6 @@ export pid=${pid:-$$} export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh - . 
$USHaqm/source_util_funcs.sh export DATA=${DATA:-${DATAROOT}/${jobid}} diff --git a/jobs/JAQM_POST b/jobs/JAQM_POST index 2c80e79dca..f935772d13 100755 --- a/jobs/JAQM_POST +++ b/jobs/JAQM_POST @@ -115,10 +115,6 @@ ${HOMEaqm}/scripts/exaqm_post.sh export err=$?; err_chk # #----------------------------------------------------------------------- -# Create a flag file to make rocoto aware that the run_post task has -# successfully completed. This flag is necessary for varying forecast -# hours (FCST_LEN_HRS: -1) -#----------------------------------------------------------------------- # if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} )) @@ -126,27 +122,9 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi -if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then - fcst_len_hrs=$( printf "%03d" "${FCST_LEN_HRS}" ) - if [ "${fhr}" = "${fcst_len_hrs}" ]; then - touch "${COMIN}/${cyc}/post_${PDY}${cyc}_task_complete.txt" - fi -fi # fcst_len_hrs=$( printf "%03d" "${FCST_LEN_HRS}" ) -#if [ "${fhr}" = "${fcst_len_hrs}" ]; then -# if [ "${KEEPDATA}" = "FALSE" ]; then -# cd ${DATAROOT} -# rm -rf $DATA -# share_pid="${WORKFLOW_ID}_${PDY}${cyc}" -# rm -rf *${share_pid} -# fi -#else -# if [ "${KEEPDATA}" = "FALSE" ]; then -# cd ${DATAROOT} -# rm -rf $DATA -# fi -#fi +# # Print exit message print_info_msg " ======================================================================== diff --git a/scripts/exaqm_forecast.sh b/scripts/exaqm_forecast.sh index 22ddbae9a7..a8c4cb248c 100755 --- a/scripts/exaqm_forecast.sh +++ b/scripts/exaqm_forecast.sh @@ -356,12 +356,6 @@ fi # #----------------------------------------------------------------------- # -# If running this cycle/ensemble member combination more than once (e.g. -# using rocotoboot), remove any time stamp file that may exist from the -# previous attempt. 
-# -#----------------------------------------------------------------------- -# cd ${DATA} rm -f time_stamp.out # @@ -501,7 +495,6 @@ for the current cycle's (cdate) run directory (DATA) failed: num_restart_hrs=${#restart_hrs[*]} for (( ih_rst=${num_restart_hrs}-1; ih_rst>=0; ih_rst-- )); do - #jp cdate_restart_hr=$( $DATE_UTIL --utc --date "${PDY} ${cyc} UTC + ${restart_hrs[ih_rst]} hours" "+%Y%m%d%H" ) cdate_restart_hr=`$NDATE +${restart_hrs[ih_rst]} ${PDY}${cyc}` rst_yyyymmdd="${cdate_restart_hr:0:8}" rst_hh="${cdate_restart_hr:8:2}" diff --git a/scripts/exaqm_post.sh_orig b/scripts/exaqm_post.sh_orig deleted file mode 100755 index b43b0a2b01..0000000000 --- a/scripts/exaqm_post.sh_orig +++ /dev/null @@ -1,326 +0,0 @@ -#!/bin/bash - -set -xe - -msg="JOB $job HAS BEGUN" -postmsg "$msg" - -export pgm=aqm_post - -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHaqm/source_util_funcs.sh -source_config_for_task "task_run_post" ${GLOBAL_VAR_DEFNS_FP} -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHaqm/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the ex-script for the task that runs the post-processor (UPP) on -the output files corresponding to a specified forecast hour. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set OpenMP variables. -# -#----------------------------------------------------------------------- -# -export KMP_AFFINITY=${KMP_AFFINITY_RUN_POST} -export OMP_NUM_THREADS=${OMP_NUM_THREADS_RUN_POST} -export OMP_STACKSIZE=${OMP_STACKSIZE_RUN_POST} -# -#----------------------------------------------------------------------- -# -# Load modules. -# -#----------------------------------------------------------------------- -# -eval ${PRE_TASK_CMDS} - -nprocs=$(( NNODES_RUN_POST*PPN_RUN_POST )) -if [ -z "${RUN_CMD_POST:-}" ] ; then - print_err_msg_exit "\ - Run command was not set in machine file. \ - Please set RUN_CMD_POST for your platform" -else - print_info_msg "$VERBOSE" " - All executables will be submitted with command \'${RUN_CMD_POST}\'." -fi -# -#----------------------------------------------------------------------- -# -# Remove any files from previous runs and stage necessary files in the -# temporary work directory specified by DATA_FHR. 
-# -#----------------------------------------------------------------------- -# -rm -f fort.* -cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat -if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then - post_config_fp="${CUSTOM_POST_CONFIG_FP}" - print_info_msg " -==================================================================== -Copying the user-defined post flat file specified by CUSTOM_POST_CONFIG_FP -to the temporary work directory (DATA_FHR): - CUSTOM_POST_CONFIG_FP = \"${CUSTOM_POST_CONFIG_FP}\" - DATA_FHR = \"${DATA_FHR}\" -====================================================================" -else - if [ "${CPL_AQM}" = "TRUE" ]; then - post_config_fp="${PARMdir}/upp/postxconfig-NT-AQM.txt" - else - post_config_fp="${PARMdir}/upp/postxconfig-NT-fv3lam.txt" - fi - print_info_msg " -==================================================================== -Copying the default post flat file specified by post_config_fp to the -temporary work directory (DATA_FHR): - post_config_fp = \"${post_config_fp}\" - DATA_FHR = \"${DATA_FHR}\" -====================================================================" -fi -cp ${post_config_fp} ./postxconfig-NT.txt -cp ${PARMdir}/upp/params_grib2_tbl_new . 
-if [ ${USE_CRTM} = "TRUE" ]; then - cp ${CRTM_DIR}/fix/EmisCoeff/IR_Water/Big_Endian/Nalli.IRwater.EmisCoeff.bin ./ - cp ${CRTM_DIR}/fix/EmisCoeff/MW_Water/Big_Endian/FAST*.bin ./ - cp ${CRTM_DIR}/fix/EmisCoeff/IR_Land/SEcategory/Big_Endian/NPOESS.IRland.EmisCoeff.bin ./ - cp ${CRTM_DIR}/fix/EmisCoeff/IR_Snow/SEcategory/Big_Endian/NPOESS.IRsnow.EmisCoeff.bin ./ - cp ${CRTM_DIR}/fix/EmisCoeff/IR_Ice/SEcategory/Big_Endian/NPOESS.IRice.EmisCoeff.bin ./ - cp ${CRTM_DIR}/fix/AerosolCoeff/Big_Endian/AerosolCoeff.bin ./ - cp ${CRTM_DIR}/fix/CloudCoeff/Big_Endian/CloudCoeff.bin ./ - cp ${CRTM_DIR}/fix/SpcCoeff/Big_Endian/*.bin ./ - cp ${CRTM_DIR}/fix/TauCoeff/ODPS/Big_Endian/*.bin ./ - print_info_msg " -==================================================================== -Copying the external CRTM fix files from CRTM_DIR to the temporary -work directory (DATA_FHR): - CRTM_DIR = \"${CRTM_DIR}\" - DATA_FHR = \"${DATA_FHR}\" -====================================================================" -fi -# -#----------------------------------------------------------------------- -# -# Get the cycle date and hour (in formats of yyyymmdd and hh, respectively) -# from CDATE. -# -#----------------------------------------------------------------------- -# -yyyymmdd=${PDY} -hh=${cyc} -# -#----------------------------------------------------------------------- -# -# Create the namelist file (itag) containing arguments to pass to the post- -# processor's executable. -# -#----------------------------------------------------------------------- -# -# Set the variable (mnts_secs_str) that determines the suffix in the names -# of the forecast model's write-component output files that specifies the -# minutes and seconds of the corresponding output forecast time. 
-# -# Note that if the forecast model is instructed to output at some hourly -# interval (via the output_fh parameter in the MODEL_CONFIG_FN file, -# with nsout set to a non-positive value), then the write-component -# output file names will not contain any suffix for the minutes and seconds. -# For this reason, when SUB_HOURLY_POST is not set to "TRUE", mnts_sec_str -# must be set to a null string. -# -mnts_secs_str="" -if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then - if [ ${fhr}${fmn} = "00000" ]; then - mnts_secs_str=":"$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${DT_ATMOS} seconds" "+%M:%S" ) - else - mnts_secs_str=":${fmn}:00" - fi -fi -# -# Set the names of the forecast model's write-component output files. -# -if [ "${WORKFLOW_MANAGER}" = "ecflow" ]; then - DATAFCST=$DATAROOT/${RUN}_forecast${dot_ensmem/./_}_${cyc}.${share_pid} - if [ ! -d ${DATAFCST} ]; then - echo "Fatal error DATAFCST not found in production mode" - exit 7 - fi -else - DATAFCST=$DATAROOT/run_fcst${dot_ensmem/./_}.${share_pid} -fi - -if [ "${CPL_AQM}" = "TRUE" ]; then - dyn_file="${COMIN}/${cyc}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}${mnts_secs_str}.nc" - phy_file="${COMIN}/${cyc}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}${mnts_secs_str}.nc" -else - dyn_file="${DATAFCST}/dynf${fhr}${mnts_secs_str}.nc" - phy_file="${DATAFCST}/phyf${fhr}${mnts_secs_str}.nc" -fi -# -# Set parameters that specify the actual time (not forecast time) of the -# output. -# -post_time=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${fhr} hours + ${fmn} minutes" "+%Y%m%d%H%M" ) -post_yyyy=${post_time:0:4} -post_mm=${post_time:4:2} -post_dd=${post_time:6:2} -post_hh=${post_time:8:2} -post_mn=${post_time:10:2} -# -# Create the input namelist file to the post-processor executable. 
-# -if [ "${CPL_AQM}" = "TRUE" ]; then - post_itag_add="aqf_on=.true.," -else - post_itag_add="" -fi -cat > itag <> $pgmout 2>errfile -export err=$?; err_chk -# -#----------------------------------------------------------------------- -# -# Move and rename the output files from the work directory to their final -# location in COMOUT. Also, create symlinks in COMOUT to the -# grib2 files that are needed by the data services group. Then delete -# the work directory. -# -#----------------------------------------------------------------------- -# -# Set variables needed in constructing the names of the grib2 files -# generated by UPP. -# -len_fhr=${#fhr} -if [ ${len_fhr} -eq 2 ]; then - post_fhr=${fhr} -elif [ ${len_fhr} -eq 3 ]; then - if [ "${fhr:0:1}" = "0" ]; then - post_fhr="${fhr:1}" - else - post_fhr="${fhr}" - fi -else - print_err_msg_exit "\ -The \${fhr} variable contains too few or too many characters: - fhr = \"$fhr\"" -fi - -post_mn_or_null="" -dot_post_mn_or_null="" -if [ "${post_mn}" != "00" ]; then - post_mn_or_null="${post_mn}" - dot_post_mn_or_null=".${post_mn}" -fi - -post_fn_suffix="GrbF${post_fhr}${dot_post_mn_or_null}" -post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.grib2" -# -# For convenience, change location to COMOUT (where the final output -# from UPP will be located). Then loop through the two files that UPP -# generates (i.e. "...prslev..." and "...natlev..." files) and move, -# rename, and create symlinks to them. 
-# -cd "${COMOUT}" -basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M ) -symlink_suffix="${dot_ensmem}.${basetime}f${fhr}${post_mn}" -if [ "${CPL_AQM}" = "TRUE" ]; then - fids=( "cmaq" ) -else - fids=( "prslev" "natlev" ) -fi -for fid in "${fids[@]}"; do - FID=$(echo_uppercase $fid) - post_orig_fn="${FID}.${post_fn_suffix}" - post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}" - mv ${DATA_FHR}/${post_orig_fn} ${post_renamed_fn} - - # DBN alert - if [ $SENDDBN = "TRUE" ]; then - $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn} - fi -done - -rm -rf ${DATA_FHR} - -# -#----------------------------------------------------------------------- -# -# Print message indicating successful completion of script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Post-processing for forecast hour $fhr completed successfully. - -Exiting script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" -========================================================================" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. 
-# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/scripts/exaqm_pre_post_stat.sh b/scripts/exaqm_pre_post_stat.sh index ba78c63e1f..890ad458a2 100755 --- a/scripts/exaqm_pre_post_stat.sh +++ b/scripts/exaqm_pre_post_stat.sh @@ -63,9 +63,6 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} )) CYCLE_IDX=$(( ${cyc_mod} / ${INCR_CYCL_FREQ} )) FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} - if [ "${WORKFLOW_MANAGER}" = "rocoto" ] && [ "${RUN_TASK_RUN_POST}" = "TRUE" ]; then - rm -f "${COMIN}/${cyc}/${TN_RUN_POST}_${PDY}${cyc}_task_complete.txt" - fi fi ist=1 From 3afe49b7067f0e4239705a8f110b8a9d658028e3 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Sat, 25 Nov 2023 03:53:27 +0000 Subject: [PATCH 15/24] correct J-job script for Post_Stat_PM2.5 --- jobs/JAQM_POST_STAT_PM25 | 2 -- 1 file changed, 2 deletions(-) diff --git a/jobs/JAQM_POST_STAT_PM25 b/jobs/JAQM_POST_STAT_PM25 index d7b9143532..dc9bde8143 100755 --- a/jobs/JAQM_POST_STAT_PM25 +++ b/jobs/JAQM_POST_STAT_PM25 @@ -71,8 +71,6 @@ This is the J-job script for the task that runs POST-UPP-STAT. # DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POST_STAT_PM25}" -mkdir -p ${PCOM} - if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export PCOM=${PCOM:-${COMOUT}/wmo} if [ ! 
-d "${PCOM}" ]; then From cc219364f378e73659aa3fb97f24a32b8f5c2841 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Wed, 29 Nov 2023 20:30:20 +0000 Subject: [PATCH 16/24] update forecast job/script provided by Lin to get rid of harward in forecast run dir --- ecf/defs/aqm_cycled.def | 22 ++++--- jobs/JAQM_DATA_CLEANUP | 46 --------------- jobs/JAQM_FORECAST | 7 +-- scripts/exaqm_forecast.sh | 121 ++++++++++++++++++++------------------ sorc/CMakeLists.txt | 2 +- 5 files changed, 81 insertions(+), 117 deletions(-) delete mode 100755 jobs/JAQM_DATA_CLEANUP diff --git a/ecf/defs/aqm_cycled.def b/ecf/defs/aqm_cycled.def index 79be777fa4..911b802976 100644 --- a/ecf/defs/aqm_cycled.def +++ b/ecf/defs/aqm_cycled.def @@ -1,6 +1,6 @@ suite nco_aqm family primary - edit aqm_ver 'v7.0.0' + edit aqm_ver 'v7.0.1' edit PACKAGEHOME '/lfs/h2/emc/global/noscrub/%EMC_USER%/para/packages/aqm.%aqm_ver%' edit NET 'aqm' edit RUN 'aqm' @@ -13,6 +13,9 @@ suite nco_aqm edit OUTPUTDIR '/lfs/h2/emc/ptmp/%EMC_USER%/ecflow_aqm/para/output/prod/today' family 00 edit CYC '00' + task cycle_end + edit ECF_FILES '%PACKAGEHOME%/ecf/scripts' + cron 23:00 family aqm family v1.0 edit ECF_FILES '%PACKAGEHOME%/ecf/scripts' @@ -57,8 +60,6 @@ suite nco_aqm family forecast task jaqm_forecast trigger ../nexus==complete and ../prep==complete and ../pts_fire_emis==complete - task jaqm_forecast_data_cleanup - trigger ./jaqm_forecast==complete endfamily family post task jaqm_post_f000 @@ -100,6 +101,9 @@ suite nco_aqm endfamily # 00 family 06 edit CYC '06' + task cycle_end + edit ECF_FILES '%PACKAGEHOME%/ecf/scripts' + cron 05:00 family aqm family v1.0 edit ECF_FILES '%PACKAGEHOME%/ecf/scripts' @@ -144,8 +148,6 @@ suite nco_aqm family forecast task jaqm_forecast trigger ../nexus==complete and ../prep==complete and ../pts_fire_emis==complete - task jaqm_forecast_data_cleanup - trigger ./jaqm_forecast==complete endfamily family post task jaqm_post_f000 @@ -385,6 +387,9 @@ suite nco_aqm endfamily # 06 family 
12 edit CYC '12' + task cycle_end + edit ECF_FILES '%PACKAGEHOME%/ecf/scripts' + cron 11:00 family aqm family v1.0 edit ECF_FILES '%PACKAGEHOME%/ecf/scripts' @@ -429,8 +434,6 @@ suite nco_aqm family forecast task jaqm_forecast trigger ../nexus==complete and ../prep==complete and ../pts_fire_emis==complete - task jaqm_forecast_data_cleanup - trigger ./jaqm_forecast==complete endfamily family post task jaqm_post_f000 @@ -670,6 +673,9 @@ suite nco_aqm endfamily # 12 family 18 edit CYC '18' + task cycle_end + edit ECF_FILES '%PACKAGEHOME%/ecf/scripts' + cron 17:00 family aqm family v1.0 edit ECF_FILES '%PACKAGEHOME%/ecf/scripts' @@ -714,8 +720,6 @@ suite nco_aqm family forecast task jaqm_forecast trigger ../nexus==complete and ../prep==complete and ../pts_fire_emis==complete - task jaqm_forecast_data_cleanup - trigger ./jaqm_forecast==complete endfamily family post task jaqm_post_f000 diff --git a/jobs/JAQM_DATA_CLEANUP b/jobs/JAQM_DATA_CLEANUP deleted file mode 100755 index 281a01433f..0000000000 --- a/jobs/JAQM_DATA_CLEANUP +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/bash - -date -export PS4='$SECONDS + ' -set -xue - -############################################## -# Obtain unique process id (pid) and make temp directory -############################################## -export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} -export USHaqm=${USHaqm:-${HOMEaqm}/ush} - -export cycle=t${cyc}z -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} - -. ${PARMaqm}/config/var_defns.sh -. ${USHaqm}/source_util_funcs.sh - -mkdir -p $DATA -cd $DATA - -############################################## -# Run setpdy and initialize PDY variables -############################################## -export CDATE=${PDY}${cyc} - -setpdy.sh -. ./PDY - -env -############################################## -# Set variables used in the script -############################################## -${HOMEaqm}/scripts/exaqm_data_cleanup.sh -status=$? 
-[[ $status -ne 0 ]] && exit $status - -########################################## -# Remove the Temporary working directory -########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA - -date diff --git a/jobs/JAQM_FORECAST b/jobs/JAQM_FORECAST index 662613b50c..012615867a 100755 --- a/jobs/JAQM_FORECAST +++ b/jobs/JAQM_FORECAST @@ -9,15 +9,12 @@ export USHaqm=${USHaqm:-${HOMEaqm}/ush} export PARMaqm=${PARMaqm:-${HOMEaqm}/parm} export subcyc=0 +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} . ${PARMaqm}/config/var_defns.sh . $USHaqm/source_util_funcs.sh -export job=JAQM_FORECAST -export share_pid=${share_pid:-${PDY}${cyc}} -export pid=$share_pid -export jobid=${job}.${pid} - export DATA=${DATA:-${DATAROOT}/${jobid}} mkdir -p ${DATA} cd ${DATA} diff --git a/scripts/exaqm_forecast.sh b/scripts/exaqm_forecast.sh index a8c4cb248c..2c7fb4aee8 100755 --- a/scripts/exaqm_forecast.sh +++ b/scripts/exaqm_forecast.sh @@ -396,7 +396,7 @@ create_symlink_to_file target="${FIELD_DICT_FP}" \ relative="${relative_link_flag}" if [ ${WRITE_DOPOST} = "TRUE" ]; then - cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat + cpreq ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then post_config_fp="${CUSTOM_POST_CONFIG_FP}" print_info_msg " @@ -414,9 +414,9 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then post_config_fp = \"${post_config_fp}\" ====================================================================" fi - cp ${post_config_fp} ./postxconfig-NT_FH00.txt - cp ${post_config_fp} ./postxconfig-NT.txt - cp ${PARMdir}/upp/params_grib2_tbl_new . + cpreq ${post_config_fp} ./postxconfig-NT_FH00.txt + cpreq ${post_config_fp} ./postxconfig-NT.txt + cpreq ${PARMdir}/upp/params_grib2_tbl_new . 
# Set itag for inline-post: if [ "${CPL_AQM}" = "TRUE" ]; then post_itag_add="aqf_on=.true.," @@ -453,7 +453,7 @@ for the current cycle's (cdate) run directory (DATA) failed: err_exit "${message_txt}" fi else - cp "${FV3_NML_FP}" "${DATA}/${FV3_NML_FN}" + cpreq "${FV3_NML_FP}" "${DATA}/${FV3_NML_FN}" fi # #----------------------------------------------------------------------- @@ -464,12 +464,7 @@ fi #----------------------------------------------------------------------- # flag_fcst_restart="FALSE" -if [ "${DO_FCST_RESTART}" = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then - cp input.nml input.nml_orig - cp model_configure model_configure_orig - if [ "${CPL_AQM}" = "TRUE" ]; then - cp aqm.rc aqm.rc_orig - fi +if [ "${DO_FCST_RESTART}" = "TRUE" ] && [ "$(ls -A ${COMOUT}/RESTART/*.coupler.res)" ]; then relative_link_flag="FALSE" flag_fcst_restart="TRUE" @@ -501,7 +496,7 @@ for the current cycle's (cdate) run directory (DATA) failed: num_rst_files=0 for file_id in "${file_ids[@]}"; do - if [ -e "${DATA}/RESTART/${rst_yyyymmdd}.${rst_hh}0000.${file_id}" ]; then + if [ -e "${COMOUT}/RESTART/${rst_yyyymmdd}.${rst_hh}0000.${file_id}" ]; then (( num_rst_files=num_rst_files+1 )) fi done @@ -517,7 +512,7 @@ for the current cycle's (cdate) run directory (DATA) failed: if [ -e "${file_id}" ]; then rm -f "${file_id}" fi - target="${DATA}/RESTART/${rst_yyyymmdd}.${rst_hh}0000.${file_id}" + target="${COMOUT}/RESTART/${rst_yyyymmdd}.${rst_hh}0000.${file_id}" symlink="${file_id}" create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" done @@ -604,18 +599,6 @@ fi # #----------------------------------------------------------------------- # -# Pre-generate symlink to forecast RESTART in DATA for early start of -# the next cycle -# -#----------------------------------------------------------------------- -# -if [ "${CPL_AQM}" = "TRUE" ]; then - # create an intermediate symlink to RESTART - ln -sf "${DATA}/RESTART" "${COMOUT}/RESTART" -fi -# 
-#----------------------------------------------------------------------- -# # Call the function that creates the NEMS configuration file within each # cycle directory. # @@ -634,6 +617,61 @@ fi # #----------------------------------------------------------------------- # +# make symbolic links to write forecast files directly in COMOUT +# +#----------------------------------------------------------------------- +# +fhr_ct=0 +fhr=0 +NLN=${NLN:-"/bin/ln -sf"} +while [ $fhr -le ${FCST_LEN_HRS} ]; do + fhr_ct=$(printf "%03d" $fhr) + source_dyn="dynf${fhr_ct}.nc" + source_phy="phyf${fhr_ct}.nc" + target_dyn="${COMOUT}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr_ct}.nc" + target_phy="${COMOUT}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr_ct}.nc" + eval $NLN ${target_dyn} ${source_dyn} + eval $NLN ${target_phy} ${source_phy} + (( fhr=fhr+1 )) +done +eval $NLN ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN} ${AQM_RC_PRODUCT_FN} +# +#----------------------------------------------------------------------- +# +# make symbolic links to write forecast RESTART files directly in COMOUT/RESTART +# +#----------------------------------------------------------------------- +# +mkdir -p ${COMOUT}/RESTART +cd ${DATA}/RESTART +file_ids=( "coupler.res" "fv_core.res.nc" "fv_core.res.tile1.nc" "fv_srf_wnd.res.tile1.nc" "fv_tracer.res.tile1.nc" "phy_data.nc" "sfc_data.nc" ) +num_file_ids=${#file_ids[*]} +read -a restart_hrs <<< "${RESTART_INTERVAL}" +num_restart_hrs=${#restart_hrs[*]} +# 06Z and 12Z +if [ $cyc = 06 -o $cyc = 12 ]; then + # 06Z and 12Z + for (( ih_rst=${num_restart_hrs}-1; ih_rst>=0; ih_rst-- )); do + cdate_restart_hr=`$NDATE +${restart_hrs[ih_rst]} ${PDY}${cyc}` + rst_yyyymmdd="${cdate_restart_hr:0:8}" + rst_hh="${cdate_restart_hr:8:2}" + for file_id in "${file_ids[@]}"; do + eval $NLN ${COMOUT}/RESTART/${rst_yyyymmdd}.${rst_hh}0000.${file_id} ${rst_yyyymmdd}.${rst_hh}0000.${file_id} + done + done +else + # 00Z and 18Z + cdate_restart_hr=`$NDATE +6 ${PDY}${cyc}` + 
rst_yyyymmdd="${cdate_restart_hr:0:8}" + rst_hh="${cdate_restart_hr:8:2}" + for file_id in "${file_ids[@]}"; do + eval $NLN ${COMOUT}/RESTART/${rst_yyyymmdd}.${rst_hh}0000.${file_id} ${file_id} + done +fi +cd ${DATA} +# +#----------------------------------------------------------------------- +# # Run the FV3-LAM model. Note that we have to launch the forecast from # the current cycle's directory because the FV3 executable will look for # input files in the current directory. Since those files have been @@ -645,37 +683,8 @@ fi startmsg eval ${RUN_CMD_FCST} ${FV3_EXEC_FP} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk -# -#----------------------------------------------------------------------- -# -# Copy RESTART directory to COMIN only for NCO mode and AQM. -# Copy AQM output product file to COMOUT only for NCO mode. -# Copy dyn and phy files to COMIN only for AQM. -# -#----------------------------------------------------------------------- -# -if [ "${CPL_AQM}" = "TRUE" ]; then - if [ -d "${COMOUT}/RESTART" ] && [ "$(ls -A ${DATA}/RESTART)" ]; then - rm -rf "${COMOUT}/RESTART" - fi - if [ "$(ls -A ${DATA}/RESTART)" ]; then - cp -Rp ${DATA}/RESTART ${COMOUT} - fi - - cp -p ${DATA}/${AQM_RC_PRODUCT_FN} ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN} - - fhr_ct=0 - fhr=0 - while [ $fhr -le ${FCST_LEN_HRS} ]; do - fhr_ct=$(printf "%03d" $fhr) - source_dyn="${DATA}/dynf${fhr_ct}.nc" - source_phy="${DATA}/phyf${fhr_ct}.nc" - target_dyn="${COMOUT}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr_ct}.nc" - target_phy="${COMOUT}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr_ct}.nc" - [ -f ${source_dyn} ] && cp -p ${source_dyn} ${target_dyn} - [ -f ${source_phy} ] && cp -p ${source_phy} ${target_phy} - (( fhr=fhr+1 )) - done +if [ -e "$pgmout" ]; then + cat $pgmout fi # #----------------------------------------------------------------------- diff --git a/sorc/CMakeLists.txt b/sorc/CMakeLists.txt index 57f9fafaaf..2b44a1674b 100644 --- a/sorc/CMakeLists.txt +++ 
b/sorc/CMakeLists.txt @@ -166,7 +166,7 @@ if (BUILD_UFS) if(NOT CCPP_SUITES) if(CPL_AQM) - set(CCPP_SUITES "FV3_GFS_v15p2,FV3_GFS_v16,FV3_GFS_v17_p8") + set(CCPP_SUITES "FV3_GFS_v16") else() set(CCPP_SUITES "FV3_GFS_2017_gfdlmp,FV3_GFS_2017_gfdlmp_regional,FV3_GFS_v15p2,FV3_GFS_v16,FV3_GFS_v17_p8,FV3_RRFS_v1beta,FV3_HRRR,FV3_GFS_v15_thompson_mynn_lam3km,FV3_WoFS_v0") endif() From a417c77b39efdc19ad878308bf2ffc1e3cb5b0c7 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Thu, 30 Nov 2023 03:18:54 +0000 Subject: [PATCH 17/24] add deleting in J-job scripts and rm cleanup excript and add more cycles in restart files --- jobs/JAQM_BIAS_CORRECTION_O3 | 12 ++++++++++++ jobs/JAQM_BIAS_CORRECTION_PM25 | 12 ++++++++++++ jobs/JAQM_FIRE_EMISSION | 14 +++++++++++++- jobs/JAQM_FORECAST | 15 ++++++++++++++- jobs/JAQM_ICS | 14 +++++++++++++- jobs/JAQM_LBCS | 14 +++++++++++++- jobs/JAQM_MAKE_ICS | 12 ++++++++++++ jobs/JAQM_MAKE_LBCS | 12 ++++++++++++ jobs/JAQM_NEXUS_EMISSION | 12 ++++++++++++ jobs/JAQM_NEXUS_POST_SPLIT | 14 +++++++++++++- jobs/JAQM_POINT_SOURCE | 14 +++++++++++++- jobs/JAQM_POST | 25 +++++++++++++++++++------ jobs/JAQM_POST_STAT_O3 | 12 ++++++++++++ jobs/JAQM_POST_STAT_PM25 | 12 ++++++++++++ jobs/JAQM_PRE_POST_STAT | 14 +++++++++++++- parm/config/var_defns.sh.nco.static | 2 +- scripts/exaqm_data_cleanup.sh | 26 -------------------------- 17 files changed, 196 insertions(+), 40 deletions(-) delete mode 100755 scripts/exaqm_data_cleanup.sh diff --git a/jobs/JAQM_BIAS_CORRECTION_O3 b/jobs/JAQM_BIAS_CORRECTION_O3 index f69e6b95de..7ed3f605b6 100755 --- a/jobs/JAQM_BIAS_CORRECTION_O3 +++ b/jobs/JAQM_BIAS_CORRECTION_O3 @@ -97,7 +97,18 @@ startmsg ${HOMEaqm}/scripts/exaqm_bias_correction_o3.sh export err=$?; err_chk # +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." 
+postmsg "${msg}" +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -112,3 +123,4 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 +date diff --git a/jobs/JAQM_BIAS_CORRECTION_PM25 b/jobs/JAQM_BIAS_CORRECTION_PM25 index f81fcbbc4b..ca8db66fd0 100755 --- a/jobs/JAQM_BIAS_CORRECTION_PM25 +++ b/jobs/JAQM_BIAS_CORRECTION_PM25 @@ -99,7 +99,18 @@ startmsg ${HOMEaqm}/scripts/exaqm_bias_correction_pm25.sh export err=$?; err_chk # +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." +postmsg "${msg}" +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -114,3 +125,4 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 +date diff --git a/jobs/JAQM_FIRE_EMISSION b/jobs/JAQM_FIRE_EMISSION index 83a95c741b..b1d81ecdb7 100755 --- a/jobs/JAQM_FIRE_EMISSION +++ b/jobs/JAQM_FIRE_EMISSION @@ -96,7 +96,18 @@ startmsg ${HOMEaqm}/scripts/exaqm_fire_emission.sh export err=$?; err_chk # - +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." 
+postmsg "${msg}" + +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -110,3 +121,4 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 +date diff --git a/jobs/JAQM_FORECAST b/jobs/JAQM_FORECAST index 012615867a..2ce84b86a2 100755 --- a/jobs/JAQM_FORECAST +++ b/jobs/JAQM_FORECAST @@ -86,7 +86,18 @@ startmsg ${HOMEaqm}/scripts/exaqm_forecast.sh export err=$?; err_chk # - +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." +postmsg "${msg}" + +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -100,3 +111,5 @@ In directory: \"${scrfunc_dir}\" #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 + +date diff --git a/jobs/JAQM_ICS b/jobs/JAQM_ICS index f2f558f3f6..3ef3a1371d 100755 --- a/jobs/JAQM_ICS +++ b/jobs/JAQM_ICS @@ -89,7 +89,18 @@ startmsg ${HOMEaqm}/scripts/exaqm_ics.sh export err=$?; err_chk # - +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." 
+postmsg "${msg}" + +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -103,3 +114,4 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 +date diff --git a/jobs/JAQM_LBCS b/jobs/JAQM_LBCS index 9fe0450f3e..68affdbb55 100755 --- a/jobs/JAQM_LBCS +++ b/jobs/JAQM_LBCS @@ -92,7 +92,18 @@ startmsg ${HOMEaqm}/scripts/exaqm_lbcs.sh export err=$?; err_chk # - +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." +postmsg "${msg}" + +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -106,3 +117,4 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 +date diff --git a/jobs/JAQM_MAKE_ICS b/jobs/JAQM_MAKE_ICS index 9cf5f50213..afdb55a187 100755 --- a/jobs/JAQM_MAKE_ICS +++ b/jobs/JAQM_MAKE_ICS @@ -103,7 +103,18 @@ startmsg ${HOMEaqm}/scripts/exaqm_make_ics.sh export err=$?; err_chk # +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." 
+postmsg "${msg}" +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -117,3 +128,4 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 +date diff --git a/jobs/JAQM_MAKE_LBCS b/jobs/JAQM_MAKE_LBCS index 1d54f6307e..501aa05947 100755 --- a/jobs/JAQM_MAKE_LBCS +++ b/jobs/JAQM_MAKE_LBCS @@ -106,7 +106,18 @@ startmsg ${HOMEaqm}/scripts/exaqm_make_lbcs.sh export err=$?; err_chk # +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." +postmsg "${msg}" +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -120,3 +131,4 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 +date diff --git a/jobs/JAQM_NEXUS_EMISSION b/jobs/JAQM_NEXUS_EMISSION index 4a9119ae11..c12f8cdb9b 100755 --- a/jobs/JAQM_NEXUS_EMISSION +++ b/jobs/JAQM_NEXUS_EMISSION @@ -108,7 +108,18 @@ startmsg ${HOMEaqm}/scripts/exaqm_nexus_emission.sh export err=$?; err_chk # +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." 
+postmsg "${msg}" +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -124,3 +135,4 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 +date diff --git a/jobs/JAQM_NEXUS_POST_SPLIT b/jobs/JAQM_NEXUS_POST_SPLIT index 2a98204365..d553bf5693 100755 --- a/jobs/JAQM_NEXUS_POST_SPLIT +++ b/jobs/JAQM_NEXUS_POST_SPLIT @@ -97,7 +97,18 @@ startmsg ${HOMEaqm}/scripts/exaqm_nexus_post_split.sh export err=$?; err_chk # - +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." +postmsg "${msg}" + +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -112,3 +123,4 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 +date diff --git a/jobs/JAQM_POINT_SOURCE b/jobs/JAQM_POINT_SOURCE index cd2549777b..983a4984ca 100755 --- a/jobs/JAQM_POINT_SOURCE +++ b/jobs/JAQM_POINT_SOURCE @@ -86,7 +86,18 @@ startmsg ${HOMEaqm}/scripts/exaqm_point_source.sh export err=$?; err_chk # - +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." 
+postmsg "${msg}" + +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -103,3 +114,4 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 +date diff --git a/jobs/JAQM_POST b/jobs/JAQM_POST index f935772d13..ea1476d78f 100755 --- a/jobs/JAQM_POST +++ b/jobs/JAQM_POST @@ -84,14 +84,14 @@ fi # mkdir -p ${COMOUT} -if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then - export DATA_FHR="${DATA:-$COMOUT}/$fhr$fmn" -else - export DATA_FHR="${DATA:-$COMOUT}/$fhr" -fi -check_for_preexist_dir_file "${DATA_FHR}" "delete" +export DATA_FHR="${DATA:-$COMOUT}/$fhr" + +#check_for_preexist_dir_file "${DATA_FHR}" "delete" + mkdir -p ${DATA_FHR} + cd ${DATA_FHR} + # #----------------------------------------------------------------------- # Make sure that fhr is a non-empty string consisting of only digits. @@ -125,6 +125,18 @@ fi # fcst_len_hrs=$( printf "%03d" "${FCST_LEN_HRS}" ) # +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." 
+postmsg "${msg}" + +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -139,3 +151,4 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 +date diff --git a/jobs/JAQM_POST_STAT_O3 b/jobs/JAQM_POST_STAT_O3 index e652d7b216..4a478adb2a 100755 --- a/jobs/JAQM_POST_STAT_O3 +++ b/jobs/JAQM_POST_STAT_O3 @@ -88,7 +88,18 @@ startmsg ${HOMEaqm}/scripts/exaqm_post_stat_o3.sh export err=$?; err_chk # +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." +postmsg "${msg}" +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -103,3 +114,4 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 +date diff --git a/jobs/JAQM_POST_STAT_PM25 b/jobs/JAQM_POST_STAT_PM25 index dc9bde8143..98d7c2f55d 100755 --- a/jobs/JAQM_POST_STAT_PM25 +++ b/jobs/JAQM_POST_STAT_PM25 @@ -90,7 +90,18 @@ startmsg ${HOMEaqm}/scripts/exaqm_post_stat_pm25.sh export err=$?; err_chk # +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." 
+postmsg "${msg}" +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -105,3 +116,4 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 +date diff --git a/jobs/JAQM_PRE_POST_STAT b/jobs/JAQM_PRE_POST_STAT index 813ba359d4..1452cbdef8 100755 --- a/jobs/JAQM_PRE_POST_STAT +++ b/jobs/JAQM_PRE_POST_STAT @@ -76,7 +76,18 @@ startmsg ${HOMEaqm}/scripts/exaqm_pre_post_stat.sh export err=$?; err_chk # - +====================================================================== +msg="JOB ${job} HAS COMPLETED NORMALLY." +postmsg "${msg}" + +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi + +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} +fi +======================================================================= # Print exit message print_info_msg " ======================================================================== @@ -91,3 +102,4 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 +date diff --git a/parm/config/var_defns.sh.nco.static b/parm/config/var_defns.sh.nco.static index bc35e99763..f6df76b308 100644 --- a/parm/config/var_defns.sh.nco.static +++ b/parm/config/var_defns.sh.nco.static @@ -304,7 +304,7 @@ OMP_NUM_THREADS_RUN_FCST='1' OMP_STACKSIZE_RUN_FCST='512m' DT_ATMOS='180' FHROT='0' -RESTART_INTERVAL='6 24 42 60' +RESTART_INTERVAL='6 12 18 24 42 60' WRITE_DOPOST='FALSE' LAYOUT_X='50' LAYOUT_Y='34' diff --git a/scripts/exaqm_data_cleanup.sh b/scripts/exaqm_data_cleanup.sh deleted file mode 100755 index efe9c932c6..0000000000 --- a/scripts/exaqm_data_cleanup.sh +++ /dev/null @@ -1,26 +0,0 @@ -#! 
/bin/bash - -set -xe -############################################## -# Clean up the DATA directory from previous cycle if found -############################################## -[[ $KEEPDATA = "YES" ]] && exit 0 - -############################################## -# Set variables used in the script -############################################## -CDATE=${PDY}${cyc} -GDATE=$($NDATE -24 $CDATE) -gPDY=$(echo $GDATE | cut -c1-8) -gcyc=$(echo $GDATE | cut -c9-10) - -############################################## -# Looking for the following directory for cleanup -# aqm_forecast_${gcyc}.${gPDY}${gcyc} -############################################## -target_for_delete=${DATAROOT}/aqm_forecast_${gcyc}.${gPDY}${gcyc} -echo "If DATA exist in ${target_for_delete} will be deleted" -[[ -d $target_for_delete ]] && rm -rf $target_for_delete - -##################################################### -exit 0 From 37988a62c432ad40db8b024c3ed0a9c262fae7c0 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Thu, 30 Nov 2023 04:06:55 +0000 Subject: [PATCH 18/24] update J-job scripts and exscript for post --- jobs/JAQM_BIAS_CORRECTION_O3 | 4 ++-- jobs/JAQM_BIAS_CORRECTION_PM25 | 4 ++-- jobs/JAQM_FIRE_EMISSION | 4 ++-- jobs/JAQM_FORECAST | 4 ++-- jobs/JAQM_ICS | 4 ++-- jobs/JAQM_LBCS | 4 ++-- jobs/JAQM_MAKE_ICS | 4 ++-- jobs/JAQM_MAKE_LBCS | 4 ++-- jobs/JAQM_NEXUS_EMISSION | 4 ++-- jobs/JAQM_NEXUS_POST_SPLIT | 4 ++-- jobs/JAQM_POINT_SOURCE | 4 ++-- jobs/JAQM_POST | 4 ++-- jobs/JAQM_POST_STAT_O3 | 4 ++-- jobs/JAQM_POST_STAT_PM25 | 4 ++-- jobs/JAQM_PRE_POST_STAT | 4 ++-- scripts/exaqm_post.sh | 2 -- 16 files changed, 30 insertions(+), 32 deletions(-) diff --git a/jobs/JAQM_BIAS_CORRECTION_O3 b/jobs/JAQM_BIAS_CORRECTION_O3 index 7ed3f605b6..a90c25039a 100755 --- a/jobs/JAQM_BIAS_CORRECTION_O3 +++ b/jobs/JAQM_BIAS_CORRECTION_O3 @@ -97,7 +97,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_bias_correction_o3.sh export err=$?; err_chk # 
-====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" @@ -108,7 +108,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_BIAS_CORRECTION_PM25 b/jobs/JAQM_BIAS_CORRECTION_PM25 index ca8db66fd0..1a5bf03543 100755 --- a/jobs/JAQM_BIAS_CORRECTION_PM25 +++ b/jobs/JAQM_BIAS_CORRECTION_PM25 @@ -99,7 +99,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_bias_correction_pm25.sh export err=$?; err_chk # -====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" @@ -110,7 +110,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_FIRE_EMISSION b/jobs/JAQM_FIRE_EMISSION index b1d81ecdb7..2444ceabb6 100755 --- a/jobs/JAQM_FIRE_EMISSION +++ b/jobs/JAQM_FIRE_EMISSION @@ -96,7 +96,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_fire_emission.sh export err=$?; err_chk # -====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." 
postmsg "${msg}" @@ -107,7 +107,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_FORECAST b/jobs/JAQM_FORECAST index 2ce84b86a2..ef6e3c4dcc 100755 --- a/jobs/JAQM_FORECAST +++ b/jobs/JAQM_FORECAST @@ -86,7 +86,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_forecast.sh export err=$?; err_chk # -====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" @@ -97,7 +97,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_ICS b/jobs/JAQM_ICS index 3ef3a1371d..53074c82ce 100755 --- a/jobs/JAQM_ICS +++ b/jobs/JAQM_ICS @@ -89,7 +89,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_ics.sh export err=$?; err_chk # -====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." 
postmsg "${msg}" @@ -100,7 +100,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_LBCS b/jobs/JAQM_LBCS index 68affdbb55..6381c1993c 100755 --- a/jobs/JAQM_LBCS +++ b/jobs/JAQM_LBCS @@ -92,7 +92,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_lbcs.sh export err=$?; err_chk # -====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" @@ -103,7 +103,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_MAKE_ICS b/jobs/JAQM_MAKE_ICS index afdb55a187..b720a2379d 100755 --- a/jobs/JAQM_MAKE_ICS +++ b/jobs/JAQM_MAKE_ICS @@ -103,7 +103,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_make_ics.sh export err=$?; err_chk # -====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." 
postmsg "${msg}" @@ -114,7 +114,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_MAKE_LBCS b/jobs/JAQM_MAKE_LBCS index 501aa05947..b89b67eeb8 100755 --- a/jobs/JAQM_MAKE_LBCS +++ b/jobs/JAQM_MAKE_LBCS @@ -106,7 +106,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_make_lbcs.sh export err=$?; err_chk # -====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" @@ -117,7 +117,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_NEXUS_EMISSION b/jobs/JAQM_NEXUS_EMISSION index c12f8cdb9b..5a41ff7f93 100755 --- a/jobs/JAQM_NEXUS_EMISSION +++ b/jobs/JAQM_NEXUS_EMISSION @@ -108,7 +108,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_nexus_emission.sh export err=$?; err_chk # -====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." 
postmsg "${msg}" @@ -119,7 +119,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_NEXUS_POST_SPLIT b/jobs/JAQM_NEXUS_POST_SPLIT index d553bf5693..b2efae917a 100755 --- a/jobs/JAQM_NEXUS_POST_SPLIT +++ b/jobs/JAQM_NEXUS_POST_SPLIT @@ -97,7 +97,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_nexus_post_split.sh export err=$?; err_chk # -====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" @@ -108,7 +108,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_POINT_SOURCE b/jobs/JAQM_POINT_SOURCE index 983a4984ca..42094cded4 100755 --- a/jobs/JAQM_POINT_SOURCE +++ b/jobs/JAQM_POINT_SOURCE @@ -86,7 +86,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_point_source.sh export err=$?; err_chk # -====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." 
postmsg "${msg}" @@ -97,7 +97,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_POST b/jobs/JAQM_POST index ea1476d78f..86559611a7 100755 --- a/jobs/JAQM_POST +++ b/jobs/JAQM_POST @@ -125,7 +125,7 @@ fi # fcst_len_hrs=$( printf "%03d" "${FCST_LEN_HRS}" ) # -====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" @@ -136,7 +136,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_POST_STAT_O3 b/jobs/JAQM_POST_STAT_O3 index 4a478adb2a..611d9de017 100755 --- a/jobs/JAQM_POST_STAT_O3 +++ b/jobs/JAQM_POST_STAT_O3 @@ -88,7 +88,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_post_stat_o3.sh export err=$?; err_chk # -====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." 
postmsg "${msg}" @@ -99,7 +99,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_POST_STAT_PM25 b/jobs/JAQM_POST_STAT_PM25 index 98d7c2f55d..72064985b8 100755 --- a/jobs/JAQM_POST_STAT_PM25 +++ b/jobs/JAQM_POST_STAT_PM25 @@ -90,7 +90,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_post_stat_pm25.sh export err=$?; err_chk # -====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" @@ -101,7 +101,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/jobs/JAQM_PRE_POST_STAT b/jobs/JAQM_PRE_POST_STAT index 1452cbdef8..1b17ee136c 100755 --- a/jobs/JAQM_PRE_POST_STAT +++ b/jobs/JAQM_PRE_POST_STAT @@ -76,7 +76,7 @@ startmsg ${HOMEaqm}/scripts/exaqm_pre_post_stat.sh export err=$?; err_chk # -====================================================================== +#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." 
postmsg "${msg}" @@ -87,7 +87,7 @@ fi if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi -======================================================================= +#======================================================================= # Print exit message print_info_msg " ======================================================================== diff --git a/scripts/exaqm_post.sh b/scripts/exaqm_post.sh index 4922248c2a..132fc64d32 100755 --- a/scripts/exaqm_post.sh +++ b/scripts/exaqm_post.sh @@ -277,8 +277,6 @@ for fid in "${fids[@]}"; do fi done -rm -rf ${DATA_FHR} - # #----------------------------------------------------------------------- # From 1dfdaedced231fb5dd8154b62eab80dd00573ad9 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Thu, 30 Nov 2023 14:05:50 +0000 Subject: [PATCH 19/24] clean up J-Post script --- jobs/JAQM_POST | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/jobs/JAQM_POST b/jobs/JAQM_POST index 86559611a7..0343212dd9 100755 --- a/jobs/JAQM_POST +++ b/jobs/JAQM_POST @@ -115,17 +115,6 @@ ${HOMEaqm}/scripts/exaqm_post.sh export err=$?; err_chk # #----------------------------------------------------------------------- -# -if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then - cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} )) - CYCLE_IDX=$(( ${cyc_mod} / ${INCR_CYCL_FREQ} )) - FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} -fi - -# -fcst_len_hrs=$( printf "%03d" "${FCST_LEN_HRS}" ) -# -#====================================================================== msg="JOB ${job} HAS COMPLETED NORMALLY." 
postmsg "${msg}" From 086733bb43285d9ddc523164ee4b082dec6ac8c9 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Thu, 30 Nov 2023 23:56:13 +0000 Subject: [PATCH 20/24] updated UPP jobs by Lin and move pgout from J-job to exscripts --- jobs/JAQM_BIAS_CORRECTION_O3 | 4 ---- jobs/JAQM_BIAS_CORRECTION_PM25 | 4 ---- jobs/JAQM_FIRE_EMISSION | 4 ---- jobs/JAQM_FORECAST | 4 ---- jobs/JAQM_ICS | 4 ---- jobs/JAQM_LBCS | 4 ---- jobs/JAQM_MAKE_ICS | 4 ---- jobs/JAQM_MAKE_LBCS | 4 ---- jobs/JAQM_NEXUS_EMISSION | 4 ---- jobs/JAQM_NEXUS_POST_SPLIT | 4 ---- jobs/JAQM_POINT_SOURCE | 4 ---- jobs/JAQM_POST | 24 ------------------------ jobs/JAQM_POST_STAT_O3 | 4 ---- jobs/JAQM_POST_STAT_PM25 | 4 ---- jobs/JAQM_PRE_POST_STAT | 4 ---- scripts/exaqm_bias_correction_o3.sh | 19 +++++++++++++++---- scripts/exaqm_bias_correction_pm25.sh | 19 +++++++++++++++---- scripts/exaqm_lbcs.sh | 4 +++- scripts/exaqm_make_ics.sh | 13 +++---------- scripts/exaqm_make_lbcs.sh | 3 +++ scripts/exaqm_nexus_emission.sh | 4 +++- scripts/exaqm_nexus_post_split.sh | 3 +++ scripts/exaqm_post.sh | 23 ++++++++++++----------- scripts/exaqm_post_stat_o3.sh | 6 ++++++ scripts/exaqm_post_stat_pm25.sh | 7 ++++++- ush/job_preamble.sh | 1 - 26 files changed, 69 insertions(+), 113 deletions(-) diff --git a/jobs/JAQM_BIAS_CORRECTION_O3 b/jobs/JAQM_BIAS_CORRECTION_O3 index a90c25039a..cdcbd7d500 100755 --- a/jobs/JAQM_BIAS_CORRECTION_O3 +++ b/jobs/JAQM_BIAS_CORRECTION_O3 @@ -101,10 +101,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi diff --git a/jobs/JAQM_BIAS_CORRECTION_PM25 b/jobs/JAQM_BIAS_CORRECTION_PM25 index 1a5bf03543..749f1d9841 100755 --- a/jobs/JAQM_BIAS_CORRECTION_PM25 +++ b/jobs/JAQM_BIAS_CORRECTION_PM25 @@ -103,10 +103,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." 
postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi diff --git a/jobs/JAQM_FIRE_EMISSION b/jobs/JAQM_FIRE_EMISSION index 2444ceabb6..8fc1376607 100755 --- a/jobs/JAQM_FIRE_EMISSION +++ b/jobs/JAQM_FIRE_EMISSION @@ -100,10 +100,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi diff --git a/jobs/JAQM_FORECAST b/jobs/JAQM_FORECAST index ef6e3c4dcc..1a4a79ef77 100755 --- a/jobs/JAQM_FORECAST +++ b/jobs/JAQM_FORECAST @@ -90,10 +90,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi diff --git a/jobs/JAQM_ICS b/jobs/JAQM_ICS index 53074c82ce..250c720b29 100755 --- a/jobs/JAQM_ICS +++ b/jobs/JAQM_ICS @@ -93,10 +93,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi diff --git a/jobs/JAQM_LBCS b/jobs/JAQM_LBCS index 6381c1993c..ace2b8dbd3 100755 --- a/jobs/JAQM_LBCS +++ b/jobs/JAQM_LBCS @@ -96,10 +96,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi diff --git a/jobs/JAQM_MAKE_ICS b/jobs/JAQM_MAKE_ICS index b720a2379d..bbc35c17ee 100755 --- a/jobs/JAQM_MAKE_ICS +++ b/jobs/JAQM_MAKE_ICS @@ -107,10 +107,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." 
postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi diff --git a/jobs/JAQM_MAKE_LBCS b/jobs/JAQM_MAKE_LBCS index b89b67eeb8..fcad7337ce 100755 --- a/jobs/JAQM_MAKE_LBCS +++ b/jobs/JAQM_MAKE_LBCS @@ -110,10 +110,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi diff --git a/jobs/JAQM_NEXUS_EMISSION b/jobs/JAQM_NEXUS_EMISSION index 5a41ff7f93..5db9fceb98 100755 --- a/jobs/JAQM_NEXUS_EMISSION +++ b/jobs/JAQM_NEXUS_EMISSION @@ -112,10 +112,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi diff --git a/jobs/JAQM_NEXUS_POST_SPLIT b/jobs/JAQM_NEXUS_POST_SPLIT index b2efae917a..425a29157d 100755 --- a/jobs/JAQM_NEXUS_POST_SPLIT +++ b/jobs/JAQM_NEXUS_POST_SPLIT @@ -101,10 +101,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi diff --git a/jobs/JAQM_POINT_SOURCE b/jobs/JAQM_POINT_SOURCE index 42094cded4..729cba4980 100755 --- a/jobs/JAQM_POINT_SOURCE +++ b/jobs/JAQM_POINT_SOURCE @@ -90,10 +90,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi diff --git a/jobs/JAQM_POST b/jobs/JAQM_POST index 0343212dd9..d47a972862 100755 --- a/jobs/JAQM_POST +++ b/jobs/JAQM_POST @@ -72,26 +72,7 @@ on the output files corresponding to a specified forecast hour. # Set the run directory. 
#----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/${cyc}}" -# -# If SUB_HOURLY_POST is not set to "TRUE", ensure that the forecast -# minutes (fmn) are set to "00". This is necessary in order to pass -# "fmn" into the post ex-script for the calculation of post_time. -# -if [ "${SUB_HOURLY_POST}" != "TRUE" ]; then - export fmn="00" -fi -# mkdir -p ${COMOUT} - -export DATA_FHR="${DATA:-$COMOUT}/$fhr" - -#check_for_preexist_dir_file "${DATA_FHR}" "delete" - -mkdir -p ${DATA_FHR} - -cd ${DATA_FHR} - # #----------------------------------------------------------------------- # Make sure that fhr is a non-empty string consisting of only digits. @@ -118,10 +99,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi @@ -140,4 +117,3 @@ In directory: \"${scrfunc_dir}\" # { restore_shell_opts; } > /dev/null 2>&1 -date diff --git a/jobs/JAQM_POST_STAT_O3 b/jobs/JAQM_POST_STAT_O3 index 611d9de017..de5a652072 100755 --- a/jobs/JAQM_POST_STAT_O3 +++ b/jobs/JAQM_POST_STAT_O3 @@ -92,10 +92,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi diff --git a/jobs/JAQM_POST_STAT_PM25 b/jobs/JAQM_POST_STAT_PM25 index 72064985b8..b6991a51b9 100755 --- a/jobs/JAQM_POST_STAT_PM25 +++ b/jobs/JAQM_POST_STAT_PM25 @@ -94,10 +94,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi diff --git a/jobs/JAQM_PRE_POST_STAT b/jobs/JAQM_PRE_POST_STAT index 1b17ee136c..1a07ed2348 100755 --- a/jobs/JAQM_PRE_POST_STAT +++ b/jobs/JAQM_PRE_POST_STAT @@ -80,10 +80,6 @@ export err=$?; err_chk msg="JOB ${job} HAS COMPLETED NORMALLY." 
postmsg "${msg}" -if [ -e "${pgmout}" ]; then - cat ${pgmout} -fi - if [ "${KEEPDATA}" != "YES" ]; then rm -rf ${DATA} fi diff --git a/scripts/exaqm_bias_correction_o3.sh b/scripts/exaqm_bias_correction_o3.sh index 542202c9a4..10641e6be7 100755 --- a/scripts/exaqm_bias_correction_o3.sh +++ b/scripts/exaqm_bias_correction_o3.sh @@ -155,6 +155,9 @@ mkdir -p "${DATA}/data" startmsg eval ${RUN_CMD_SERIAL} ${EXECaqm}/convert_airnow_csv ${cvt_input_fp} ${cvt_output_fp} ${cvt_pdy} ${cvt_pdy} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk + if [ -e "${pgmout}" ]; then + cat ${pgmout} + fi done #----------------------------------------------------------------------------- @@ -202,7 +205,9 @@ cp ${PARMaqm_utils}/bias_correction/config.interp.ozone.7-vars_${id_domain}.${cy startmsg eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_bias_interpolate config.interp.ozone.7-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk - +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi cp ${DATA}/out/ozone/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then @@ -245,7 +250,9 @@ cp ${PARMaqm_utils}/bias_correction/config.ozone.bias_corr_${id_domain}.${cyc}z startmsg eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_bias_correct config.ozone.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk - +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi cp ${DATA}/out/ozone.corrected* ${COMOUT} if [ "${cyc}" = "12" ]; then @@ -272,7 +279,9 @@ EOF1 startmsg eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_bias_cor_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk - +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi cp ${DATA}/${NET}.${cycle}.awpozcon*bc*.grib2 ${COMOUT} #----------------------------------------------------------------------------- @@ -336,7 +345,9 @@ EOF1 startmsg eval 
${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_maxi_bias_cor_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk - + if [ -e "${pgmout}" ]; then + cat ${pgmout} + fi # split into max_1h and max_8h files and copy to grib227 wgrib2 aqm-maxi_bc.${id_domain}.grib2 |grep "OZMAX1" | wgrib2 -i aqm-maxi_bc.${id_domain}.grib2 -grib ${NET}.${cycle}.max_1hr_o3_bc.${id_domain}.grib2 wgrib2 aqm-maxi_bc.${id_domain}.grib2 |grep "OZMAX8" | wgrib2 -i aqm-maxi_bc.${id_domain}.grib2 -grib ${NET}.${cycle}.max_8hr_o3_bc.${id_domain}.grib2 diff --git a/scripts/exaqm_bias_correction_pm25.sh b/scripts/exaqm_bias_correction_pm25.sh index d1a8efc0c0..c564f11eed 100755 --- a/scripts/exaqm_bias_correction_pm25.sh +++ b/scripts/exaqm_bias_correction_pm25.sh @@ -153,6 +153,9 @@ mkdir -p "${DATA}/data" startmsg eval ${RUN_CMD_SERIAL} ${EXECaqm}/convert_airnow_csv ${cvt_input_fp} ${cvt_output_fp} ${cvt_pdy} ${cvt_pdy} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk + if [ -e "${pgmout}" ]; then + cat ${pgmout} + fi done #----------------------------------------------------------------------------- @@ -200,7 +203,9 @@ cp ${PARMaqm_utils}/bias_correction/config.interp.pm2.5.5-vars_${id_domain}.${cy startmsg eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_bias_interpolate config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$? 
err_chk - +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi cp ${DATA}/out/pm25/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then @@ -224,7 +229,9 @@ cp ${PARMaqm_utils}/bias_correction/bias_thresholds.pm2.5.2015.1030.32-sites.txt eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_bias_correct config.pm2.5.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk - +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi cp $DATA/out/pm2.5.corrected* ${COMOUT} if [ "${cyc}" = "12" ]; then @@ -250,7 +257,9 @@ EOF1 startmsg eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_bias_cor_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk - +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi cp ${DATA}/${NET}.${cycle}.pm25*bc*.grib2 ${COMOUT} #----------------------------------------------------------------------- @@ -313,7 +322,9 @@ EOF1 startmsg eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_maxi_bias_cor_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk - + if [ -e "${pgmout}" ]; then + cat ${pgmout} + fi # split into two files: one for 24hr_ave and one for 1h_max wgrib2 aqm-pm25_bc.${id_domain}.grib2 |grep "PMTF" | ${WGRIB2} -i aqm-pm25_bc.${id_domain}.grib2 -grib aqm.t${cyc}z.ave_24hr_pm25_bc.793.grib2 wgrib2 aqm-pm25_bc.${id_domain}.grib2 |grep "PDMAX1" | ${WGRIB2} -i aqm-pm25_bc.${id_domain}.grib2 -grib aqm.t${cyc}z.max_1hr_pm25_bc.793.grib2 diff --git a/scripts/exaqm_lbcs.sh b/scripts/exaqm_lbcs.sh index 66c42c33ce..dd543fc47d 100755 --- a/scripts/exaqm_lbcs.sh +++ b/scripts/exaqm_lbcs.sh @@ -288,7 +288,9 @@ Please ensure that you've built this executable." 
sync eval ${RUN_CMD_AQMLBC} ${exec_fp} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk - + if [ -e "${pgmout}" ]; then + cat ${pgmout} + fi cp -rp ${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f*.nc ${INPUT_DATA} print_info_msg " diff --git a/scripts/exaqm_make_ics.sh b/scripts/exaqm_make_ics.sh index 1e2a768304..13a8b180a6 100755 --- a/scripts/exaqm_make_ics.sh +++ b/scripts/exaqm_make_ics.sh @@ -590,16 +590,9 @@ fi startmsg eval ${RUN_CMD_UTILS} ${exec_fp} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk -#eval ${RUN_CMD_UTILS} ${exec_fp} ${REDIRECT_OUT_ERR} || \ -# print_err_msg_exit "\ -#Call to executable (exec_fp) to generate surface and initial conditions -#(ICs) files for the FV3-LAM failed: -# exec_fp = \"${exec_fp}\" -#The external model from which the ICs files are to be generated is: -# EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" -#The external model files that are inputs to the executable (exec_fp) are -#located in the following directory: -# extrn_mdl_staging_dir = \"${extrn_mdl_staging_dir}\"" +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi # #----------------------------------------------------------------------- # diff --git a/scripts/exaqm_make_lbcs.sh b/scripts/exaqm_make_lbcs.sh index f818d75e6f..922fdc4870 100755 --- a/scripts/exaqm_make_lbcs.sh +++ b/scripts/exaqm_make_lbcs.sh @@ -556,6 +556,9 @@ $settings" startmsg eval ${RUN_CMD_UTILS} ${exec_fp} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk + if [ -e "${pgmout}" ]; then + cat ${pgmout} + fi # # Move LBCs file for the current lateral boundary update time to the LBCs # work directory. 
Note that we rename the file by including in its name diff --git a/scripts/exaqm_nexus_emission.sh b/scripts/exaqm_nexus_emission.sh index 9b776fa6e5..6e97f3c2e0 100755 --- a/scripts/exaqm_nexus_emission.sh +++ b/scripts/exaqm_nexus_emission.sh @@ -363,7 +363,9 @@ fi startmsg eval ${RUN_CMD_AQM} ${EXECaqm}/nexus -c NEXUS_Config.rc -r grid_spec.nc -o NEXUS_Expt_split.nc ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk - +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi # #----------------------------------------------------------------------- # diff --git a/scripts/exaqm_nexus_post_split.sh b/scripts/exaqm_nexus_post_split.sh index 43b5e0ffb5..46f7c38ae1 100755 --- a/scripts/exaqm_nexus_post_split.sh +++ b/scripts/exaqm_nexus_post_split.sh @@ -89,6 +89,9 @@ if [ "${NUM_SPLIT_NEXUS}" = "01" ]; then else ${ARL_NEXUS_DIR}/utils/python/concatenate_nexus_post_split.py "${COMIN}/${cyc}/NEXUS/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.*.nc" "${DATA}/NEXUS_Expt_combined.nc" >> $pgmout 2>errfile export err=$? + if [ -e "${pgmout}" ]; then + cat ${pgmout} + fi if [ $err -ne 0 ]; then message_txt="Call to python script \"concatenate_nexus_post_split.py\" failed." err_exit "${message_txt}" diff --git a/scripts/exaqm_post.sh b/scripts/exaqm_post.sh index 132fc64d32..a62d4a7286 100755 --- a/scripts/exaqm_post.sh +++ b/scripts/exaqm_post.sh @@ -83,7 +83,7 @@ fi #----------------------------------------------------------------------- # # Remove any files from previous runs and stage necessary files in the -# temporary work directory specified by DATA_FHR. +# temporary work directory specified by DATA. 
# #----------------------------------------------------------------------- # @@ -94,9 +94,9 @@ if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then print_info_msg " ==================================================================== Copying the user-defined post flat file specified by CUSTOM_POST_CONFIG_FP -to the temporary work directory (DATA_FHR): +to the temporary work directory (DATA): CUSTOM_POST_CONFIG_FP = \"${CUSTOM_POST_CONFIG_FP}\" - DATA_FHR = \"${DATA_FHR}\" + DATA = \"${DATA}\" ====================================================================" else if [ "${CPL_AQM}" = "TRUE" ]; then @@ -107,9 +107,9 @@ else print_info_msg " ==================================================================== Copying the default post flat file specified by post_config_fp to the -temporary work directory (DATA_FHR): +temporary work directory (DATA): post_config_fp = \"${post_config_fp}\" - DATA_FHR = \"${DATA_FHR}\" + DATA = \"${DATA}\" ====================================================================" fi cp ${post_config_fp} ./postxconfig-NT.txt @@ -127,9 +127,9 @@ if [ ${USE_CRTM} = "TRUE" ]; then print_info_msg " ==================================================================== Copying the external CRTM fix files from CRTM_DIR to the temporary -work directory (DATA_FHR): +work directory (DATA): CRTM_DIR = \"${CRTM_DIR}\" - DATA_FHR = \"${DATA_FHR}\" + DATA = \"${DATA}\" ====================================================================" fi # @@ -203,7 +203,7 @@ EOF # #----------------------------------------------------------------------- # -# Run the UPP executable in the temporary directory (DATA_FHR) for this +# Run the UPP executable in the temporary directory (DATA) for this # output time. # #----------------------------------------------------------------------- @@ -214,6 +214,9 @@ Starting post-processing for fhr = $fhr hr..." 
startmsg eval ${RUN_CMD_POST} ${EXECaqm}/upp.x < itag ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi # #----------------------------------------------------------------------- # @@ -258,8 +261,6 @@ post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.gri # rename, and create symlinks to them. # cd "${COMOUT}" -basetime=$yyyymmdd$hh -symlink_suffix="${dot_ensmem}.${basetime}f${fhr}${post_mn}" if [ "${CPL_AQM}" = "TRUE" ]; then fids=( "cmaq" ) else @@ -269,7 +270,7 @@ for fid in "${fids[@]}"; do FID=$(echo_uppercase $fid) post_orig_fn="${FID}.${post_fn_suffix}" post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}" - mv ${DATA_FHR}/${post_orig_fn} ${post_renamed_fn} + mv ${DATA}/${post_orig_fn} ${post_renamed_fn} # DBN alert if [ $SENDDBN = "YES" ]; then diff --git a/scripts/exaqm_post_stat_o3.sh b/scripts/exaqm_post_stat_o3.sh index c94e223da0..4efed817d9 100755 --- a/scripts/exaqm_post_stat_o3.sh +++ b/scripts/exaqm_post_stat_o3.sh @@ -109,6 +109,9 @@ EOF1 startmsg eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk +f [ -e "${pgmout}" ]; then + cat ${pgmout} +fi if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} )) @@ -230,6 +233,9 @@ EOF1 startmsg eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk + if [ -e "${pgmout}" ]; then + cat ${pgmout} + fi # split into max_1h and max_8h files and copy to grib227 wgrib2 aqm-maxi.${id_domain}.grib2 |grep "OZMAX1" | wgrib2 -i aqm-maxi.${id_domain}.grib2 -grib ${NET}.${cycle}.max_1hr_o3.${id_domain}.grib2 diff --git a/scripts/exaqm_post_stat_pm25.sh b/scripts/exaqm_post_stat_pm25.sh index a553ee7664..12b13c9004 100755 --- a/scripts/exaqm_post_stat_pm25.sh +++ b/scripts/exaqm_post_stat_pm25.sh @@ -112,7 +112,9 @@ 
EOF1 startmsg eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk - +if [ -e "${pgmout}" ]; then + cat ${pgmout} +fi cat ${NET}.${cycle}.pm25.*.${id_domain}.grib2 >> ${NET}.${cycle}.1hpm25.${id_domain}.grib2 export grid227="lambert:265.0000:25.0000:25.0000 226.5410:1473:5079.000 12.1900:1025:5079.000" @@ -210,6 +212,9 @@ EOF1 startmsg eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk + if [ -e "${pgmout}" ]; then + cat ${pgmout} + fi wgrib2 ${NET}_pm25_24h_ave.${id_domain}.grib2 |grep "PMTF" | wgrib2 -i ${NET}_pm25_24h_ave.${id_domain}.grib2 -grib ${NET}.${cycle}.ave_24hr_pm25.${id_domain}.grib2 wgrib2 ${NET}_pm25_24h_ave.${id_domain}.grib2 |grep "PDMAX1" | wgrib2 -i ${NET}_pm25_24h_ave.${id_domain}.grib2 -grib ${NET}.${cycle}.max_1hr_pm25.${id_domain}.grib2 diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh index 0fb80f7f45..04e6ebbdd9 100755 --- a/ush/job_preamble.sh +++ b/ush/job_preamble.sh @@ -26,7 +26,6 @@ export NET="${NET:-${NET_dfv}}" export RUN="${RUN:-${RUN_dfv}}" export model_ver="${model_ver:-${model_ver_dfv}}" export COMROOT="${COMROOT:-${COMROOT_dfv}}" -#export LOGBASEDIR="${LOGBASEDIR:-${LOGBASEDIR_dfv}}" export KEEPDATA="${KEEPDATA:-${KEEPDATA_dfv}}" export MAILTO="${MAILTO:-${MAILTO_dfv}}" From 660b82fa078750639ba6e63a09673ad4c8b6d9cf Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Fri, 1 Dec 2023 00:10:16 +0000 Subject: [PATCH 21/24] update hash number for UFSWM --- sorc/Externals.cfg | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sorc/Externals.cfg b/sorc/Externals.cfg index 40f8465cbe..ecf7c12315 100644 --- a/sorc/Externals.cfg +++ b/sorc/Externals.cfg @@ -12,8 +12,7 @@ protocol = git repo_url = https://github.com/ufs-community/ufs-weather-model # Specify either a branch name or a hash but not both. 
#branch = production/AQM.v7 -#hash = 4cf17f7 -hash = 80d0fe8 +hash = 5aa916b local_path = ufs-weather-model required = True From 9f1da80492bd3b47d9a3d17941e1a1d62579d524 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Fri, 1 Dec 2023 01:50:09 +0000 Subject: [PATCH 22/24] correct a typo in exaqm_post_stat_o3.sh --- scripts/exaqm_post_stat_o3.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/exaqm_post_stat_o3.sh b/scripts/exaqm_post_stat_o3.sh index 4efed817d9..2d1685fdbb 100755 --- a/scripts/exaqm_post_stat_o3.sh +++ b/scripts/exaqm_post_stat_o3.sh @@ -109,7 +109,7 @@ EOF1 startmsg eval ${RUN_CMD_SERIAL} ${EXECaqm}/aqm_post_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} >> $pgmout 2>errfile export err=$?; err_chk -f [ -e "${pgmout}" ]; then +if [ -e "${pgmout}" ]; then cat ${pgmout} fi From 125c67e64610fd7aed0ae0787a4b9966c8517d85 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Fri, 1 Dec 2023 17:48:01 +0000 Subject: [PATCH 23/24] update hash numbers with Externals.cfg --- sorc/Externals.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sorc/Externals.cfg b/sorc/Externals.cfg index ecf7c12315..981c5573e6 100644 --- a/sorc/Externals.cfg +++ b/sorc/Externals.cfg @@ -21,7 +21,7 @@ protocol = git repo_url = https://github.com/NOAA-EMC/UPP # Specify either a branch name or a hash but not both. #branch = develop -hash = 54cbb57 +hash = 59a0175 local_path = UPP required = True @@ -30,7 +30,7 @@ protocol = git repo_url = https://github.com/noaa-oar-arl/NEXUS # Specify either a branch name or a hash but not both. 
#branch = develop -hash = 9bfc6f4 +hash = 12f932f local_path = arl_nexus required = True From 8eaada333507067cfa9ccdafc33376c93645e800 Mon Sep 17 00:00:00 2001 From: "Jianping.Huang" Date: Fri, 1 Dec 2023 20:17:46 +0000 Subject: [PATCH 24/24] update External.cfg --- sorc/Externals.cfg | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/sorc/Externals.cfg b/sorc/Externals.cfg index 981c5573e6..66a232804c 100644 --- a/sorc/Externals.cfg +++ b/sorc/Externals.cfg @@ -12,7 +12,7 @@ protocol = git repo_url = https://github.com/ufs-community/ufs-weather-model # Specify either a branch name or a hash but not both. #branch = production/AQM.v7 -hash = 5aa916b +hash = 5aa916b local_path = ufs-weather-model required = True @@ -21,16 +21,16 @@ protocol = git repo_url = https://github.com/NOAA-EMC/UPP # Specify either a branch name or a hash but not both. #branch = develop -hash = 59a0175 +hash = 59a0175 local_path = UPP required = True [arl_nexus] protocol = git repo_url = https://github.com/noaa-oar-arl/NEXUS -# Specify either a branch name or a hash but not both. #branch = develop -hash = 12f932f +# Specify either a branch name or a hash but not both. +hash = 12f932f local_path = arl_nexus required = True @@ -39,9 +39,10 @@ protocol = git repo_url = https://github.com/NOAA-EMC/AQM-utils # Specify either a branch name or a hash but not both. #branch = develop -hash = b4d37a8 +hash = b4d37a8 local_path = AQM-utils required = True [externals_description] schema_version = 1.0.0 +