diff --git a/FV3GFSwfm/cycGSDnoah/INFO b/FV3GFSwfm/cycGSDnoah/INFO new file mode 100644 index 0000000000..4ab6cbb3c1 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/INFO @@ -0,0 +1,51 @@ +17Sep20 +- use RESTART directories + ** get floating overflow error + +22Sep20 + ** add print statements to sorc/fv3gfs.fd/FV3/atmos_cubed_sphere/tools/fv_restart.F90 + error in water_nc variable + + JKH: ============== + JKH: qname=water_nc + JKH: qmin= 1.0000000E+30 + JKH: qmax= 1.0000000E+30 +forrtl: error (72): floating overflow +Image PC Routine Line Source +global_fv3gfs_ccp 0000000007FE53FE Unknown Unknown Unknown +libpthread-2.17.s 00002AED36F2A5F0 Unknown Unknown Unknown +global_fv3gfs_ccp 0000000000D1690D fv_grid_utils_mod 2992 fv_grid_utils.F90 +global_fv3gfs_ccp 00000000020A8AA5 fv_restart_mod_mp 1500 fv_restart.F90 +global_fv3gfs_ccp 000000000205F060 fv_restart_mod_mp 733 fv_restart.F90 +global_fv3gfs_ccp 00000000008715DA atmosphere_mod_mp 405 atmosphere.F90 +global_fv3gfs_ccp 00000000006551F2 atmos_model_mod_m 504 atmos_model.F90 +global_fv3gfs_ccp 0000000000623E98 module_fcst_grid_ 380 module_fcst_grid_comp.F90 + + ** create ICs using chgres_cube + get floating invalid error in module_write_netcdf_parallel + + PASS: fcstRUN phase 2, na = 0 time is 4.17178702354431 + in write grid comp, nf_hours= 0 + in wrt run, nf_hours= 0 4 0 nseconds_num= + 0 1 FBCount= 3 cfhour=000 + ichunk2d,jchunk2d 1536 20 + ichunk3d,jchunk3d,kchunk3d 1536 20 64 +forrtl: error (65): floating invalid +Image PC Routine Line Source +global_fv3gfs_ccp 0000000007FE53FE Unknown Unknown Unknown +libpthread-2.17.s 00002B33FD86A5F0 Unknown Unknown Unknown +global_fv3gfs_ccp 0000000002CEEAA1 module_write_netc 383 module_write_netcdf_parallel.F90 +global_fv3gfs_ccp 0000000002AF17E5 module_wrt_grid_c 1533 module_wrt_grid_comp.F90 + + ** change config.fcst to see what happens when you use netcdf instead of netcdf_parallel + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + + same error (atmf file, sfc 
file) + try changing first one + + export OUTPUT_FILETYPES=" 'netcdf' 'netcdf_parallel' " + + then get same 'divide by zero' error in cu_gf_sh.F90 + + !-convective-scale velocity w* + zws(i) = max(0.,flux_tun(i)*0.41*buo_flux*zo(i,2)*g/t(i,1)) diff --git a/FV3GFSwfm/cycGSDnoah/aeroics.xml b/FV3GFSwfm/cycGSDnoah/aeroics.xml new file mode 100644 index 0000000000..ced9686d06 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/aeroics.xml @@ -0,0 +1,134 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1:ppn=3"> + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + &SDATE; &EDATE; &INTERVAL; + + + + &JOBS_DIR;/aeroic.sh + + &PSLOT;_gdasaeroic_@H + &ACCOUNT; + &QUEUE_AEROIC_GFS; + &RESOURCES_AEROIC_GFS; + &WALLTIME_AEROIC_GFS; + &NATIVE_AEROIC_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gdasaeroic.log + + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + CDATE@Y@m@d@H + AEROCASE&CASE; + FV3ICS_DIR&ICSDIR;/&CDUMP;.@Y@m@d/@H/INPUT/ + FIXfv3&FIXDIR;/fix_fv3 + + + + &ICSDIR;/&CDUMP;.@Y@m@d/@H/INPUT/gfs_data.tile6.nc + &ICSDIR;/&CDUMP;.@Y@m@d/@H/INPUT/sfc_data.tile6.nc + + + + + + + + 041 042 043 044 045 046 047 048 049 050 051 052 053 054 055 056 057 058 059 060 061 062 063 064 065 066 067 068 069 070 071 072 073 074 075 076 077 078 079 080 + + + + &JOBS_DIR;/aeroic.sh + + &PSLOT;_enkfaeroic_@H + &ACCOUNT; + &QUEUE_AEROIC_GFS; + &RESOURCES_AEROIC_GFS; + &WALLTIME_AEROIC_GFS; + &NATIVE_AEROIC_GFS; + + &ROTDIR;/logs/@Y@m@d@H/enkfaeroic#member#.log + + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + CDATE@Y@m@d@H + AEROCASE&ECASE; + FV3ICS_DIR&ICSDIR;/&ECDUMP;.@Y@m@d/@H/mem#member#/INPUT/ + FIXfv3&FIXDIR;/fix_fv3 + + + + &ICSDIR;/&ECDUMP;.@Y@m@d/@H/mem#member#/INPUT/gfs_data.tile6.nc + &ICSDIR;/&ECDUMP;.@Y@m@d/@H/mem#member#/INPUT/sfc_data.tile6.nc + + + + + + + + diff --git a/FV3GFSwfm/cycGSDnoah/config.anal b/FV3GFSwfm/cycGSDnoah/config.anal new file mode 100755 index 0000000000..35ff5e8299 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.anal @@ -0,0 
+1,130 @@ +#!/bin/ksh -x + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. $EXPDIR/config.resources anal + +if [ $DONST = "YES" ]; then + . $EXPDIR/config.nsst +fi + +if [[ "$CDUMP" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="NO" +fi + +export npe_gsi=$npe_anal + +if [[ "$CDUMP" == "gfs" ]] ; then + export npe_gsi=$npe_anal_gfs + export nth_anal=$nth_anal_gfs +fi + +# Set parameters specific to L127 +if [ $LEVS = "128" ]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,$SETUP" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +if [ $OUTPUT_FILE = "nemsio" ]; then + export DO_CALC_INCREMENT="YES" + export DO_CALC_ANALYSIS="NO" +fi + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Use experimental dumps in GFS v16 parallels +export ABIBF="/dev/null" +if [[ "$CDATE" -ge "2019022800" ]] ; then + export ABIBF="$DMPDIR/${CDUMP}x.${PDY}/${cyc}/${CDUMP}.t${cyc}z.gsrcsr.tm00.bufr_d" + if [[ "$CDATE" -ge "2019111000" && "$CDATE" -le "2020052612" ]]; then + export ABIBF="$DMPDIR/${CDUMP}y.${PDY}/${cyc}/${CDUMP}.t${cyc}z.gsrcsr.tm00.bufr_d" + fi +fi + +export AHIBF="/dev/null" +if [[ "$CDATE" -ge "2019042300" ]]; then + export AHIBF="$DMPDIR/${CDUMP}x.${PDY}/${cyc}/${CDUMP}.t${cyc}z.ahicsr.tm00.bufr_d" +fi + + +# Adjust data usage for GFS v16 parallels +# +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +# +# Set default values +export 
CONVINFO=$FIXgsi/global_convinfo.txt +export OZINFO=$FIXgsi/global_ozinfo.txt +export SATINFO=$FIXgsi/global_satinfo.txt +export OBERROR=$FIXgsi/prepobs_errtable.global + + +# Set convinfo and prepobs.errtable.global for start of GFS v16 parallels +if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 +fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps +if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 +fi + +# Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations +if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "2020052612" ]]; then + export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2020040718 +fi + +# NOTE: +# As of 2020052612, gfsv16_historical/global_convinfo.txt.2020052612 is +# identical to ../global_convinfo.txt. Thus, the logic below is not +# needed at this time. 
+# Assimilate COSMIC-2 GPS +##if [[ "$CDATE" -ge "2020052612" && "$CDATE" -lt "YYYYMMDDHH" ]]; then +## export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2020052612 +##fi + + +# Turn off assimilation of OMPS during period of bad data +if [[ "$CDATE" -ge "2020011600" && "$CDATE" -lt "2020011806" ]]; then + export OZINFO=$FIXgsi/gfsv16_historical/global_ozinfo.txt.2020011600 +fi + + +# Set satinfo for start of GFS v16 parallels +if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2019021900 +fi + +# Turn on assimilation of Metop-C AMSUA and MHS +if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020022012" ]]; then + export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2019110706 +fi + +# NOTE: +# As of 2020022012, gfsv16_historical/global_satinfo.txt.2020022012 is +# identical to ../global_satinfo.txt. Thus, the logic below is not +# needed at this time +# +# Turn off assimilation of all Metop-A MHS +## if [[ "$CDATE" -ge "2020022012" && "$CDATE" -lt "YYYYMMDDHH" ]]; then +## export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2020022012 +## fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/cycGSDnoah/config.analcalc b/FV3GFSwfm/cycGSDnoah/config.analcalc new file mode 100755 index 0000000000..5866ce5ac6 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.analcalc @@ -0,0 +1,11 @@ +#!/bin/ksh -x + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. 
$EXPDIR/config.resources analcalc + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/cycGSDnoah/config.analdiag b/FV3GFSwfm/cycGSDnoah/config.analdiag new file mode 100755 index 0000000000..285e614d37 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.analdiag @@ -0,0 +1,11 @@ +#!/bin/ksh -x + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/cycGSDnoah/config.arch b/FV3GFSwfm/cycGSDnoah/config.arch new file mode 100755 index 0000000000..fe4363613a --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.arch @@ -0,0 +1,25 @@ +#!/bin/ksh -x + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/cycGSDnoah/config.awips b/FV3GFSwfm/cycGSDnoah/config.awips new file mode 100755 index 0000000000..76a9c21536 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.awips @@ -0,0 +1,17 @@ +#!/bin/ksh -x + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/cycGSDnoah/config.base b/FV3GFSwfm/cycGSDnoah/config.base new file mode 100644 index 0000000000..fa5aee0797 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.base @@ -0,0 +1,289 @@ +#!/bin/ksh -x + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="HERA" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_ARCH="service" + +# Project to use in mass store: +HPSS_PROJECT=fim + +# Directories relative to installation areas: +export HOMEgfs=/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure +export PARMgfs=$HOMEgfs/parm +export FIXgfs=$HOMEgfs/fix +export USHgfs=$HOMEgfs/ush +export UTILgfs=$HOMEgfs/util +export EXECgfs=$HOMEgfs/exec +export SCRgfs=$HOMEgfs/scripts + +######################################################################## + +# GLOBAL static environment parameters +export NWPROD="/scratch1/NCEPDEV/global/glopara/nwpara" +export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump" +export RTMFIX=$CRTM_FIX + +# USER specific paths +export HOMEDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/$USER" +export PTMP="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp4/$USER" +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git" + +#### CCPP Suite +#### export CCPP_SUITE="FV3_GSD_v0" # GSDsuite +export CCPP_SUITE="FV3_GSD_noah" # GSDsuite + NOAH LSM +#### export CCPP_SUITE="FV3_GFS_v16beta" # EMC v16beta + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="YES" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="$HOMEgfs/fix/fix_gsi" +export HOMEfv3gfs="$HOMEgfs/sorc/fv3gfs.fd" +export HOMEpost="$HOMEgfs" +export HOMEobsproc_prep="$BASE_GIT/obsproc/gfsv16b/obsproc_prep.iss70457.netcdfhistory" +export HOMEobsproc_network="$BASE_GIT/obsproc/gfsv16b/obsproc_global.iss71402.supportGFSv16" +export HOMEobsproc_global=$HOMEobsproc_network +export BASE_VERIF="$BASE_GIT/verif/global/tags/vsdb" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_CMD="chgrp rstprod" +export NEMSIOGET="$HOMEgfs/exec/nemsio_get" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="$HOMEgfs/env" +export BASE_JOB="$HOMEgfs/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2020081918 +export EDATE=2020082100 +export assim_freq=6 +export PSLOT="cycGSDnoah" +export EXPDIR="/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/$PSLOT" +export ROTDIR="/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/$PSLOT" +export ROTDIR_DUMP="YES" #Note: A value 
of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "$CDATE" -ge "2019092100" && "$CDATE" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export RUNDIR="$STMP/RUNDIRS/$PSLOT" +export DATAROOT="$RUNDIR/$CDATE/$CDUMP" +export ARCDIR="$NOSCRUB/archive/$PSLOT" +export ATARDIR="/BMC/$HPSS_PROJECT/1year/GSD_phys_tst_jul2019/$PSLOT" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" +export RUN=${RUN:-${CDUMP:-"gfs"}} +export COMINatmos=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos +export COMOUTatmos=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos +export COMINwave=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave +export COMOUTwave=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave + +export jlogfile="${EXPDIR}/logs/jlogfile" +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF="NO" +export SENDCOM="NO" +export SENDDBN="NO" +export SENDDBN_NTC="NO" +export SENDSDM="NO" + +# Resolution specific parameters +export LEVS=65 ## JKH +export CASE="C768" +export CASE_ENKF="C384" + +# Surface cycle update frequency +if [[ "$CDUMP" == "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "$CDUMP" == "gfs" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" ]] ; then + export FHCYC=0 + else + export FHCYC=24 + fi +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=120 +export FHMAX_GFS_06=120 +export FHMAX_GFS_12=120 +export FHMAX_GFS_18=120 +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_$cyc}) + +export FHOUT_GFS=6 +export FHMAX_HF_GFS=0 +export FHOUT_HF_GFS=1 +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + + +# I/O QUILTING, true--use Write Component; false--use GFDL FMS +# if quilting=true, choose OUTPUT_GRID as cubed_sphere_grid in netcdf or gaussian_grid +# if gaussian_grid, set OUTPUT_FILE for nemsio or netcdf +# WRITE_DOPOST=true, use inline POST +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export OUTPUT_FILE="netcdf" +export WRITE_DOPOST=".true." + +# suffix options depending on file format +if [ $OUTPUT_FILE = "netcdf" ]; then + export SUFFIX=".nc" + export NEMSIO_IN=".false." + export NETCDF_IN=".true." +else + export SUFFIX=".nemsio" + export NEMSIO_IN=".true." + export NETCDF_IN=".false." +fi + +# IAU related parameters +export DOIAU="NO" # Enable 4DIAU for control with 3 increments ## JKH +export IAUFHRS="3,6,9" +export IAU_FHROT=`echo $IAUFHRS | cut -c1` +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF="NO" # Enable 4DIAU for EnKF ensemble ## JKH +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 +if [[ "$SDATE" = "$CDATE" ]]; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" ## JKH +export gldas_cyc=00 + +# run wave component +export DO_WAVE="NO" ## JKH +export WAVE_CDUMP="both" + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export imp_physics=8 +else + export imp_physics=11 +fi + +# Shared parameters +# Hybrid related +export DOHYBVAR="YES" +export NMEM_ENKF=40 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [ $DOHYBVAR = "YES" ]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [ $DONST = "YES" ]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +if [ $OUTPUT_FILE = "nemsio" ]; then + export DO_CALC_INCREMENT="YES" + export DO_CALC_ANALYSIS="NO" +fi + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." 
+ +# Verification options +export DO_METP="NO" # Run MET+ jobs ## JKH + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/cycGSDnoah/config.base.emc.dyn b/FV3GFSwfm/cycGSDnoah/config.base.emc.dyn new file mode 100755 index 0000000000..61944e9797 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.base.emc.dyn @@ -0,0 +1,290 @@ +#!/bin/ksh -x + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_ARCH="@QUEUE_ARCH@" + +# Project to use in mass store: +HPSS_PROJECT=fim + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=$HOMEgfs/parm +export FIXgfs=$HOMEgfs/fix +export USHgfs=$HOMEgfs/ush +export UTILgfs=$HOMEgfs/util +export EXECgfs=$HOMEgfs/exec +export SCRgfs=$HOMEgfs/scripts + +######################################################################## + +# GLOBAL static environment parameters +export NWPROD="@NWPROD@" +export DMPDIR="@DMPDIR@" +export RTMFIX=$CRTM_FIX + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +#### CCPP Suite +#### export CCPP_SUITE="FV3_GSD_v0" # GSDsuite +export CCPP_SUITE="FV3_GSD_noah" # GSDsuite + NOAH LSM +#### export CCPP_SUITE="FV3_GFS_v16beta" # EMC v16beta + +# Toggle to turn on/off GFS downstream 
processing. +export DO_BUFRSND="YES" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="$HOMEgfs/fix/fix_gsi" +export HOMEfv3gfs="$HOMEgfs/sorc/fv3gfs.fd" +export HOMEpost="$HOMEgfs" +export HOMEobsproc_prep="$BASE_GIT/obsproc/gfsv16b/obsproc_prep.iss70457.netcdfhistory" +export HOMEobsproc_network="$BASE_GIT/obsproc/gfsv16b/obsproc_global.iss71402.supportGFSv16" +export HOMEobsproc_global=$HOMEobsproc_network +export BASE_VERIF="$BASE_GIT/verif/global/tags/vsdb" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_CMD="chgrp rstprod" +export NEMSIOGET="$HOMEgfs/exec/nemsio_get" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="$HOMEgfs/env" +export BASE_JOB="$HOMEgfs/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/$PSLOT" +export ROTDIR="@ROTDIR@/$PSLOT" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "$CDATE" -ge "2019092100" && "$CDATE" -le "2019110700" ]]; then + export 
DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export RUNDIR="$STMP/RUNDIRS/$PSLOT" +export DATAROOT="$RUNDIR/$CDATE/$CDUMP" +export ARCDIR="$NOSCRUB/archive/$PSLOT" +export ICSDIR="@ICSDIR@" +export ATARDIR="/BMC/$HPSS_PROJECT/1year/GSD_phys_tst_jul2019/$PSLOT" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" +export RUN=${RUN:-${CDUMP:-"gfs"}} +export COMINatmos=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos +export COMOUTatmos=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos +export COMINwave=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave +export COMOUTwave=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave + +export jlogfile="${EXPDIR}/logs/jlogfile" +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF="NO" +export SENDCOM="NO" +export SENDDBN="NO" +export SENDDBN_NTC="NO" +export SENDSDM="NO" + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" + +# Surface cycle update frequency +if [[ "$CDUMP" == "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "$CDUMP" == "gfs" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" ]] ; then + export FHCYC=0 + else + export FHCYC=24 + fi +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=120 +export FHMAX_GFS_06=120 +export FHMAX_GFS_12=120 +export FHMAX_GFS_18=120 +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_$cyc}) + +export FHOUT_GFS=6 +export FHMAX_HF_GFS=0 +export FHOUT_HF_GFS=1 +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + + +# I/O QUILTING, true--use Write Component; false--use GFDL FMS +# if quilting=true, choose OUTPUT_GRID as cubed_sphere_grid in netcdf or gaussian_grid +# if gaussian_grid, set OUTPUT_FILE for nemsio or netcdf +# WRITE_DOPOST=true, use inline POST +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export OUTPUT_FILE="netcdf" +export WRITE_DOPOST=".true." + +# suffix options depending on file format +if [ $OUTPUT_FILE = "netcdf" ]; then + export SUFFIX=".nc" + export NEMSIO_IN=".false." + export NETCDF_IN=".true." +else + export SUFFIX=".nemsio" + export NEMSIO_IN=".true." + export NETCDF_IN=".false." +fi + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=`echo $IAUFHRS | cut -c1` +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF="YES" # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 +if [[ "$SDATE" = "$CDATE" ]]; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS=YES +export gldas_cyc=00 + +# run wave component +export DO_WAVE=YES +export WAVE_CDUMP="both" + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export imp_physics=8 +else + export imp_physics=11 +fi + +# Shared parameters +# Hybrid related +export DOHYBVAR="YES" +export NMEM_ENKF=@NMEM_ENKF@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [ $DOHYBVAR = "YES" ]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [ $DONST = "YES" ]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +if [ $OUTPUT_FILE = "nemsio" ]; then + export DO_CALC_INCREMENT="YES" + export DO_CALC_ANALYSIS="NO" +fi + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." 
+ +# Verification options +export DO_METP="NO" # Run MET+ jobs ## JKH + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/cycGSDnoah/config.base.nco.static b/FV3GFSwfm/cycGSDnoah/config.base.nco.static new file mode 100755 index 0000000000..4e66d41631 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.base.nco.static @@ -0,0 +1,238 @@ +#!/bin/ksh -x + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="WCOSS_DELL_P3" + +# EMC parallel or NCO production +export RUN_ENVIR="nco" + +# Account, queue, etc. 
+export ACCOUNT="FV3GFS-T2O" +export QUEUE="prod" +export QUEUE_ARCH="dev_transfer" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export PARMgfs=$HOMEgfs/parm +export FIXgfs=$HOMEgfs/fix +export USHgfs=$HOMEgfs/ush +export UTILgfs=$HOMEgfs/util +export EXECgfs=$HOMEgfs/exec +export SCRgfs=$HOMEgfs/scripts + +######################################################################## + +# GLOBAL static environment parameters + +export NWPROD="/gpfs/dell1/nco/ops/nwprod" +export DMPDIR="/gpfs/dell3/emc/global/dump" +export RTMFIX=$CRTM_FIX + + +# Machine specific paths used everywhere + +# USER specific paths +# export HOMEDIR="/gpfs/dell2/emc/modeling/noscrub/$USER" + export HOMEDIR=$EXPDIR/HOMEDIR +# export STMP="/gpfs/dell3/stmp/$USER" + export STMP=$DATAROOT +# export PTMP="/gpfs/dell3/ptmp/$USER" + export PTMP=$ROTDIR +# export NOSCRUB="/gpfs/dell2/emc/modeling/noscrub/$USER" + export NOSCRUB=$EXPDIR/NOSCRUB + + # Base directories for various builds + export BASE_GIT="/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git" + + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="YES" +export DO_GEMPAK="YES" +export DO_AWIPS="YES" +export WAFSF="YES" # WAFS products + +# NO for retrospective parallel; YES for real-time parallel +export REALTIME="YES" + + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="$HOMEgfs/fix/fix_gsi" +export HOMEfv3gfs="$HOMEgfs/sorc/fv3gfs.fd" +export HOMEpost="$HOMEgfs" +export HOMEobsproc_prep="$BASE_GIT/obsproc/obsproc_prep.iss-46886.fv3gfs" +export HOMEobsproc_network="$BASE_GIT/obsproc/obsproc_global.iss-46886.fv3gfs" +export BASE_VERIF="$BASE_GIT/verif/global/tags/vsdb" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_CMD="chgrp rstprod" +export NEMSIOGET="$HOMEgfs/exec/nemsio_get" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="$HOMEgfs/env" +export BASE_JOB="$HOMEgfs/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2018080600 +export EDATE=2039123100 +export assim_freq=6 +export PSLOT="rtecffv3" +export EXPDIR="$EXPDIR" +export ROTDIR="$ROTDIR" +export ROTDIR_DUMP="YES" +export DUMP_SUFFIX="" +export RUNDIR="$DATAROOT" +export ARCDIR="$NOSCRUB/archive/$PSLOT" +export ICSDIR="/gpfs/dell2/ptmp/$USER/FV3ICS" +export ATARDIR="/NCEPDEV/$HPSS_PROJECT/1year/$USER/$machine/scratch/$PSLOT" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" +export RUN=${RUN:-${CDUMP:-"gfs"}} +export COMINatmos=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos +export COMOUTatmos=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos +export COMINwave=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave +export 
COMOUTwave=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDDBN=${SENDDBN:-"YES"} +export SENDDBN_NTC=${SENDDBN_NTC:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="C384" + +# Surface cycle update frequency +if [[ "$CDUMP" == "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "$CDUMP" == "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 + +# GFS cycle info +export gfs_cyc=4 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=384 +export FHMAX_GFS_06=384 +export FHMAX_GFS_12=384 +export FHMAX_GFS_18=384 +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_$cyc}) + +export FHOUT_GFS=3 +export FHMAX_HF_GFS=120 +export FHOUT_HF_GFS=1 + +# frequency for saving restart files. set to 6,12,24,48 etc +export restart_interval_gfs=12 + + +# I/O QUILTING, true--use Write Component; false--use GFDL FMS +# if quilting=true, choose OUTPUT_GRID as cubed_sphere_grid in netcdf or gaussian_grid +# if gaussian_grid, set OUTPUT_FILE for nemsio or netcdf +# WRITE_DOPOST=true, use inline POST +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export OUTPUT_FILE="nemsio" +export WRITE_DOPOST=".true." 
+ +# IAU related parameters +export DOIAU="NO" +export IAUFHRS=6 +export IAU_FHROT=`echo $IAUFHRS | cut -c1` +export IAU_DELTHRS=6 +export DOIAU_ENKF="NO" +export IAUFHRS_ENKF=6 +export IAU_DELTHRS_ENKF=6 +if [[ "$SDATE" = "$CDATE" ]]; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# run GLDAS to spin up land ICs +export DO_GLDAS=YES +export gldas_cyc=00 + +# run wave component +export DO_WAVE=YES + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=11 + +# Shared parameters +# Hybrid related +export DOHYBVAR="YES" +export NMEM_ENKF=80 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".false." + +# EnKF output frequency +if [ $DOHYBVAR = "YES" ]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst + +export DONST="YES" +if [ $DONST = "YES" ]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'delz_inc','clwmr_inc','icmr_inc'" + + +# Archiving options +export DELETE_COM_IN_ARCHIVE_JOB=YES +export HPSSARCH="NO" # save data to HPSS archive +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=1 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + + +echo "END: config.base" diff --git a/FV3GFSwfm/cycGSDnoah/config.earc b/FV3GFSwfm/cycGSDnoah/config.earc new file mode 100755 index 0000000000..7cb1de235f --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.earc @@ -0,0 +1,17 @@ +#!/bin/ksh -x + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. 
$EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/cycGSDnoah/config.ecen b/FV3GFSwfm/cycGSDnoah/config.ecen new file mode 100755 index 0000000000..c9609e3ff8 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.ecen @@ -0,0 +1,21 @@ +#!/bin/ksh -x + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/cycGSDnoah/config.echgres b/FV3GFSwfm/cycGSDnoah/config.echgres new file mode 100755 index 0000000000..cbf176c92f --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.echgres @@ -0,0 +1,11 @@ +#!/bin/ksh -x + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/cycGSDnoah/config.ediag b/FV3GFSwfm/cycGSDnoah/config.ediag new file mode 100755 index 0000000000..192b5d0b48 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.ediag @@ -0,0 +1,11 @@ +#!/bin/ksh -x + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. 
$EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/cycGSDnoah/config.efcs b/FV3GFSwfm/cycGSDnoah/config.efcs new file mode 100755 index 0000000000..5f7a5bf169 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.efcs @@ -0,0 +1,94 @@ +#!/bin/ksh -x + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# Source model specific information that is resolution dependent +. $EXPDIR/config.fv3 $CASE_ENKF + +# Get task specific resources +. $EXPDIR/config.resources efcs + +export npe_fv3=$npe_efcs + +if [ $QUILTING = ".true." ]; then + export npe_fv3=$(echo " $npe_fv3 + $WRITE_GROUP * $WRTTASK_PER_GROUP" | bc) + export npe_efcs=$npe_fv3 +fi + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsd" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="6 -1" + if [[ "$SDATE" = "$CDATE" ]]; then export restart_interval="3 -1"; fi +fi + +export OUTPUT_FILETYPES="$OUTPUT_FILE" +if [[ "$OUTPUT_FILE" == "netcdf" ]]; then + export ichunk2d=0; export jchunk2d=0 + export ichunk3d=0; export jchunk3d=0; export kchunk3d=0 + RESTILE=`echo $CASE_ENKF |cut -c 2-` + if [[ "$machine" == "WCOSS_DELL_P3" ]]; then + if [ $RESTILE -ge 384 ]; then + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + export ichunk2d=$((4*RESTILE)) + export jchunk2d=$((2*RESTILE)) + export ichunk3d=$((4*RESTILE)) + export jchunk3d=$((2*RESTILE)) + export kchunk3d=1 + else + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + fi + fi + if [[ "$machine" == "HERA" ]]; then + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf_parallel' " + if [ $RESTILE -le 192 ]; then + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + fi + fi +fi + +# wave model +export cplwav=.false. + +echo "END: config.efcs" diff --git a/FV3GFSwfm/cycGSDnoah/config.eobs b/FV3GFSwfm/cycGSDnoah/config.eobs new file mode 100755 index 0000000000..8fa99c10fb --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.eobs @@ -0,0 +1,31 @@ +#!/bin/ksh -x + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/cycGSDnoah/config.epos b/FV3GFSwfm/cycGSDnoah/config.epos new file mode 100755 index 0000000000..441a1ff995 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.epos @@ -0,0 +1,20 @@ +#!/bin/ksh -x + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/cycGSDnoah/config.esfc b/FV3GFSwfm/cycGSDnoah/config.esfc new file mode 100755 index 0000000000..53cbb09175 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.esfc @@ -0,0 +1,19 @@ +#!/bin/ksh -x + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. 
+ +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/cycGSDnoah/config.eupd b/FV3GFSwfm/cycGSDnoah/config.eupd new file mode 100755 index 0000000000..0e9d42e093 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.eupd @@ -0,0 +1,34 @@ +#!/bin/ksh -x + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. 
+ +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/cycGSDnoah/config.fcst b/FV3GFSwfm/cycGSDnoah/config.fcst new file mode 120000 index 0000000000..feaf70f53b --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.fcst @@ -0,0 +1 @@ +config.fcst_aug13 \ No newline at end of file diff --git a/FV3GFSwfm/cycGSDnoah/config.fcst_aug13 b/FV3GFSwfm/cycGSDnoah/config.fcst_aug13 new file mode 100755 index 0000000000..a07daf2599 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.fcst_aug13 @@ -0,0 +1,351 @@ +#!/bin/ksh -x + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Source model specific information that is resolution dependent +. $EXPDIR/config.fv3 $CASE + +# Get task specific resources +. $EXPDIR/config.resources fcst + +if [ $DONST = "YES" ]; then + . $EXPDIR/config.nsst +fi + +export FORECASTSH="$HOMEgfs/scripts/exglobal_fcst_nemsfv3gfs.sh" +export FCSTEXECDIR="$HOMEgfs/exec" +#export FCSTEXEC="global_fv3gfs.x" +export FCSTEXEC="global_fv3gfs_ccpp.x" +export npe_fv3=$npe_fcst + +if [[ "$CDUMP" == "gfs" ]] ; then + export npe_fv3=$npe_fcst_gfs + export layout_x=$layout_x_gfs + export layout_y=$layout_y_gfs + export WRITE_GROUP=$WRITE_GROUP_GFS + export WRTTASK_PER_GROUP=$WRTTASK_PER_GROUP_GFS +fi + +if [ $QUILTING = ".true." ]; then + export npe_fv3=$(echo " $npe_fv3 + $WRITE_GROUP * $WRTTASK_PER_GROUP" | bc) + export npe_fcst=$npe_fv3 + export npe_fcst_gfs=$(echo " $npe_fcst_gfs + $WRITE_GROUP_GFS * $WRTTASK_PER_GROUP_GFS" | bc) +fi + +if [ $DO_WAVE = "YES" ] ; then + export npe_fcst=$((npe_fcst + npe_wav)) + if [ "$WAVE_CDUMP" = "gfs" -o "$WAVE_CDUMP" = "both" ]; then + export npe_fcst_gfs=$((npe_fcst_gfs + npe_wav_gfs)) + if [ "$CDUMP" = "gfs" ]; then npe_wav=$npe_wav_gfs ; fi + fi +fi + +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +export do_ugwp=".false." +export do_tofd=".true." +export launch_level=$(echo "$LEVS/2.35" |bc) + +# Sponge layer settings for L127 +if [ $LEVS = "128" ]; then + if [ "$CDUMP" = "gdas" ]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 + fi + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export n_sponge=42 + fi +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".true." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi + +tbf="" +if [ $satmedmf = ".true." ]; then tbf="_satmedmf" ; fi + +# Land surface model. (3--RUCLSM, landice=F;) (2--NoahMP, landice=F); (1--Noah, landice=T) +if [ $CCPP_SUITE = "FV3_GSD_v0" ]; then + export lsm=3 + export lsoil_lsm=9 +else + export lsm=1 +fi + +if [ $lsm -eq 2 -o $lsm -eq 3 ]; then + export lheatstrg=".false." + export landice=".false." +else + export lheatstrg=".false." + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export landice=".false." ##JKH + else + export landice=".true." 
+ fi +fi + +# Radiation options +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export IAER=111 ## JKH + export iovr_lw=1 ## JKH + export iovr_sw=1 ## JKH + export icliq_sw=1 ## JKH +else + export IAER=5111 ;#spectral band mapping method for aerosol optical properties + export iovr_lw=3 ;#de-correlation length cloud overlap method (Barker, 2008) + export iovr_sw=3 ;#de-correlation length cloud overlap method (Barker, 2008) + export icliq_sw=2 ;#cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +fi + +# CCPP configuration +export output_1st_tstep_rst=".false." #JKH + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + export imfshalcnv=2 +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [ $imp_physics -eq 99 ]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}" + export nwat=2 + +elif [ $imp_physics -eq 6 ]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}" + export nwat=6 + +elif [ $imp_physics -eq 8 ]; then # Thompson + export nwat=6 + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gsd" + export ltaerosol=.true. + export lradar=.true. + + ## GSD namelist changes + export cal_pre=".false." + export random_clds=".false." + export effr_in=.true. 
+ export ttendlim=0.005 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + else + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson${tbf}" + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + +else + echo "Unknown microphysics option, ABORT!" + +fi +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +export OUTPUT_FILETYPES="$OUTPUT_FILE" +if [[ "$OUTPUT_FILE" == "netcdf" ]]; then + export ichunk2d=0; export jchunk2d=0 + export ichunk3d=0; export jchunk3d=0; export kchunk3d=0 + RESTILE=`echo $CASE |cut -c 2-` + if [[ "$machine" == "WCOSS_DELL_P3" ]]; then + if [ $RESTILE -ge 768 ]; then + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf_parallel' " + export ichunk3d=$((4*RESTILE)) + export jchunk3d=$((2*RESTILE)) + export kchunk3d=1 + else + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + fi + fi + if [[ "$machine" == "HERA" ]]; then + #JKHexport OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf_parallel' " + export OUTPUT_FILETYPES=" 'netcdf' 'netcdf' " + if [ $RESTILE -le 192 ]; then + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + fi + fi +fi + + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model 
start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" == "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsd" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [ $DOIAU = "YES" ]; then + export restart_interval="6 9" + if [[ "$SDATE" = "$CDATE" ]]; then export restart_interval="3 6"; fi + fi + + # Choose coupling with wave + if [ $DO_WAVE = "YES" ]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" == "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + if [ $CCPP_SUITE = "FV3_GSD_v0" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_gsd_ruc" + elif [ $CCPP_SUITE = "FV3_GSD_noah" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_gsd" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + fi + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_orig" + fi + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [ $restart_interval_gfs -le 0 ]; then + export restart_interval=0 + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + + # Choose coupling with wave + if [ $DO_WAVE = "YES" -a "$WAVE_CDUMP" != "gdas" ]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + export io_layout="4,4" + + # Set number of layers from the top over + # which two delta-Z filter is applied + export n_sponge=42 + + # Debug load balancing + #export KEEPDATA="YES" + #export ESMF_RUNTIME_PROFILE=ON + #export ESMF_RUNTIME_PROFILE_OUTPUT=SUMMARY + +fi + + +# Regrid tiles to global Gaussian grid in NEMSIO +export REGRID_NEMSIO_SH="$HOMEgfs/ush/fv3gfs_regrid_nemsio.sh" +if [ $DONST = YES ]; then + export REGRID_NEMSIO_TBL="$HOMEgfs/parm/parm_fv3diag/variable_table_da.txt" +else + export REGRID_NEMSIO_TBL="$HOMEgfs/parm/parm_fv3diag/variable_table_da_nonsst.txt" +fi + +# Remap tiles to global latlon grid in NetCDF +export REMAPSH="$HOMEgfs/ush/fv3gfs_remap.sh" +export master_grid="0p25deg" # 1deg 0p5deg 0p25deg 0p125deg etc +export npe_remap=$((npe_fcst < 240 ? 
npe_fcst : 240)) + +# Global latlon NetCDF to nemsio utility parameters +export NC2NEMSIOSH="$HOMEgfs/ush/fv3gfs_nc2nemsio.sh" + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/cycGSDnoah/config.fcst_jun29 b/FV3GFSwfm/cycGSDnoah/config.fcst_jun29 new file mode 100755 index 0000000000..14d1ff80aa --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.fcst_jun29 @@ -0,0 +1,351 @@ +#!/bin/ksh -x + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Source model specific information that is resolution dependent +. $EXPDIR/config.fv3 $CASE + +# Get task specific resources +. $EXPDIR/config.resources fcst + +if [ $DONST = "YES" ]; then + . $EXPDIR/config.nsst +fi + +export FORECASTSH="$HOMEgfs/scripts/exglobal_fcst_nemsfv3gfs.sh" +#JKHexport FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXECDIR="/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/emc_feature_ccpp/sorc/ufs-weather-model_29Jun/NEMS/exe" +#export FCSTEXEC="global_fv3gfs.x" +export FCSTEXEC="global_fv3gfs_ccpp.x" +export npe_fv3=$npe_fcst + +if [[ "$CDUMP" == "gfs" ]] ; then + export npe_fv3=$npe_fcst_gfs + export layout_x=$layout_x_gfs + export layout_y=$layout_y_gfs + export WRITE_GROUP=$WRITE_GROUP_GFS + export WRTTASK_PER_GROUP=$WRTTASK_PER_GROUP_GFS +fi + +if [ $QUILTING = ".true." 
]; then + export npe_fv3=$(echo " $npe_fv3 + $WRITE_GROUP * $WRTTASK_PER_GROUP" | bc) + export npe_fcst=$npe_fv3 + export npe_fcst_gfs=$(echo " $npe_fcst_gfs + $WRITE_GROUP_GFS * $WRTTASK_PER_GROUP_GFS" | bc) +fi + +if [ $DO_WAVE = "YES" ] ; then + export npe_fcst=$((npe_fcst + npe_wav)) + if [ "$WAVE_CDUMP" = "gfs" -o "$WAVE_CDUMP" = "both" ]; then + export npe_fcst_gfs=$((npe_fcst_gfs + npe_wav_gfs)) + if [ "$CDUMP" = "gfs" ]; then npe_wav=$npe_wav_gfs ; fi + fi +fi + +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +export do_ugwp=".false." +export do_tofd=".true." +export launch_level=$(echo "$LEVS/2.35" |bc) + +# Sponge layer settings for L127 +if [ $LEVS = "128" ]; then + if [ "$CDUMP" = "gdas" ]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 + fi + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export n_sponge=42 + fi +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".true." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi + +tbf="" +if [ $satmedmf = ".true." ]; then tbf="_satmedmf" ; fi + +# Land surface model. 
(3--RUCLSM, landice=F;) (2--NoahMP, landice=F); (1--Noah, landice=T) +if [ $CCPP_SUITE = "FV3_GSD_v0" ]; then + export lsm=3 + export lsoil_lsm=9 +else + export lsm=1 +fi + +if [ $lsm -eq 2 -o $lsm -eq 3 ]; then + export lheatstrg=".false." + export landice=".false." +else + export lheatstrg=".false." + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export landice=".false." ##JKH + else + export landice=".true." + fi +fi + +# Radiation options +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export IAER=111 ## JKH + export iovr_lw=1 ## JKH + export iovr_sw=1 ## JKH + export icliq_sw=1 ## JKH +else + export IAER=5111 ;#spectral band mapping method for aerosol optical properties + export iovr_lw=3 ;#de-correlation length cloud overlap method (Barker, 2008) + export iovr_sw=3 ;#de-correlation length cloud overlap method (Barker, 2008) + export icliq_sw=2 ;#cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +fi + +# CCPP configuration +export output_1st_tstep_rst=".false." #JKH + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + export imfshalcnv=2 +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [ $imp_physics -eq 99 ]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}" + export nwat=2 + +elif [ $imp_physics -eq 6 ]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}" + export nwat=6 + +elif [ $imp_physics -eq 8 ]; then # Thompson + export nwat=6 + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gsd" + export ltaerosol=.true. 
+ export lradar=.true. + + ## GSD namelist changes + export cal_pre=".false." + export random_clds=".false." + export effr_in=.true. + export ttendlim=0.005 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + else + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson${tbf}" + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + +else + echo "Unknown microphysics option, ABORT!" + +fi +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +export OUTPUT_FILETYPES="$OUTPUT_FILE" +if [[ "$OUTPUT_FILE" == "netcdf" ]]; then + export ichunk2d=0; export jchunk2d=0 + export ichunk3d=0; export jchunk3d=0; export kchunk3d=0 + RESTILE=`echo $CASE |cut -c 2-` + if [[ "$machine" == "WCOSS_DELL_P3" ]]; then + if [ $RESTILE -ge 768 ]; then + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf_parallel' " + export ichunk3d=$((4*RESTILE)) + export jchunk3d=$((2*RESTILE)) + export kchunk3d=1 + else + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + fi + fi + if [[ "$machine" == "HERA" ]]; then + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf_parallel' " + if [ $RESTILE -le 192 ]; then + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + fi + fi +fi + + 
+#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" == "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsd" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [ $DOIAU = "YES" ]; then + export restart_interval="6 9" + if [[ "$SDATE" = "$CDATE" ]]; then export restart_interval="3 6"; fi + fi + + # Choose coupling with wave + if [ $DO_WAVE = "YES" ]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" == "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + if [ $CCPP_SUITE = "FV3_GSD_v0" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_gsd_ruc" + elif [ $CCPP_SUITE = "FV3_GSD_noah" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_gsd" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + fi + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_orig" + fi + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [ $restart_interval_gfs -le 0 ]; then + export restart_interval=0 + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + + # Choose coupling with wave + if [ $DO_WAVE = "YES" -a "$WAVE_CDUMP" != "gdas" ]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + export io_layout="4,4" + + # Set number of layers from the top over + # which two delta-Z filter is applied + export n_sponge=42 + + # Debug load balancing + #export KEEPDATA="YES" + #export ESMF_RUNTIME_PROFILE=ON + #export ESMF_RUNTIME_PROFILE_OUTPUT=SUMMARY + +fi + + +# Regrid tiles to global Gaussian grid in NEMSIO +export REGRID_NEMSIO_SH="$HOMEgfs/ush/fv3gfs_regrid_nemsio.sh" +if [ $DONST = YES ]; then + export REGRID_NEMSIO_TBL="$HOMEgfs/parm/parm_fv3diag/variable_table_da.txt" +else + export REGRID_NEMSIO_TBL="$HOMEgfs/parm/parm_fv3diag/variable_table_da_nonsst.txt" +fi + +# Remap tiles to global latlon grid in NetCDF +export REMAPSH="$HOMEgfs/ush/fv3gfs_remap.sh" +export master_grid="0p25deg" # 1deg 0p5deg 0p25deg 0p125deg etc +export npe_remap=$((npe_fcst < 240 ? 
npe_fcst : 240)) + +# Global latlon NetCDF to nemsio utility parameters +export NC2NEMSIOSH="$HOMEgfs/ush/fv3gfs_nc2nemsio.sh" + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/cycGSDnoah/config.fv3 b/FV3GFSwfm/cycGSDnoah/config.fv3 new file mode 100755 index 0000000000..7006374d36 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.fv3 @@ -0,0 +1,156 @@ +#!/bin/ksh -x + +########## config.fv3 ########## +# FV3 model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3 for a given resolution +# User can over-ride after sourcing this config file + +if [ $# -ne 1 ]; then + + echo "Must specify an input resolution argument to set variables!" + echo "argument can be any one of the following:" + echo "C48 C96 C192 C384 C768 C1152 C3072" + exit 1 + +fi + +case_in=$1 + +echo "BEGIN: config.fv3" + + +if [[ "$machine" = "WCOSS_DELL_P3" ]]; then + export npe_node_max=28 +elif [[ "$machine" = "WCOSS_C" ]]; then + export npe_node_max=24 +elif [[ "$machine" = "THEIA" ]]; then + export npe_node_max=24 +elif [[ "$machine" = "JET" ]]; then + export npe_node_max=24 +elif [[ "$machine" = "HERA" ]]; then + export npe_node_max=40 +fi + + +# (Standard) Model resolution dependent variables +case $case_in in + "C48") + export DELTIM=450 + export layout_x=2 + export layout_y=4 + export layout_x_gfs=2 + export layout_y_gfs=4 + export npe_wav=14 + export npe_wav_gfs=14 + export nth_fv3=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP=$npe_node_max + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_GFS=$npe_node_max + export WRTIOBUF="4M" + ;; + "C96") + export DELTIM=450 + export layout_x=4 + export layout_y=4 + export 
layout_x_gfs=4 + export layout_y_gfs=4 + export npe_wav=14 + export npe_wav_gfs=14 + export nth_fv3=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP=$npe_node_max + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_GFS=$npe_node_max + export WRTIOBUF="4M" + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export npe_wav=21 + export npe_wav_gfs=21 + export nth_fv3=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP=$npe_node_max + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_GFS=$npe_node_max + export WRTIOBUF="8M" + ;; + "C384") + export DELTIM=240 + export layout_x=8 + export layout_y=8 + export layout_x_gfs=6 + export layout_y_gfs=6 + export npe_wav=35 + export npe_wav_gfs=35 + export nth_fv3=1 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP=$npe_node_max + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_GFS=$npe_node_max + export WRTIOBUF="16M" + ;; + "C768") + export DELTIM=225 ## JKH + export layout_x=12 + export layout_y=8 + export layout_x_gfs=16 + export layout_y_gfs=12 + export npe_wav=70 + export npe_wav_gfs=70 + export nth_fv3=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP=$(echo "2*$npe_node_max" |bc) + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_GFS=$(echo "2*$npe_node_max" |bc) + export WRTIOBUF="32M" + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export npe_wav=140 + export npe_wav_gfs=140 + export nth_fv3=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export 
WRTTASK_PER_GROUP=$(echo "2*$npe_node_max" |bc) + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_GFS=$(echo "2*$npe_node_max" |bc) + export WRTIOBUF="48M" + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export npe_wav=140 + export npe_wav_gfs=140 + export nth_fv3=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP=$(echo "3*$npe_node_max" |bc) + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_GFS=$(echo "3*$npe_node_max" |bc) + export WRTIOBUF="64M" + ;; + *) + echo "grid $case_in not supported, ABORT!" + exit 1 + ;; +esac + +echo "END: config.fv3" diff --git a/FV3GFSwfm/cycGSDnoah/config.fv3ic b/FV3GFSwfm/cycGSDnoah/config.fv3ic new file mode 100755 index 0000000000..eaed3892ea --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.fv3ic @@ -0,0 +1,19 @@ +#!/bin/ksh -x + +########## config.fv3ic ########## +# Convert GFS initial conditions into FV3 initial conditions + +echo "BEGIN: config.fv3ic" + +# Task and thread configuration +export wtime_fv3ic="00:30:00" +export npe_fv3ic=1 +export npe_node_fv3ic=1 +export nth_fv3ic=${NTHREADS_CHGRES:-24} +if [ $machine = HERA ]; then + export npe_fv3ic=4 + export npe_node_fv3ic=4 + export nth_fv3ic=1 +fi + +echo "END: config.fv3ic" diff --git a/FV3GFSwfm/cycGSDnoah/config.gempak b/FV3GFSwfm/cycGSDnoah/config.gempak new file mode 100755 index 0000000000..647f474e90 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.gempak @@ -0,0 +1,13 @@ +#!/bin/ksh -x + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. 
$EXPDIR/config.resources gempak + +export GEMPAKSH=$HOMEgfs/jobs/JGFS_GEMPAK + +echo "END: config.gempak" diff --git a/FV3GFSwfm/cycGSDnoah/config.getic b/FV3GFSwfm/cycGSDnoah/config.getic new file mode 100755 index 0000000000..a754454264 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.getic @@ -0,0 +1,20 @@ +#!/bin/ksh -x + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. $EXPDIR/config.resources getic + +# We should just be supporting the OPSGFS only +export ics_from="opsgfs" # initial conditions from opsgfs or pargfs + +# Provide a parallel experiment name and path to HPSS archive +if [ $ics_from = "pargfs" ]; then + export parexp="prnemsrn" + export HPSS_PAR_PATH="/5year/NCEPDEV/emc-global/emc.glopara/WCOSS_C/$parexp" +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/cycGSDnoah/config.gldas b/FV3GFSwfm/cycGSDnoah/config.gldas new file mode 100755 index 0000000000..456d205c9b --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.gldas @@ -0,0 +1,16 @@ +#!/bin/ksh -x + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR + + +echo "END: config.gldas" diff --git a/FV3GFSwfm/cycGSDnoah/config.metp b/FV3GFSwfm/cycGSDnoah/config.metp new file mode 100755 index 0000000000..aaea79db62 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.metp @@ -0,0 +1,69 @@ +#!/bin/ksh -x + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. 
$EXPDIR/config.resources metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus, Verify grid-to-grid, and/or grid-to-obs, and/or precipitation options +#---------------------------------------------------------- + +if [ "$CDUMP" = "gfs" ] ; then + if [ $RUN_GRID2GRID_STEP1 = "YES" -o $RUN_GRID2OBS_STEP1 = "YES" -o $RUN_PRECIP_STEP1 = "YES" ]; then + export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd + export VERIF_GLOBALSH=$HOMEverif_global/ush/run_verif_global_in_global_workflow.sh + ## INPUT DATA SETTINGS + export model_list=$PSLOT + export model_data_dir_list=$ARCDIR/.. + export model_fileformat_list="pgbf{lead?fmt=%H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" + export model_hpssdir_list=$ATARDIR/.. + export get_data_from_hpss="NO" + export hpss_walltime="10" + ## OUTPUT SETTINGS + export OUTPUTROOT=$RUNDIR/$CDUMP/$CDATE/vrfy/metplus_exp + export model_arch_dir_list=$ARCDIR/.. 
+ export make_met_data_by="VALID" + export gather_by="VSDB" + ## DATE SETTINGS + export VRFYBACK_HRS="24" + ## METPLUS SETTINGS + export METplus_verbosity="INFO" + export MET_verbosity="2" + export log_MET_output_to_METplus="yes" + ## FORECAST VERIFICATION SETTINGS + export fhr_min=$FHMIN_GFS + export fhr_max=$FHMAX_GFS + # GRID-TO-GRID STEP 1 + export g2g1_type_list="anom pres sfc" + export g2g1_anl_name="self_anl" + export g2g1_anl_fileformat_list="pgbanl.gfs.{valid?fmt=%Y%m%d%H}.grib2" + export g2g1_grid="G002" + # GRID-TO-OBS STEP 1 + export g2o1_type_list="upper_air conus_sfc" + export g2o1_obtype_upper_air="ADPUPA" + export g2o1_grid_upper_air="G003" + export g2o1_fhr_out_upper_air="6" + export g2o1_obtype_conus_sfc="ONLYSF ADPUPA" + export g2o1_grid_conus_sfc="G104" + export g2o1_fhr_out_conus_sfc="3" + export g2o1_prepbufr_data_runhpss="YES" + # PRECIP STEP 1 + export precip1_obtype="ccpa" + export precip1_accum_length="24" + export precip1_model_bucket_list="06" + export precip1_model_varname_list="APCP" + export precip1_model_fileformat_list="pgbf{lead?fmt=%H}.gfs.{init?fmt=%Y%m%d%H}.grib2" + export precip1_grid="G211" + fi +fi + +echo "END: config.metp" diff --git a/FV3GFSwfm/cycGSDnoah/config.nsst b/FV3GFSwfm/cycGSDnoah/config.nsst new file mode 100755 index 0000000000..0bf2792474 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.nsst @@ -0,0 +1,45 @@ +#!/bin/ksh -x + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export NST_SPINUP=1 +else + export NST_SPINUP=0 +fi +#if [[ "$CDATE" = $SDATE ]]; then +# export NST_SPINUP=1 +#fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +if [[ "$CCPP_SUITE" == 
"FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export NST_RESV=1 +else + export NST_RESV=0 +fi + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export ZSEA2=5 +else + export ZSEA2=0 +fi + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/cycGSDnoah/config.post b/FV3GFSwfm/cycGSDnoah/config.post new file mode 100755 index 0000000000..4e33da0cbe --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.post @@ -0,0 +1,46 @@ +#!/bin/ksh -x + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=binarynemsiompiio +if [ $OUTPUT_FILE = "netcdf" ]; then + export MODEL_OUT_FORM=netcdfpara +fi + +# Post driver job that calls gfs_nceppost.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_NCEPPOST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_nceppost.sh" +export POSTGPEXEC="$HOMEpost/exec/gfs_ncep_post" +export GOESF=YES # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [ $machine = "WCOSS_DELL_P3" ]; then + export npe_dwn=28 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/cycGSDnoah/config.postsnd b/FV3GFSwfm/cycGSDnoah/config.postsnd new file mode 100755 index 0000000000..9c68e5284e --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.postsnd @@ -0,0 +1,13 @@ +#!/bin/ksh -x + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_POSTSND + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/cycGSDnoah/config.prep b/FV3GFSwfm/cycGSDnoah/config.prep new file mode 100755 index 0000000000..c9dbe8743a --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.prep @@ -0,0 +1,55 @@ +#!/bin/ksh -x + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export DO_MAKEPREPBUFR="YES" # if NO, will copy prepbufr from globaldump + + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/extropcy_qc_reloc.sh" +export SENDCOM=YES + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/fix_am +export USHRELO=${HOMERELO}/ush + + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for start of GFS v16 parallels +if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 +fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +#NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +##if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +## export PRVT=$EXPDIR/prepobs_errtable.global +##fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/cycGSDnoah/config.prepbufr b/FV3GFSwfm/cycGSDnoah/config.prepbufr new file mode 100755 index 0000000000..c90a732c41 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.prepbufr @@ -0,0 +1,17 @@ +#!/bin/ksh -x + +########## config.prepbufr ########## +# PREPBUFR specific configuration + +echo "BEGIN: config.prepbufr" + +# Get task specific resources +. 
$EXPDIR/config.resources prepbufr + +# Set variables + +if [ $machine = "HERA" ]; then + export GESROOT=/scratch1/NCEPDEV/rstprod # set by module prod_envir on WCOSS_C +fi + +echo "END: config.prepbufr" diff --git a/FV3GFSwfm/cycGSDnoah/config.resources b/FV3GFSwfm/cycGSDnoah/config.resources new file mode 100755 index 0000000000..0ca8ca03a1 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.resources @@ -0,0 +1,395 @@ +#!/bin/ksh -x + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" + echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "$machine" = "WCOSS_DELL_P3" ]]; then + export npe_node_max=28 + if [ "$QUEUE" = "dev2" -o "$QUEUE" = "devonprod2" -o "$QUEUE" = "devmax2" ]; then # WCOSS Dell 3.5 + export npe_node_max=40 + fi +elif [[ "$machine" = "WCOSS_C" ]]; then + export npe_node_max=24 +elif [[ "$machine" = "JET" ]]; then + export npe_node_max=24 +elif [[ "$machine" = "HERA" ]]; then + export npe_node_max=40 +fi + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=4" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=10 + export nth_waveinit=1 + export npe_node_waveinit=$(echo "$npe_node_max / $nth_waveinit" | bc) + export NTASKS=${npe_waveinit} + +elif [ $step = "waveprep" ]; then + + export 
wtime_waveprep="00:30:00" + export npe_waveprep=65 + export nth_waveprep=1 + export npe_node_waveprep=$(echo "$npe_node_max / $nth_waveprep" | bc) + export NTASKS=${npe_waveprep} + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="06:00:00" + export npe_wavepostsbs=10 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$(echo "$npe_node_max / $nth_wavepostsbs" | bc) + export NTASKS=${npe_wavepostsbs} + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="03:00:00" + export npe_wavepostbndpnt=56 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=$(echo "$npe_node_max / $nth_wavepostbndpnt" | bc) + export NTASKS=${npe_wavepostbndpnt} + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="03:00:00" + export npe_wavepostpnt=56 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=$(echo "$npe_node_max / $nth_wavepostpnt" | bc) + export NTASKS=${npe_wavepostpnt} + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="01:00:00" + export npe_wavegempak=$npe_node_max + export nth_wavegempak=1 + export npe_node_wavegempak=$(echo "$npe_node_max / $nth_wavegempak" | bc) + export NTASKS=${npe_wavegempak} + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:30:00" + export npe_waveawipsbulls=$npe_node_max + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=${npe_waveawipsbulls} + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="00:30:00" + export npe_waveawipsgridded=$npe_node_max + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=${npe_waveawipsgridded} + +elif [ $step = "anal" ]; then + + export wtime_anal="02:00:00" + export npe_anal=800 + export nth_anal=4 + if [ $CASE = "C384" ]; then + export npe_anal=160 + export nth_anal=10 + fi + if [ $CASE = "C192" -o $CASE = "C96" -o 
$CASE = "C48" ]; then export npe_anal=84; fi + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_anal=7; fi + export npe_node_anal=$(echo "$npe_node_max / $nth_anal" | bc) + export nth_cycle=$npe_node_max + if [[ "$machine" == "WCOSS_C" ]]; then export memory_anal="3072M"; fi + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="02:00:00" + export npe_analcalc=127 + export nth_analcalc=1 + export npe_node_analcalc=$npe_node_max + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_analcalc=127 ; fi + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="02:00:00" + export npe_analdiag=112 + export nth_analdiag=1 + export npe_node_analdiag=$npe_node_max + if [[ "$machine" == "WCOSS_C" ]]; then export memory_analdiag="3072M"; fi + +elif [ $step = "gldas" ]; then + + export wtime_gldas="02:00:00" + export npe_gldas=96 + export nth_gldas=1 + export npe_node_gldas=$npe_node_max + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=24 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_gldas=112 ; fi + if [[ "$machine" == "WCOSS_C" ]]; then export memory_gldas="3072M"; fi + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:00:00" + export wtime_fcst_gfs="08:00:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export npe_node_fcst=$(echo "$npe_node_max / $nth_fcst" | bc) + if [[ "$machine" == "WCOSS_C" ]]; then export memory_fcst="1024M"; fi + +elif [ $step = "post" ]; then + + export wtime_post="02:00:00" + export wtime_post_gfs="06:00:00" + export npe_post=48 + export nth_post=1 + export npe_node_post=12 + export npe_node_dwn=$npe_node_max + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_node_post=14 ; fi + if [[ "$machine" == "WCOSS_C" ]]; then export memory_post="3072M"; fi + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=1 + 
export nth_wafs=1 + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=1 + export nth_wafsgcip=1 + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=1 + export npe_node_wafsgrib2=1 + export nth_wafsgrib2=1 + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=1 + export nth_wafsblending=1 + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=1 + export npe_node_wafsgrib20p25=1 + export nth_wafsgrib20p25=1 + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=1 + export nth_wafsblending0p25=1 + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "$machine" == "WCOSS_C" ]]; then + export memory_vrfy="3072M" + elif [[ "$machine" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + if [[ "$machine" == "WCOSS_C" ]]; then + export memory_metp="3072M" + elif [[ "$machine" == "THEIA" ]]; then + export memory_metp="16384M" + fi + +elif [ $step = "echgres" ]; then + + export wtime_echgres="01:00:00" + export npe_echgres=3 + export nth_echgres=$npe_node_max + export npe_node_echgres=1 + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=2048M" + +elif [ 
$step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:30:00" + export wtime_eomg="01:00:00" + if [ $CASE = "C768" ]; then + export npe_eobs=100 + elif [ $CASE = "C384" ]; then + export npe_eobs=42 + elif [ $CASE = "C192" ]; then + export npe_eobs=28 + elif [ $CASE = "C96" -o $CASE = "C48" ]; then + export npe_eobs=14 + fi + export nth_eobs=2 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_eobs=7; fi + export npe_node_eobs=$(echo "$npe_node_max / $nth_eobs" | bc) + if [[ "$machine" == "WCOSS_C" ]]; then export memory_eobs="3072M"; fi + +elif [ $step = "ediag" ]; then + + export wtime_ediag="02:00:00" + export npe_ediag=56 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + if [[ "$machine" == "WCOSS_C" ]]; then export memory_ediag="3072M"; fi + +elif [ $step = "eupd" ]; then + + export wtime_eupd="01:30:00" + if [ $CASE = "C768" ]; then + export npe_eupd=540 + export nth_eupd=6 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then + export nth_eupd=9 + fi + elif [ $CASE = "C384" ]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then + export nth_eupd=9 + fi + if [[ "$machine" = "HERA" ]]; then + export npe_eupd=84 + export nth_eupd=10 + fi + elif [ $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then + export npe_eupd=42 + export nth_eupd=2 + fi + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + if [[ "$machine" == "WCOSS_C" ]]; then + export memory_eupd="3072M" + fi + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:30:00" + export npe_ecen=80 + export nth_ecen=6 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_ecen=7; fi + if [ $CASE = "C384" -o $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then export nth_ecen=2; fi + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + if [[ "$machine" == "WCOSS_C" ]]; then export memory_ecen="3072M"; fi + +elif [ $step = "esfc" ]; then + + export wtime_esfc="03:00:00" + export 
npe_esfc=80 + export npe_node_esfc=$npe_node_max + export nth_esfc=1 + export nth_cycle=$nth_esfc + if [[ "$machine" == "WCOSS_C" ]]; then export memory_esfc="3072M"; fi + +elif [ $step = "efcs" ]; then + + export wtime_efcs="03:00:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + if [[ "$machine" == "WCOSS_C" ]]; then export memory_efcs="254M"; fi + +elif [ $step = "epos" ]; then + + export wtime_epos="03:00:00" + export npe_epos=80 + export nth_epos=6 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_epos=7; fi + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + if [[ "$machine" == "WCOSS_C" ]]; then export memory_epos="254M"; fi + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=1 + export npe_node_postsnd=4 + export npe_postsndcfp=9 + export npe_node_postsndcfp=3 + if [ $OUTPUT_FILE == "nemsio" ]; then + export npe_postsnd=13 + export npe_node_postsnd=4 + fi + if [[ "$machine" = "HERA" ]]; then export npe_node_postsnd=2; fi + if [[ "$machine" == "WCOSS_C" ]]; then export memory_postsnd="254M"; fi + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=4 + export npe_node_awips=4 + export nth_awips=2 + if [[ "$machine" == "WCOSS_DELL_P3" ]]; then + export npe_awips=2 + export npe_node_awips=2 + export nth_awips=1 + fi + if [[ "$machine" == "WCOSS_C" ]]; then export memory_awips="2048M"; fi + +elif [ $step = "gempak" ]; then + + export wtime_gempak="02:00:00" + export npe_gempak=17 + export npe_node_gempak=4 + export nth_gempak=3 + if [[ "$machine" == "WCOSS_C" ]]; then export memory_gempak="254M"; fi + +else + + echo "Invalid step = $step, ABORT!" 
+ exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/cycGSDnoah/config.vrfy b/FV3GFSwfm/cycGSDnoah/config.vrfy new file mode 100755 index 0000000000..796e293008 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.vrfy @@ -0,0 +1,193 @@ +#!/bin/ksh -x + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. $EXPDIR/config.resources vrfy + +export VDUMP="gfs" # Verifying dump +export CDUMPFCST="gdas" # Fit-to-obs with GDAS/GFS prepbufr +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis + +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYFITS="YES" # Fit to observations +export VSDB_STEP1="YES" # Populate VSDB database +export VSDB_STEP2="NO" +export VRFYG2OBS="YES" # Grid to observations, see note below if turning ON +export VRFYPRCP="YES" # Precip threat scores +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#------------------------------------------------- +# Fit to Observations +#------------------------------------------------- + +if [ $VRFYFITS = "YES" ]; then + + export PRVT=$HOMEgfs/fix/fix_gsi/prepobs_errtable.global + export HYBLEVS=$HOMEgfs/fix/fix_am/global_hyblev.l${LEVS}.txt + export CUE2RUN=$QUEUE + + export VBACKUP_FITS=24 + + export CONVNETC="NO" + if [ ${netcdf_diag:-".false."} = ".true." 
]; then + export CONVNETC="YES" + fi + + if [ $machine = "WCOSS_C" ]; then + export fitdir="$BASE_GIT/verif/global/parafits.fv3nems/batrun" + export PREPQFITSH="$fitdir/subfits_cray_nems" + elif [ $machine = "WCOSS_DELL_P3" ]; then + export fitdir="$BASE_GIT/verif/global/Fit2Obs/ncf-vqc/batrun" + export PREPQFITSH="$fitdir/subfits_dell_nems" + elif [ $machine = "HERA" ]; then + #export fitdir="$BASE_GIT/Fit2Obs/batrun" + export fitdir="$BASE_GIT/verif/global/Fit2Obs/ncf-vqc/batrun" + export PREPQFITSH="$fitdir/subfits_hera_slurm" + fi + +fi + + +#---------------------------------------------------------- +# VSDB STEP1, Verify Precipipation and Grid To Obs options +#---------------------------------------------------------- +# All these call $VSDBSH + +if [ "$CDUMP" = "gfs" ] ; then + ddd=`echo $CDATE |cut -c 1-8` + #if [ $ddd -eq 5 -o $ddd -eq 10 ]; then export VSDB_STEP2 = "YES" ;fi + + if [ $VSDB_STEP1 = "YES" -o $VSDB_STEP2 = "YES" -o $VRFYPRCP = "YES" -o $VRFYG2OBS = "YES" ]; then + export BACKDATEVSDB=24 # execute vsdbjob for the previous day + export VBACKUP_PRCP=24 # back up for QPF verification data + export vsdbsave="$NOSCRUB/archive/vsdb_data" # place to save vsdb database + export vsdbhome=$BASE_VERIF # location of global verification scripts + export VSDBSH="$vsdbhome/vsdbjob.sh" # VSDB job script + export vlength=$FHMAX_GFS # verification length + export vhr_rain=$FHMAX_GFS # verification length for precip + export ftyplist="pgbq" # verif. 
files used for computing QPF ETS scores + export ptyplist="PRATE" # precip types in GRIB: PRATE or APCP + export anltype="gfs" # default=gfs, analysis type (gfs or gdas) for verification + export rain_bucket=6 # prate in pgb files is 6-hr accumulated + + export VSDB_START_DATE="$SDATE" # starting date for vsdb maps + export webhost="emcrzdm.ncep.noaa.gov" # webhost(rzdm) computer + export webhostid="$USER" # webhost(rzdm) user name + export SEND2WEB="NO" # whether or not to send maps to webhost + export WEBDIR="/home/people/emc/www/htdocs/gmb/${webhostid}/vsdb/$PSLOT" + export mdlist="gfs $PSLOT " # exps (up to 10) to compare in maps + fi +fi + + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [ $VRFYRAD = "YES" -o $VRFYMINMON = "YES" -o $VRFYOZN = "YES" ]; then + + export envir="para" + + # Radiance Monitoring + if [[ "$VRFYRAD" == "YES" && "$CDUMP" == "$CDFNL" ]] ; then + + export RADMON_SUFFIX=$PSLOT + export TANKverf="$NOSCRUB/monitor/radmon" + export VRFYRADSH="$HOMEgfs/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "$VRFYMINMON" = "YES" ]] ; then + + export MINMON_SUFFIX=$PSLOT + export M_TANKverf="$NOSCRUB/monitor/minmon" + if [[ "$CDUMP" = "gdas" ]] ; then + export VRFYMINSH="$HOMEgfs/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "$CDUMP" = "gfs" ]] ; then + export VRFYMINSH="$HOMEgfs/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "$VRFYOZN" == "YES" && "$CDUMP" == "$CDFNL" ]] ; then + + export HOMEgfs_ozn="$HOMEgfs" + export OZNMON_SUFFIX=$PSLOT + export TANKverf_ozn="$NOSCRUB/monitor/oznmon" + export VRFYOZNSH="$HOMEgfs/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=v1.1.15.1 +if [ $machine = "WCOSS_DELL_P3" ] ; then + 
export ens_tracker_ver=v1.1.15.3 +fi +export HOMEens_tracker=$BASE_GIT/tracker/ens_tracker.${ens_tracker_ver} + + +if [ "$VRFYTRAK" = "YES" ]; then + + export TRACKERSH="$HOMEgfs/jobs/JGFS_CYCLONE_TRACKER" + if [ "$CDUMP" = "gdas" ]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=$FHMAX + else + export FHOUT_CYCLONE=6 + export FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + fi + if [ $machine = "HERA" ]; then + export COMROOTp1="/scratch1/NCEPDEV/global/glopara/com" + export COMINsyn=${COMINsyn:-${COMROOTp1}/gfs/prod/syndat} + else + export COMINsyn=${COMINsyn:-${COMROOT}/gfs/prod/syndat} + fi +fi + + +if [[ "$VRFYGENESIS" == "YES" && "$CDUMP" == "gfs" ]]; then + + export GENESISSH="$HOMEgfs/jobs/JGFS_CYCLONE_GENESIS" +fi + +if [[ "$VRFYFSU" == "YES" && "$CDUMP" == "gfs" ]]; then + + export GENESISFSU="$HOMEgfs/jobs/JGFS_FSU_GENESIS" +fi + +if [[ "$RUNMOS" == "YES" && "$CDUMP" == "gfs" ]]; then + + if [ $machine = "WCOSS_C" ] ; then + export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.cray" + elif [ $machine = "WCOSS_DELL_P3" ] ; then + export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.dell" + elif [ $machine = "HERA" ] ; then + export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on $machine!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/cycGSDnoah/config.wafs b/FV3GFSwfm/cycGSDnoah/config.wafs new file mode 100755 index 0000000000..dafded38a1 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.wafs @@ -0,0 +1,15 @@ +#!/bin/ksh -x + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export fcsthrs=120 +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/cycGSDnoah/config.wafsblending b/FV3GFSwfm/cycGSDnoah/config.wafsblending new file mode 100755 index 0000000000..dfd7d1715e --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.wafsblending @@ -0,0 +1,14 @@ +#!/bin/ksh -x + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/cycGSDnoah/config.wafsblending0p25 b/FV3GFSwfm/cycGSDnoah/config.wafsblending0p25 new file mode 100755 index 0000000000..28a2de90ff --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#!/bin/ksh -x + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/cycGSDnoah/config.wafsgcip b/FV3GFSwfm/cycGSDnoah/config.wafsgcip new file mode 100755 index 0000000000..5c166a541b --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.wafsgcip @@ -0,0 +1,15 @@ +#!/bin/ksh -x + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/cycGSDnoah/config.wafsgrib2 b/FV3GFSwfm/cycGSDnoah/config.wafsgrib2 new file mode 100755 index 0000000000..27b137cd8c --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.wafsgrib2 @@ -0,0 +1,15 @@ +#!/bin/ksh -x + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/cycGSDnoah/config.wafsgrib20p25 b/FV3GFSwfm/cycGSDnoah/config.wafsgrib20p25 new file mode 100755 index 0000000000..8b55333c00 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#!/bin/ksh -x + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/cycGSDnoah/config.wave b/FV3GFSwfm/cycGSDnoah/config.wave new file mode 100755 index 0000000000..df37ff49d3 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.wave @@ -0,0 +1,121 @@ +#!/bin/ksh -x + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${CDUMP}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD='gnh_10m aoc_9km gsh_15m' +export waveGRDN='1 2 3' # gridnumber for ww3_multi +export waveGRDG='10 20 30' # gridgroup for ww3_multi + +# ESMF input grid +export waveesmfGRD='glox_10m' # input grid + +# Grids for input fields +export WAVEICE_DID=sice +export WAVEICE_FID=glix_10m +export WAVECUR_DID=rtofs +export WAVECUR_FID=glix_10m +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=points +export waveinterpGRD='glo_15mxt' # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavesbsGRD='' # side-by-side grids generated as wave model runs, writes to com +export wavepostGRD='gnh_10m aoc_9km gsh_15m' # Native grids that will be post-processed (grib2) + +# CDATE +export CDATE=${PDY}${cyc} + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export 
FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} + +# gridded and point output rate +export DTFLD_WAV=`expr $FHOUT_HF_WAV \* 3600` +export DTPNT_WAV=3600 +export FHINCP_WAV=`expr $DTPNT_WAV / 3600` + +# Selected output parameters (gridded) +export OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + WAVNCYC=4 + WAVHCYC=6 + FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days + WAVHCYC=`expr 24 / ${gfs_cyc}` +else + WAVHCYC=0 + FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi +export FHMAX_WAV_CUR WAVHCYC WAVNCYC + +# Restart timing business +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export RSTTYPE_WAV='T' # generate second tier of restart files + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + export RSTTYPE_WAV='F' # generate second tier of restart files + if [ $rst_dt_gfs -gt 0 ]; then export RSTTYPE_WAV='T' ; fi + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM 
= -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=`echo $RUNMEM | grep -o '..$'` +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +export WW3ICEINP='YES' +export WW3CURINP='YES' + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +echo "END: config.wave" diff --git a/FV3GFSwfm/cycGSDnoah/config.waveawipsbulls b/FV3GFSwfm/cycGSDnoah/config.waveawipsbulls new file mode 100755 index 0000000000..ec39bfb646 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.waveawipsbulls @@ -0,0 +1,17 @@ +#!/bin/ksh -x + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" +export COMPONENT=${COMPONENT:-wave} +export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" +export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/cycGSDnoah/config.waveawipsgridded b/FV3GFSwfm/cycGSDnoah/config.waveawipsgridded new file mode 100755 index 0000000000..7f2972bb24 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.waveawipsgridded @@ -0,0 +1,17 @@ +#!/bin/ksh -x + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" +export COMPONENT=${COMPONENT:-wave} +export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" +export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/cycGSDnoah/config.wavegempak b/FV3GFSwfm/cycGSDnoah/config.wavegempak new file mode 100755 index 0000000000..ec1f59d25c --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.wavegempak @@ -0,0 +1,16 @@ +#!/bin/ksh -x + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" +export COMPONENT=${COMPONENT:-wave} +export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" +export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT/gempak" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/cycGSDnoah/config.waveinit b/FV3GFSwfm/cycGSDnoah/config.waveinit new file mode 100755 index 0000000000..93960e5e25 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.waveinit @@ -0,0 +1,14 @@ +#!/bin/ksh -x + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/cycGSDnoah/config.wavepostbndpnt b/FV3GFSwfm/cycGSDnoah/config.wavepostbndpnt new file mode 100755 index 0000000000..5ec96a697f --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#!/bin/ksh -x + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/cycGSDnoah/config.wavepostpnt b/FV3GFSwfm/cycGSDnoah/config.wavepostpnt new file mode 100755 index 0000000000..276ca230a6 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.wavepostpnt @@ -0,0 +1,11 @@ +#!/bin/ksh -x + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/cycGSDnoah/config.wavepostsbs b/FV3GFSwfm/cycGSDnoah/config.wavepostsbs new file mode 100755 index 0000000000..b9051776e3 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/config.wavepostsbs @@ -0,0 +1,28 @@ +#!/bin/ksh -x + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="gnh_10m" +export WAV_SUBGRB="WAV_ATLO_GRB WAV_EPAC_GRB WAV_WCST_GRB" +export WAV_ATLO_GRB="0 6 0 0 0 0 0 0 301 331 0 0 55000000 260000000 48 0 310000000 166667 166667 0 atlocn 0p16" +export WAV_EPAC_GRB="0 6 0 0 0 0 0 0 511 301 0 0 30000002 130000000 48 -20000000 215000000 166667 166667 0 epacif 0p16" +export WAV_WCST_GRB="0 6 0 0 0 0 0 0 241 151 0 0 50000000 210000000 48 25000000 250000000 166667 166667 0 wcoast 0p16" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/cycGSDnoah/config.waveprep b/FV3GFSwfm/cycGSDnoah/config.waveprep new file mode 100755 index 0000000000..aaf8675283 --- /dev/null +++ 
b/FV3GFSwfm/cycGSDnoah/config.waveprep @@ -0,0 +1,46 @@ +#!/bin/ksh -x + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +export HOUR_INC=3 # This value should match with the one used in + # the wind update script +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV='3' +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' + +# Wind interval for standalone file-based runs +# Output stride +export WAV_WND_HOUR_INC=1 # This value should match with the one used in + # the wind update script +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} # Defaults to 3h for GEFSv12 +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} # Constant DT for GFSv16 from getgo +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/cycGSDnoah/cycGSDnoah.crontab b/FV3GFSwfm/cycGSDnoah/cycGSDnoah.crontab new file mode 100644 index 0000000000..7535672b89 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/cycGSDnoah.crontab @@ -0,0 +1,6 @@ + +#################### cycGSDnoah #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.2/bin/rocotorun -d /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah/cycGSDnoah.db -w 
/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah/cycGSDnoah.xml +################################################################# + diff --git a/FV3GFSwfm/cycGSDnoah/cycGSDnoah.xml b/FV3GFSwfm/cycGSDnoah/cycGSDnoah.xml new file mode 100644 index 0000000000..8a6a728713 --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/cycGSDnoah.xml @@ -0,0 +1,1074 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1:ppn=4:tpp=1"> + + + + + 80:ppn=10:tpp=4"> + + + + + 4:ppn=40:tpp=1"> + + + + + 3:ppn=40:tpp=1"> + + + + + + + 81:ppn=10:tpp=4"> + + + + + 4:ppn=12:tpp=1"> + + + + + 3:ppn=1:tpp=1"> + + + + + + + 1:ppn=1:tpp=1"> + + + + + + 5:ppn=20:tpp=2"> + + + + + 2:ppn=40:tpp=1"> + + + + + 90:ppn=6:tpp=6"> + + + + + 3:ppn=1:tpp=40"> + + + + + 14:ppn=6:tpp=6"> + + + + + 2:ppn=40:tpp=1"> + + + + + 11:ppn=40:tpp=1"> + + + + + 14:ppn=6:tpp=6"> + + + + + + 1:ppn=1:tpp=1"> + + + + + + 1:ppn=4:tpp=1"> + + + + + 80:ppn=10:tpp=4"> + + + + + 4:ppn=40:tpp=1"> + + + + + 155:ppn=10:tpp=4"> + + + + + 4:ppn=12:tpp=1"> + + + + + 1:ppn=1:tpp=1"> + + + + + + + 1:ppn=1:tpp=1"> + + + + + + 20:ppn=2:tpp=1"> + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + &SDATE; &SDATE; 06:00:00 + &SDATE; &EDATE; 06:00:00 + &SDATE; &EDATE; 06:00:00 + &SDATE_GFS; &EDATE_GFS; &INTERVAL_GFS; + + + + &JOBS_DIR;/prep.sh + + &PSLOT;_gdasprep_@H + &ACCOUNT; + &QUEUE_PREP_GDAS; + &RESOURCES_PREP_GDAS; + &WALLTIME_PREP_GDAS; + + &NATIVE_PREP_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasprep.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + &ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009.nc + &DMPDIR;/gdas.@Y@m@d/@H/gdas.t@Hz.updated.status.tm00.bufr_d + + + + + + + + &JOBS_DIR;/anal.sh + + &PSLOT;_gdasanal_@H + &ACCOUNT; + &QUEUE_ANAL_GDAS; + &RESOURCES_ANAL_GDAS; + &WALLTIME_ANAL_GDAS; + + &NATIVE_ANAL_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasanal.log + + SLURM_SETYES + 
RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + + + + + + + + + &JOBS_DIR;/analcalc.sh + + &PSLOT;_gdasanalcalc_@H + &ACCOUNT; + &QUEUE_ANALCALC_GDAS; + &RESOURCES_ANALCALC_GDAS; + &WALLTIME_ANALCALC_GDAS; + + &NATIVE_ANALCALC_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasanalcalc.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + &ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.loginc.txt + + + + + + + + + + &JOBS_DIR;/analdiag.sh + + &PSLOT;_gdasanaldiag_@H + &ACCOUNT; + &QUEUE_ANALDIAG_GDAS; + &RESOURCES_ANALDIAG_GDAS; + &WALLTIME_ANALDIAG_GDAS; + + &NATIVE_ANALDIAG_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasanaldiag.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + &ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.loginc.txt + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gdasfcst_@H + &ACCOUNT; + &QUEUE_FCST_GDAS; + &RESOURCES_FCST_GDAS; + &WALLTIME_FCST_GDAS; + + &NATIVE_FCST_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasfcst.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + + + + + + + + 000 001 002 003 004 005 006 007 008 009 010 + anl f000 f001 f002 f003 f004 f005 f006 f007 f008 f009 + anl f000 f001 f002 f003 f004 f005 f006 f007 f008 f009 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gdaspost#grp#_@H + &ACCOUNT; + &QUEUE_POST_GDAS; + &RESOURCES_POST_GDAS; + &WALLTIME_POST_GDAS; + + &NATIVE_POST_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdaspost#grp#.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + FHRGRP#grp# + FHRLST#lst# + ROTDIR&ROTDIR; + + + + &ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gdasvrfy_@H + &ACCOUNT; + &QUEUE_VRFY_GDAS; + 
&RESOURCES_VRFY_GDAS; + &WALLTIME_VRFY_GDAS; + &MEMORY_VRFY_GDAS; + &NATIVE_VRFY_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasvrfy.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gdasarch_@H + &ACCOUNT; + &QUEUE_ARCH_GDAS; + &PARTITION_ARCH_GDAS; + &RESOURCES_ARCH_GDAS; + &WALLTIME_ARCH_GDAS; + &MEMORY_ARCH_GDAS; + &NATIVE_ARCH_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasarch.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + &ARCHIVE_TO_HPSS;YES + + + + + + + + &JOBS_DIR;/eobs.sh + + &PSLOT;_gdaseobs_@H + &ACCOUNT; + &QUEUE_EOBS_GDAS; + &RESOURCES_EOBS_GDAS; + &WALLTIME_EOBS_GDAS; + + &NATIVE_EOBS_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdaseobs.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + + + + + + + + + &JOBS_DIR;/ediag.sh + + &PSLOT;_gdasediag_@H + &ACCOUNT; + &QUEUE_EDIAG_GDAS; + &RESOURCES_EDIAG_GDAS; + &WALLTIME_EDIAG_GDAS; + + &NATIVE_EDIAG_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasediag.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + + + + + + &JOBS_DIR;/eupd.sh + + &PSLOT;_gdaseupd_@H + &ACCOUNT; + &QUEUE_EUPD_GDAS; + &RESOURCES_EUPD_GDAS; + &WALLTIME_EUPD_GDAS; + + &NATIVE_EUPD_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdaseupd.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + + + + + + 000 001 002 + f003 f006 f009 + f003 f006 f009 + + + + &JOBS_DIR;/ecen.sh + + &PSLOT;_gdasecen#grp#_@H + &ACCOUNT; + &QUEUE_ECEN_GDAS; + &RESOURCES_ECEN_GDAS; + &WALLTIME_ECEN_GDAS; + + &NATIVE_ECEN_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasecen#grp#.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + 
CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + FHRGRP#grp# + FHRLST#lst# + + + + + &ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.loganl.txt + + + + + + + + + + + + + &JOBS_DIR;/esfc.sh + + &PSLOT;_gdasesfc_@H + &ACCOUNT; + &QUEUE_ESFC_GDAS; + &RESOURCES_ESFC_GDAS; + &WALLTIME_ESFC_GDAS; + + &NATIVE_ESFC_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasesfc.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + &ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.loganl.txt + + + + + + + + + + + 01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 + + + + &JOBS_DIR;/efcs.sh + + &PSLOT;_gdasefcs#grp#_@H + &ACCOUNT; + &QUEUE_EFCS_GDAS; + &RESOURCES_EFCS_GDAS; + &WALLTIME_EFCS_GDAS; + + &NATIVE_EFCS_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasefcs#grp#.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + ENSGRP#grp# + + + + + + + + + + + + + + + + + + &JOBS_DIR;/echgres.sh + + &PSLOT;_gdasechgres_@H + &ACCOUNT; + &QUEUE_ECHGRES_GDAS; + &RESOURCES_ECHGRES_GDAS; + &WALLTIME_ECHGRES_GDAS; + + &NATIVE_ECHGRES_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasechgres.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + + + + + + + + + 000 001 002 003 004 005 006 + f003 f004 f005 f006 f007 f008 f009 + f003 f004 f005 f006 f007 f008 f009 + + + + &JOBS_DIR;/epos.sh + + &PSLOT;_gdasepos#grp#_@H + &ACCOUNT; + &QUEUE_EPOS_GDAS; + &RESOURCES_EPOS_GDAS; + &WALLTIME_EPOS_GDAS; + + &NATIVE_EPOS_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasepos#grp#.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + FHRGRP#grp# + FHRLST#lst# + + + + + + + + + + + + 00 01 02 03 04 + + + + &JOBS_DIR;/earc.sh + + &PSLOT;_gdasearc#grp#_@H + &ACCOUNT; + &QUEUE_EARC_GDAS; + &PARTITION_EARC_GDAS; + &RESOURCES_EARC_GDAS; + 
&WALLTIME_EARC_GDAS; + &MEMORY_EARC_GDAS; + &NATIVE_EARC_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasearc#grp#.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + ENSGRP#grp# + + + + + + + + + + + + &JOBS_DIR;/prep.sh + + &PSLOT;_gfsprep_@H + &ACCOUNT; + &QUEUE_PREP_GFS; + &RESOURCES_PREP_GFS; + &WALLTIME_PREP_GFS; + + &NATIVE_PREP_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsprep.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + &ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009.nc + &DMPDIR;/gfs.@Y@m@d/@H/gfs.t@Hz.updated.status.tm00.bufr_d + + + + + + + + &JOBS_DIR;/anal.sh + + &PSLOT;_gfsanal_@H + &ACCOUNT; + &QUEUE_ANAL_GFS; + &RESOURCES_ANAL_GFS; + &WALLTIME_ANAL_GFS; + + &NATIVE_ANAL_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsanal.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + + + + + + + + + &JOBS_DIR;/analcalc.sh + + &PSLOT;_gfsanalcalc_@H + &ACCOUNT; + &QUEUE_ANALCALC_GFS; + &RESOURCES_ANALCALC_GFS; + &WALLTIME_ANALCALC_GFS; + + &NATIVE_ANALCALC_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsanalcalc.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + &ROTDIR;/gfs.@Y@m@d/@H/atmos/gfs.t@Hz.loginc.txt + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + &ACCOUNT; + &QUEUE_FCST_GFS; + &RESOURCES_FCST_GFS; + &WALLTIME_FCST_GFS; + + &NATIVE_FCST_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + + + + + + + + 000 001 002 003 004 005 006 007 008 009 010 011 012 013 014 015 016 017 018 019 020 021 + anl f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + anl f000 f006 f012 f018 f024 
f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + &ACCOUNT; + &QUEUE_POST_GFS; + &RESOURCES_POST_GFS; + &WALLTIME_POST_GFS; + + &NATIVE_POST_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + FHRGRP#grp# + FHRLST#lst# + ROTDIR&ROTDIR; + + + + &ROTDIR;/gfs.@Y@m@d/@H/atmos/gfs.t@Hz.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + &ACCOUNT; + &QUEUE_VRFY_GFS; + &RESOURCES_VRFY_GFS; + &WALLTIME_VRFY_GFS; + &MEMORY_VRFY_GFS; + &NATIVE_VRFY_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + + + + + + &JOBS_DIR;/postsnd.sh + + &PSLOT;_gfspostsnd_@H + &ACCOUNT; + &QUEUE_POSTSND_GFS; + &RESOURCES_POSTSND_GFS; + &WALLTIME_POSTSND_GFS; + + &NATIVE_POSTSND_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfspostsnd.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + &ACCOUNT; + &QUEUE_ARCH_GFS; + &PARTITION_ARCH_GFS; + &RESOURCES_ARCH_GFS; + &WALLTIME_ARCH_GFS; + &MEMORY_ARCH_GFS; + &NATIVE_ARCH_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + &ARCHIVE_TO_HPSS;YES + + + + + + + + sleep 1 + + &PSLOT;_firstcyc_@H + &ACCOUNT; + &QUEUE; + &PARTITION_ARCH; + &RESOURCES_ARCH_GDAS; + &WALLTIME_ARCH_GDAS; + &NATIVE_ARCH_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/firstcyc.log + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + + + + + diff --git a/FV3GFSwfm/cycGSDnoah/logs/jlogfile b/FV3GFSwfm/cycGSDnoah/logs/jlogfile new file mode 100644 index 0000000000..95f5855efb --- /dev/null +++ 
b/FV3GFSwfm/cycGSDnoah/logs/jlogfile @@ -0,0 +1,157 @@ +09/17 19:47:15Z efcs.3564-HAS BEGUN on h19c23 +09/17 19:47:15Z efcs.3564-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.3564/mem003/INPUT, ABORT! +09/17 19:47:16Z efcs.152635-HAS BEGUN on h13c05 +09/17 19:47:16Z efcs.106596-HAS BEGUN on h3c07 +09/17 19:47:16Z efcs.152635-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.152635/mem005/INPUT, ABORT! +09/17 19:47:16Z efcs.106596-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.106596/mem007/INPUT, ABORT! +09/17 19:47:17Z efcs.109290-HAS BEGUN on h12c02 +09/17 19:47:17Z efcs.109290-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.109290/mem013/INPUT, ABORT! +09/17 19:47:18Z efcs.87703-HAS BEGUN on h6c47 +09/17 19:47:18Z efcs.155161-HAS BEGUN on h17c01 +09/17 19:47:18Z efcs.87703-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.87703/mem035/INPUT, ABORT! +09/17 19:47:18Z efcs.155161-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.155161/mem021/INPUT, ABORT! +09/17 19:47:18Z efcs.223768-HAS BEGUN on h21c03 +09/17 19:47:18Z efcs.67124-HAS BEGUN on h12c31 +09/17 19:47:18Z efcs.223768-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.223768/mem033/INPUT, ABORT! +09/17 19:47:18Z efcs.67124-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.67124/mem019/INPUT, ABORT! 
+09/17 19:47:18Z efcs.118822-HAS BEGUN on h8c01 +09/17 19:47:18Z efcs.118822-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.118822/mem017/INPUT, ABORT! +09/17 19:47:19Z efcs.268582-HAS BEGUN on h4c19 +09/17 19:47:19Z efcs.268582-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.268582/mem037/INPUT, ABORT! +09/17 19:47:19Z efcs.32381-HAS BEGUN on h6c01 +09/17 19:47:20Z efcs.32381-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.32381/mem015/INPUT, ABORT! +09/17 19:47:20Z efcs.294139-HAS BEGUN on h9c09 +09/17 19:47:20Z efcs.294139-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.294139/mem011/INPUT, ABORT! +09/17 19:47:20Z efcs.34544-HAS BEGUN on h9c46 +09/17 19:47:20Z efcs.34544-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.34544/mem031/INPUT, ABORT! +09/17 19:47:20Z efcs.165192-HAS BEGUN on h3c50 +09/17 19:47:20Z efcs.165192-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.165192/mem029/INPUT, ABORT! +09/17 19:47:21Z efcs.202337-HAS BEGUN on h2c01 +09/17 19:47:21Z efcs.202337-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.202337/mem001/INPUT, ABORT! +09/17 19:47:22Z efcs.244912-HAS BEGUN on h13c50 +09/17 19:47:22Z efcs.244912-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.244912/mem027/INPUT, ABORT! 
+09/17 19:47:22Z efcs.69938-HAS BEGUN on h6c15 +09/17 19:47:22Z efcs.69938-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.69938/mem023/INPUT, ABORT! +09/17 19:47:24Z efcs.73837-HAS BEGUN on h17c15 +09/17 19:47:25Z efcs.73837-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.73837/mem025/INPUT, ABORT! +09/17 19:47:25Z efcs.19172-HAS BEGUN on h8c38 +09/17 19:47:25Z efcs.19172-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.19172/mem039/INPUT, ABORT! +09/17 19:47:27Z efcs.263680-HAS BEGUN on h5c16 +09/17 19:47:27Z efcs.263680-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.263680/mem009/INPUT, ABORT! +09/17 19:47:45Z efcs.107570-HAS BEGUN on h3c07 +09/17 19:47:45Z efcs.107570-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.107570/mem005/INPUT, ABORT! +09/17 19:47:46Z efcs.4546-HAS BEGUN on h19c23 +09/17 19:47:46Z efcs.4546-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.4546/mem007/INPUT, ABORT! +09/17 19:47:46Z efcs.33354-HAS BEGUN on h6c01 +09/17 19:47:46Z efcs.33354-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.33354/mem019/INPUT, ABORT! +09/17 19:47:46Z efcs.295112-HAS BEGUN on h9c09 +09/17 19:47:46Z efcs.269565-HAS BEGUN on h4c19 +09/17 19:47:46Z efcs.295112-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.295112/mem011/INPUT, ABORT! 
+09/17 19:47:46Z efcs.88683-HAS BEGUN on h6c47 +09/17 19:47:46Z efcs.269565-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.269565/mem033/INPUT, ABORT! +09/17 19:47:47Z efcs.68101-HAS BEGUN on h12c31 +09/17 19:47:47Z efcs.88683-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.88683/mem037/INPUT, ABORT! +09/17 19:47:47Z efcs.68101-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.68101/mem017/INPUT, ABORT! +09/17 19:47:47Z efcs.153612-HAS BEGUN on h13c05 +09/17 19:47:47Z efcs.153612-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.153612/mem003/INPUT, ABORT! +09/17 19:47:47Z efcs.110275-HAS BEGUN on h12c02 +09/17 19:47:47Z efcs.110275-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.110275/mem015/INPUT, ABORT! +09/17 19:47:47Z efcs.35524-HAS BEGUN on h9c46 +09/17 19:47:47Z efcs.35524-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.35524/mem031/INPUT, ABORT! +09/17 19:47:47Z efcs.156137-HAS BEGUN on h17c01 +09/17 19:47:47Z efcs.91315-HAS BEGUN on h10c10 +09/17 19:47:47Z efcs.156137-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.156137/mem023/INPUT, ABORT! +09/17 19:47:47Z efcs.91315-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.91315/mem029/INPUT, ABORT! 
+09/17 19:47:47Z efcs.119797-HAS BEGUN on h8c01 +09/17 19:47:48Z efcs.119797-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.119797/mem013/INPUT, ABORT! +09/17 19:47:48Z efcs.166167-HAS BEGUN on h3c50 +09/17 19:47:48Z efcs.166167-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.166167/mem039/INPUT, ABORT! +09/17 19:47:48Z efcs.68010-HAS BEGUN on h20c01 +09/17 19:47:48Z efcs.68010-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.68010/mem035/INPUT, ABORT! +09/17 19:47:48Z efcs.203312-HAS BEGUN on h2c01 +09/17 19:47:49Z efcs.203312-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.203312/mem001/INPUT, ABORT! +09/17 19:47:51Z efcs.70911-HAS BEGUN on h6c15 +09/17 19:47:51Z efcs.70911-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.70911/mem021/INPUT, ABORT! +09/17 19:47:51Z efcs.264653-HAS BEGUN on h5c16 +09/17 19:47:52Z efcs.264653-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.264653/mem009/INPUT, ABORT! +09/17 19:47:52Z efcs.74813-HAS BEGUN on h17c15 +09/17 19:47:52Z efcs.74813-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.74813/mem025/INPUT, ABORT! +09/17 19:47:53Z efcs.245890-HAS BEGUN on h13c50 +09/17 19:47:54Z efcs.245890-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2019093018/gdas/efcs.245890/mem027/INPUT, ABORT! 
+09/17 20:02:36Z efcs.207456-HAS BEGUN on h9c02 +09/17 20:05:18Z efcs.37140-HAS BEGUN on h36m22 +09/17 20:18:18Z efcs.26930-HAS BEGUN on h20c05 +09/17 20:18:31Z fcst.89376-HAS BEGUN on h5c27 +09/17 20:45:03Z efcs.38684-HAS BEGUN on h20c05 +09/18 23:39:43Z efcs.182493-HAS BEGUN on h2c07 +09/18 23:45:05Z efcs.156212-HAS BEGUN on h8c56 +09/22 00:26:16Z efcs.247608-HAS BEGUN on h4c17 +09/22 17:23:29Z efcs.147949-HAS BEGUN on h1c07 +09/22 17:25:48Z efcs.242977-HAS BEGUN on h1c03 +09/22 17:26:07Z efcs.300343-HAS BEGUN on h3c54 +09/22 17:26:09Z efcs.239072-HAS BEGUN on h11c32 +09/22 17:26:15Z efcs.237325-HAS BEGUN on h13c15 +09/22 17:26:17Z efcs.22992-HAS BEGUN on h6c33 +09/22 17:26:20Z efcs.14404-HAS BEGUN on h6c06 +09/22 17:26:24Z efcs.310060-HAS BEGUN on h2c10 +09/22 17:26:45Z efcs.281284-HAS BEGUN on h5c06 +09/22 17:26:46Z efcs.145709-HAS BEGUN on h4c39 +09/22 17:26:49Z efcs.140852-HAS BEGUN on h2c27 +09/22 17:26:50Z efcs.71167-HAS BEGUN on h1c11 +09/22 17:26:50Z efcs.160238-HAS BEGUN on h21c52 +09/22 17:27:15Z efcs.119082-HAS BEGUN on h6c04 +09/22 17:27:30Z efcs.277339-HAS BEGUN on h1c02 +09/22 17:27:39Z efcs.117081-HAS BEGUN on h1c05 +09/22 17:27:39Z efcs.120361-HAS BEGUN on h11c04 +09/22 17:27:42Z efcs.147570-HAS BEGUN on h2c19 +09/22 17:27:53Z efcs.126285-HAS BEGUN on h13c19 +09/22 17:28:07Z efcs.33870-HAS BEGUN on h22c56 +09/22 17:35:08Z fcst.155817-HAS BEGUN on h1c07 +09/22 17:35:16Z efcs.23142-HAS BEGUN on h10c17 +09/22 17:35:26Z efcs.218227-HAS BEGUN on h9c45 +09/23 16:50:00Z efcs.294891-HAS BEGUN on h4c16 +09/23 17:04:54Z efcs.274039-HAS BEGUN on h23c01 +09/23 17:11:48Z efcs.60370-HAS BEGUN on h4c13 +09/23 23:28:45Z fcst.214432-HAS BEGUN on h2c02 +10/02 06:57:42Z efcs.67956-HAS BEGUN on h2c28 +10/02 06:57:42Z efcs.67956-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.67956/mem003/INPUT, ABORT! 
+10/02 06:57:43Z efcs.290011-HAS BEGUN on h6c07 +10/02 06:57:43Z efcs.290011-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.290011/mem001/INPUT, ABORT! +10/02 06:57:47Z efcs.68905-HAS BEGUN on h2c28 +10/02 06:57:47Z efcs.68905-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.68905/mem005/INPUT, ABORT! +10/02 06:57:49Z efcs.290966-HAS BEGUN on h6c07 +10/02 06:57:49Z efcs.290966-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.290966/mem007/INPUT, ABORT! +10/02 06:57:52Z efcs.69853-HAS BEGUN on h2c28 +10/02 06:57:52Z efcs.69853-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.69853/mem009/INPUT, ABORT! +10/02 06:57:56Z efcs.291923-HAS BEGUN on h6c07 +10/02 06:57:56Z efcs.291923-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.291923/mem011/INPUT, ABORT! +10/02 06:57:57Z efcs.70802-HAS BEGUN on h2c28 +10/02 06:57:58Z efcs.70802-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.70802/mem013/INPUT, ABORT! +10/02 06:58:01Z efcs.292877-HAS BEGUN on h6c07 +10/02 06:58:01Z efcs.292877-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.292877/mem015/INPUT, ABORT! +10/02 06:58:02Z efcs.71764-HAS BEGUN on h2c28 +10/02 06:58:02Z efcs.71764-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.71764/mem017/INPUT, ABORT! 
+10/02 06:58:08Z efcs.12319-HAS BEGUN on h11c55 +10/02 06:58:08Z efcs.12319-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.12319/mem021/INPUT, ABORT! +10/02 06:58:09Z efcs.293835-HAS BEGUN on h6c07 +10/02 06:58:09Z efcs.293835-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.293835/mem019/INPUT, ABORT! +10/02 06:58:13Z efcs.13269-HAS BEGUN on h11c55 +10/02 06:58:13Z efcs.13269-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.13269/mem023/INPUT, ABORT! +10/02 06:58:16Z efcs.294794-HAS BEGUN on h6c07 +10/02 06:58:17Z efcs.294794-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.294794/mem025/INPUT, ABORT! +10/02 06:58:23Z efcs.295754-HAS BEGUN on h6c07 +10/02 06:58:23Z efcs.295754-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.295754/mem029/INPUT, ABORT! +10/02 06:58:24Z efcs.72725-HAS BEGUN on h2c28 +10/02 06:58:24Z efcs.72725-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.72725/mem031/INPUT, ABORT! +10/02 06:58:24Z efcs.280730-HAS BEGUN on h16c20 +10/02 06:58:24Z efcs.280730-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.280730/mem027/INPUT, ABORT! +10/02 06:58:27Z efcs.44188-HAS BEGUN on h3c08 +10/02 06:58:27Z efcs.44188-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.44188/mem033/INPUT, ABORT! 
+10/02 06:58:29Z efcs.301034-HAS BEGUN on h4c11 +10/02 06:58:29Z efcs.301034-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.301034/mem039/INPUT, ABORT! +10/02 06:58:30Z efcs.296714-HAS BEGUN on h6c07 +10/02 06:58:30Z efcs.296714-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.296714/mem035/INPUT, ABORT! +10/02 06:58:33Z efcs.281684-HAS BEGUN on h16c20 +10/02 06:58:33Z efcs.281684-Initial conditions must exist in /scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/Judy.K.Henderson/RUNDIRS/cycGSDnoah/2020081918/gdas/efcs.281684/mem037/INPUT, ABORT! +10/02 07:01:51Z efcs.82602-HAS BEGUN on h10c55 +10/02 07:07:28Z fcst.144743-HAS BEGUN on h3c24 diff --git a/FV3GFSwfm/cycGSDnoah/new.xml b/FV3GFSwfm/cycGSDnoah/new.xml new file mode 100644 index 0000000000..0b6c66346e --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/new.xml @@ -0,0 +1,1025 @@ + + + + + + + + + + + + + + + + + + + + + + + + + +1:ppn=4:tpp=1"> + + + +80:ppn=10:tpp=4"> + + + +4:ppn=40:tpp=1"> + + + +3:ppn=40:tpp=1"> + + + +81:ppn=10:tpp=4"> + + + +4:ppn=12:tpp=1"> + + + +3:ppn=1:tpp=1"> + + + + + +1:ppn=1:tpp=1"> + + + + +5:ppn=20:tpp=2"> + + + +2:ppn=40:tpp=1"> + + + +90:ppn=6:tpp=6"> + + + +3:ppn=1:tpp=40"> + + + +14:ppn=6:tpp=6"> + + + +2:ppn=40:tpp=1"> + + + +11:ppn=40:tpp=1"> + + + +14:ppn=6:tpp=6"> + + + + +1:ppn=1:tpp=1"> + + + + +1:ppn=4:tpp=1"> + + + +80:ppn=10:tpp=4"> + + + +4:ppn=40:tpp=1"> + + + +155:ppn=10:tpp=4"> + + + +4:ppn=12:tpp=1"> + + + +1:ppn=1:tpp=1"> + + + + + +1:ppn=1:tpp=1"> + + + + +20:ppn=2:tpp=1"> + +]> + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah/logs/@Y@m@d@H.log + + + 201909301800 201909301800 06:00:00 + 201909301800 201910020000 06:00:00 + 201909301800 201910020000 06:00:00 + 201910010000 201910020000 24:00:00 + + + + 
/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/prep.sh + + cycGSDnoah_gdasprep_@H + gsd-fv3 + batch + 1:ppn=4:tpp=1 + 00:45:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdasprep.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009.nc + /scratch1/NCEPDEV/global/glopara/dump/gdas.@Y@m@d/@H/gdas.t@Hz.updated.status.tm00.bufr_d + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/anal.sh + + cycGSDnoah_gdasanal_@H + gsd-fv3 + batch + 80:ppn=10:tpp=4 + 02:00:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdasanal.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/analcalc.sh + + cycGSDnoah_gdasanalcalc_@H + gsd-fv3 + batch + 4:ppn=40:tpp=1 + 02:00:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdasanalcalc.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + 
CDUMPgdas + PDY@Y@m@d + cyc@H + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.loginc.txt + + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/analdiag.sh + + cycGSDnoah_gdasanaldiag_@H + gsd-fv3 + batch + 3:ppn=40:tpp=1 + 02:00:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdasanaldiag.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.loginc.txt + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/fcst.sh + + cycGSDnoah_gdasfcst_@H + gsd-fv3 + debug + 81:ppn=10:tpp=4 + 00:30:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdasfcst.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + + + + + + + + 000 001 002 003 004 005 006 007 008 009 010 + anl f000 f001 f002 f003 f004 f005 f006 f007 f008 f009 + anl f000 f001 f002 f003 f004 f005 f006 f007 f008 f009 + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/post.sh + + cycGSDnoah_gdaspost#grp#_@H + gsd-fv3 + batch + 4:ppn=12:tpp=1 + 02:00:00 + + --export=NONE + + 
/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdaspost#grp#.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + FHRGRP#grp# + FHRLST#lst# + ROTDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.log#dep#.txt + + + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/vrfy.sh + + cycGSDnoah_gdasvrfy_@H + gsd-fv3 + batch + 3:ppn=1:tpp=1 + 03:00:00 + 16384M + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdasvrfy.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/arch.sh + + cycGSDnoah_gdasarch_@H + gsd-fv3 + batch + service + 1:ppn=1:tpp=1 + 06:00:00 + 2048M + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdasarch.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + YESYES + + + + + + + + 
/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/eobs.sh + + cycGSDnoah_gdaseobs_@H + gsd-fv3 + batch + 5:ppn=20:tpp=2 + 00:30:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdaseobs.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/ediag.sh + + cycGSDnoah_gdasediag_@H + gsd-fv3 + batch + 2:ppn=40:tpp=1 + 02:00:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdasediag.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/eupd.sh + + cycGSDnoah_gdaseupd_@H + gsd-fv3 + batch + 90:ppn=6:tpp=6 + 01:30:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdaseupd.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + + + + + + 000 001 002 + f003 f006 f009 + f003 f006 f009 + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/ecen.sh + + cycGSDnoah_gdasecen#grp#_@H + gsd-fv3 + batch + 
14:ppn=6:tpp=6 + 00:30:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdasecen#grp#.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + FHRGRP#grp# + FHRLST#lst# + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.loganl.txt + + + + + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/esfc.sh + + cycGSDnoah_gdasesfc_@H + gsd-fv3 + batch + 2:ppn=40:tpp=1 + 03:00:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdasesfc.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.loganl.txt + + + + + + + + + + + 01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/efcs.sh + + cycGSDnoah_gdasefcs#grp#_@H + gsd-fv3 + batch + 11:ppn=40:tpp=1 + 03:00:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdasefcs#grp#.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + 
CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + ENSGRP#grp# + + + + + + + + + + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/echgres.sh + + cycGSDnoah_gdasechgres_@H + gsd-fv3 + batch + 3:ppn=1:tpp=40 + 01:00:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdasechgres.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + + + + + + + + + 000 001 002 003 004 005 006 + f003 f004 f005 f006 f007 f008 f009 + f003 f004 f005 f006 f007 f008 f009 + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/epos.sh + + cycGSDnoah_gdasepos#grp#_@H + gsd-fv3 + batch + 14:ppn=6:tpp=6 + 03:00:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdasepos#grp#.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + FHRGRP#grp# + FHRLST#lst# + + + + + + + + + + + + 00 01 02 03 04 + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/earc.sh + + cycGSDnoah_gdasearc#grp#_@H + gsd-fv3 + batch + service + 1:ppn=1:tpp=1 + 06:00:00 + 2048M + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gdasearc#grp#.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + 
EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + ENSGRP#grp# + + + + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/prep.sh + + cycGSDnoah_gfsprep_@H + gsd-fv3 + batch + 1:ppn=4:tpp=1 + 00:45:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gfsprep.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009.nc + /scratch1/NCEPDEV/global/glopara/dump/gfs.@Y@m@d/@H/gfs.t@Hz.updated.status.tm00.bufr_d + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/anal.sh + + cycGSDnoah_gfsanal_@H + gsd-fv3 + batch + 80:ppn=10:tpp=4 + 02:00:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gfsanal.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/analcalc.sh + + cycGSDnoah_gfsanalcalc_@H + gsd-fv3 + batch + 4:ppn=40:tpp=1 + 02:00:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gfsanalcalc.log + + SLURM_SETYES + RUN_ENVIRemc + 
HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/gfs.@Y@m@d/@H/atmos/gfs.t@Hz.loginc.txt + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/fcst.sh + + cycGSDnoah_gfsfcst_@H + gsd-fv3 + batch + 155:ppn=10:tpp=4 + 08:00:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gfsfcst.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + + + + + + + + 000 001 002 003 004 005 006 007 008 009 010 011 012 013 014 015 016 017 018 019 020 021 + anl f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + anl f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/post.sh + + cycGSDnoah_gfspost#grp#_@H + gsd-fv3 + batch + 4:ppn=12:tpp=1 + 06:00:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gfspost#grp#.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + FHRGRP#grp# + FHRLST#lst# + 
ROTDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/gfs.@Y@m@d/@H/atmos/gfs.t@Hz.log#dep#.txt + + + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/vrfy.sh + + cycGSDnoah_gfsvrfy_@H + gsd-fv3 + batch + 1:ppn=1:tpp=1 + 06:00:00 + 16384M + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gfsvrfy.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/postsnd.sh + + cycGSDnoah_gfspostsnd_@H + gsd-fv3 + batch + 20:ppn=2:tpp=1 + 02:00:00 + + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gfspostsnd.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + + + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/jobs/rocoto/arch.sh + + cycGSDnoah_gfsarch_@H + gsd-fv3 + batch + service + 1:ppn=1:tpp=1 + 06:00:00 + 2048M + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/gfsarch.log + + SLURM_SETYES + RUN_ENVIRemc + HOMEgfs/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure + 
EXPDIR/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + YESYES + + + + + + + + sleep 1 + + cycGSDnoah_firstcyc_@H + gsd-fv3 + batch + service + 1:ppn=1:tpp=1 + 06:00:00 + --export=NONE + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/cycGSDnoah/logs/@Y@m@d@H/firstcyc.log + + + + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah/logs/@Y@m@d@H.log + + + + + + + diff --git a/FV3GFSwfm/cycGSDnoah/run_cmds b/FV3GFSwfm/cycGSDnoah/run_cmds new file mode 100644 index 0000000000..06f2e0d33d --- /dev/null +++ b/FV3GFSwfm/cycGSDnoah/run_cmds @@ -0,0 +1,5 @@ +rocotorun -w /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah/cycGSDnoah.xml -d /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah/cycGSDnoah.db +rocotostat -w /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah/cycGSDnoah.xml -d /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah/cycGSDnoah.db + + +rocotorun -w /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah/aeroics.xml -d /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycGSDnoah/aeroics.db diff --git a/FV3GFSwfm/cycemcv16/config.anal b/FV3GFSwfm/cycemcv16/config.anal new file mode 100755 index 0000000000..35ff5e8299 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.anal @@ -0,0 +1,130 @@ +#!/bin/ksh -x + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. $EXPDIR/config.resources anal + +if [ $DONST = "YES" ]; then + . $EXPDIR/config.nsst +fi + +if [[ "$CDUMP" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. 
+ export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="NO" +fi + +export npe_gsi=$npe_anal + +if [[ "$CDUMP" == "gfs" ]] ; then + export npe_gsi=$npe_anal_gfs + export nth_anal=$nth_anal_gfs +fi + +# Set parameters specific to L127 +if [ $LEVS = "128" ]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,$SETUP" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +if [ $OUTPUT_FILE = "nemsio" ]; then + export DO_CALC_INCREMENT="YES" + export DO_CALC_ANALYSIS="NO" +fi + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Use experimental dumps in GFS v16 parallels +export ABIBF="/dev/null" +if [[ "$CDATE" -ge "2019022800" ]] ; then + export ABIBF="$DMPDIR/${CDUMP}x.${PDY}/${cyc}/${CDUMP}.t${cyc}z.gsrcsr.tm00.bufr_d" + if [[ "$CDATE" -ge "2019111000" && "$CDATE" -le "2020052612" ]]; then + export ABIBF="$DMPDIR/${CDUMP}y.${PDY}/${cyc}/${CDUMP}.t${cyc}z.gsrcsr.tm00.bufr_d" + fi +fi + +export AHIBF="/dev/null" +if [[ "$CDATE" -ge "2019042300" ]]; then + export AHIBF="$DMPDIR/${CDUMP}x.${PDY}/${cyc}/${CDUMP}.t${cyc}z.ahicsr.tm00.bufr_d" +fi + + +# Adjust data usage for GFS v16 parallels +# +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +# +# Set default values +export CONVINFO=$FIXgsi/global_convinfo.txt +export OZINFO=$FIXgsi/global_ozinfo.txt +export SATINFO=$FIXgsi/global_satinfo.txt +export OBERROR=$FIXgsi/prepobs_errtable.global + + +# Set convinfo and prepobs.errtable.global for start of GFS v16 parallels +if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export 
CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 +fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps +if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 +fi + +# Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations +if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "2020052612" ]]; then + export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2020040718 +fi + +# NOTE: +# As of 2020052612, gfsv16_historical/global_convinfo.txt.2020052612 is +# identical to ../global_convinfo.txt. Thus, the logic below is not +# needed at this time. +# Assimilate COSMIC-2 GPS +##if [[ "$CDATE" -ge "2020052612" && "$CDATE" -lt "YYYYMMDDHH" ]]; then +## export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2020052612 +##fi + + +# Turn off assimilation of OMPS during period of bad data +if [[ "$CDATE" -ge "2020011600" && "$CDATE" -lt "2020011806" ]]; then + export OZINFO=$FIXgsi/gfsv16_historical/global_ozinfo.txt.2020011600 +fi + + +# Set satinfo for start of GFS v16 parallels +if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2019021900 +fi + +# Turn on assimilation of Metop-C AMSUA and MHS +if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020022012" ]]; then + export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2019110706 +fi + +# NOTE: +# As of 2020022012, gfsv16_historical/global_satinfo.txt.2020022012 is +# identical to ../global_satinfo.txt. 
Thus, the logic below is not +# needed at this time +# +# Turn off assimilation of all Metop-A MHS +## if [[ "$CDATE" -ge "2020022012" && "$CDATE" -lt "YYYYMMDDHH" ]]; then +## export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2020022012 +## fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/cycemcv16/config.analcalc b/FV3GFSwfm/cycemcv16/config.analcalc new file mode 100755 index 0000000000..5866ce5ac6 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.analcalc @@ -0,0 +1,11 @@ +#!/bin/ksh -x + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/cycemcv16/config.analdiag b/FV3GFSwfm/cycemcv16/config.analdiag new file mode 100755 index 0000000000..285e614d37 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.analdiag @@ -0,0 +1,11 @@ +#!/bin/ksh -x + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/cycemcv16/config.arch b/FV3GFSwfm/cycemcv16/config.arch new file mode 100755 index 0000000000..fe4363613a --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.arch @@ -0,0 +1,25 @@ +#!/bin/ksh -x + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
$EXPDIR/config.resources arch + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/cycemcv16/config.awips b/FV3GFSwfm/cycemcv16/config.awips new file mode 100755 index 0000000000..76a9c21536 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.awips @@ -0,0 +1,17 @@ +#!/bin/ksh -x + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/cycemcv16/config.base b/FV3GFSwfm/cycemcv16/config.base new file mode 100644 index 0000000000..937898c2bb --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.base @@ -0,0 +1,290 @@ +#!/bin/ksh -x + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="HERA" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_ARCH="service" + +# Project to use in mass store: +HPSS_PROJECT=fim + +# Directories relative to installation areas: +export HOMEgfs=/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure +export PARMgfs=$HOMEgfs/parm +export FIXgfs=$HOMEgfs/fix +export USHgfs=$HOMEgfs/ush +export UTILgfs=$HOMEgfs/util +export EXECgfs=$HOMEgfs/exec +export SCRgfs=$HOMEgfs/scripts + +######################################################################## + +# GLOBAL static environment parameters +export NWPROD="/scratch1/NCEPDEV/global/glopara/nwpara" +export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump" +export RTMFIX=$CRTM_FIX + +# USER specific paths +export HOMEDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp3/$USER" +export PTMP="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/stmp4/$USER" +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git" + +#### CCPP Suite +#### export CCPP_SUITE="FV3_GSD_v0" # GSDsuite +export CCPP_SUITE="FV3_GSD_noah" # GSDsuite + NOAH LSM +export CCPP_SUITE="FV3_GFS_v16beta" # EMC v16beta + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="YES" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="$HOMEgfs/fix/fix_gsi" +export HOMEfv3gfs="$HOMEgfs/sorc/fv3gfs.fd" +export HOMEpost="$HOMEgfs" +export HOMEobsproc_prep="$BASE_GIT/obsproc/gfsv16b/obsproc_prep.iss70457.netcdfhistory" +export HOMEobsproc_network="$BASE_GIT/obsproc/gfsv16b/obsproc_global.iss71402.supportGFSv16" +export HOMEobsproc_global=$HOMEobsproc_network +export BASE_VERIF="$BASE_GIT/verif/global/tags/vsdb" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_CMD="chgrp rstprod" +export NEMSIOGET="$HOMEgfs/exec/nemsio_get" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="$HOMEgfs/env" +export BASE_JOB="$HOMEgfs/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2020081918 +export EDATE=2020082100 +export assim_freq=6 +export PSLOT="cycemcv16" +export EXPDIR="/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/$PSLOT" +export ROTDIR="/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSrun/$PSLOT" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +#JKH only dates 06/01-06/12 are available +#JKHif [[ "$CDATE" -ge "2019092100" && "$CDATE" -le "2019110700" ]]; then +#JKH export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +#JKHfi +export RUNDIR="$STMP/RUNDIRS/$PSLOT" +export 
DATAROOT="$RUNDIR/$CDATE/$CDUMP" +export ARCDIR="$NOSCRUB/archive/$PSLOT" +export ATARDIR="/BMC/$HPSS_PROJECT/2year/GSD_cyc_sep2020/$PSLOT" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" +export RUN=${RUN:-${CDUMP:-"gfs"}} +export COMINatmos=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos +export COMOUTatmos=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos +export COMINwave=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave +export COMOUTwave=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave + +export jlogfile="${EXPDIR}/logs/jlogfile" +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF="NO" +export SENDCOM="NO" +export SENDDBN="NO" +export SENDDBN_NTC="NO" +export SENDSDM="NO" + +# Resolution specific parameters +export LEVS=65 ## JKH +export CASE="C768" +export CASE_ENKF="C384" + +# Surface cycle update frequency +if [[ "$CDUMP" == "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "$CDUMP" == "gfs" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" ]] ; then + export FHCYC=0 + else + export FHCYC=24 + fi +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=120 +export FHMAX_GFS_06=120 +export FHMAX_GFS_12=120 +export FHMAX_GFS_18=120 +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_$cyc}) + +export FHOUT_GFS=6 +export FHMAX_HF_GFS=0 +export FHOUT_HF_GFS=1 +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + + +# I/O QUILTING, true--use Write Component; false--use GFDL FMS +# if quilting=true, choose OUTPUT_GRID as cubed_sphere_grid in netcdf or gaussian_grid +# if gaussian_grid, set OUTPUT_FILE for nemsio or netcdf +# WRITE_DOPOST=true, use inline POST +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export OUTPUT_FILE="netcdf" +export WRITE_DOPOST=".true." + +# suffix options depending on file format +if [ $OUTPUT_FILE = "netcdf" ]; then + export SUFFIX=".nc" + export NEMSIO_IN=".false." + export NETCDF_IN=".true." +else + export SUFFIX=".nemsio" + export NEMSIO_IN=".true." + export NETCDF_IN=".false." +fi + +# IAU related parameters +export DOIAU="NO" # Enable 4DIAU for control with 3 increments ## JKH +export IAUFHRS="3,6,9" +export IAU_FHROT=`echo $IAUFHRS | cut -c1` +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF="NO" # Enable 4DIAU for EnKF ensemble ## JKH +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 +if [[ "$SDATE" = "$CDATE" ]]; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" ## JKH +export gldas_cyc=00 + +# run wave component +export DO_WAVE="NO" ## JKH +export WAVE_CDUMP="both" + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export imp_physics=8 +else + export imp_physics=11 +fi + +# Shared parameters +# Hybrid related +export DOHYBVAR="YES" +export NMEM_ENKF=20 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [ $DOHYBVAR = "YES" ]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [ $DONST = "YES" ]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +if [ $OUTPUT_FILE = "nemsio" ]; then + export DO_CALC_INCREMENT="YES" + export DO_CALC_ANALYSIS="NO" +fi + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." 
+ +# Verification options +export DO_METP="NO" # Run MET+ jobs ## JKH + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/cycemcv16/config.base.emc.dyn b/FV3GFSwfm/cycemcv16/config.base.emc.dyn new file mode 100755 index 0000000000..61944e9797 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.base.emc.dyn @@ -0,0 +1,290 @@ +#!/bin/ksh -x + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_ARCH="@QUEUE_ARCH@" + +# Project to use in mass store: +HPSS_PROJECT=fim + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=$HOMEgfs/parm +export FIXgfs=$HOMEgfs/fix +export USHgfs=$HOMEgfs/ush +export UTILgfs=$HOMEgfs/util +export EXECgfs=$HOMEgfs/exec +export SCRgfs=$HOMEgfs/scripts + +######################################################################## + +# GLOBAL static environment parameters +export NWPROD="@NWPROD@" +export DMPDIR="@DMPDIR@" +export RTMFIX=$CRTM_FIX + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +#### CCPP Suite +#### export CCPP_SUITE="FV3_GSD_v0" # GSDsuite +export CCPP_SUITE="FV3_GSD_noah" # GSDsuite + NOAH LSM +#### export CCPP_SUITE="FV3_GFS_v16beta" # EMC v16beta + +# Toggle to turn on/off GFS downstream 
processing. +export DO_BUFRSND="YES" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="$HOMEgfs/fix/fix_gsi" +export HOMEfv3gfs="$HOMEgfs/sorc/fv3gfs.fd" +export HOMEpost="$HOMEgfs" +export HOMEobsproc_prep="$BASE_GIT/obsproc/gfsv16b/obsproc_prep.iss70457.netcdfhistory" +export HOMEobsproc_network="$BASE_GIT/obsproc/gfsv16b/obsproc_global.iss71402.supportGFSv16" +export HOMEobsproc_global=$HOMEobsproc_network +export BASE_VERIF="$BASE_GIT/verif/global/tags/vsdb" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_CMD="chgrp rstprod" +export NEMSIOGET="$HOMEgfs/exec/nemsio_get" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="$HOMEgfs/env" +export BASE_JOB="$HOMEgfs/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/$PSLOT" +export ROTDIR="@ROTDIR@/$PSLOT" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "$CDATE" -ge "2019092100" && "$CDATE" -le "2019110700" ]]; then + export 
DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export RUNDIR="$STMP/RUNDIRS/$PSLOT" +export DATAROOT="$RUNDIR/$CDATE/$CDUMP" +export ARCDIR="$NOSCRUB/archive/$PSLOT" +export ICSDIR="@ICSDIR@" +export ATARDIR="/BMC/$HPSS_PROJECT/1year/GSD_phys_tst_jul2019/$PSLOT" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" +export RUN=${RUN:-${CDUMP:-"gfs"}} +export COMINatmos=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos +export COMOUTatmos=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos +export COMINwave=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave +export COMOUTwave=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave + +export jlogfile="${EXPDIR}/logs/jlogfile" +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF="NO" +export SENDCOM="NO" +export SENDDBN="NO" +export SENDDBN_NTC="NO" +export SENDSDM="NO" + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" + +# Surface cycle update frequency +if [[ "$CDUMP" == "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "$CDUMP" == "gfs" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" ]] ; then + export FHCYC=0 + else + export FHCYC=24 + fi +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=120 +export FHMAX_GFS_06=120 +export FHMAX_GFS_12=120 +export FHMAX_GFS_18=120 +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_$cyc}) + +export FHOUT_GFS=6 +export FHMAX_HF_GFS=0 +export FHOUT_HF_GFS=1 +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + + +# I/O QUILTING, true--use Write Component; false--use GFDL FMS +# if quilting=true, choose OUTPUT_GRID as cubed_sphere_grid in netcdf or gaussian_grid +# if gaussian_grid, set OUTPUT_FILE for nemsio or netcdf +# WRITE_DOPOST=true, use inline POST +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export OUTPUT_FILE="netcdf" +export WRITE_DOPOST=".true." + +# suffix options depending on file format +if [ $OUTPUT_FILE = "netcdf" ]; then + export SUFFIX=".nc" + export NEMSIO_IN=".false." + export NETCDF_IN=".true." +else + export SUFFIX=".nemsio" + export NEMSIO_IN=".true." + export NETCDF_IN=".false." +fi + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=`echo $IAUFHRS | cut -c1` +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF="YES" # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 +if [[ "$SDATE" = "$CDATE" ]]; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS=YES +export gldas_cyc=00 + +# run wave component +export DO_WAVE=YES +export WAVE_CDUMP="both" + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export imp_physics=8 +else + export imp_physics=11 +fi + +# Shared parameters +# Hybrid related +export DOHYBVAR="YES" +export NMEM_ENKF=@NMEM_ENKF@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [ $DOHYBVAR = "YES" ]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [ $DONST = "YES" ]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +if [ $OUTPUT_FILE = "nemsio" ]; then + export DO_CALC_INCREMENT="YES" + export DO_CALC_ANALYSIS="NO" +fi + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." 
+ +# Verification options +export DO_METP="NO" # Run MET+ jobs ## JKH + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/cycemcv16/config.base.nco.static b/FV3GFSwfm/cycemcv16/config.base.nco.static new file mode 100755 index 0000000000..4e66d41631 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.base.nco.static @@ -0,0 +1,238 @@ +#!/bin/ksh -x + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="WCOSS_DELL_P3" + +# EMC parallel or NCO production +export RUN_ENVIR="nco" + +# Account, queue, etc. +export ACCOUNT="FV3GFS-T2O" +export QUEUE="prod" +export QUEUE_ARCH="dev_transfer" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export PARMgfs=$HOMEgfs/parm +export FIXgfs=$HOMEgfs/fix +export USHgfs=$HOMEgfs/ush +export UTILgfs=$HOMEgfs/util +export EXECgfs=$HOMEgfs/exec +export SCRgfs=$HOMEgfs/scripts + +######################################################################## + +# GLOBAL static environment parameters + +export NWPROD="/gpfs/dell1/nco/ops/nwprod" +export DMPDIR="/gpfs/dell3/emc/global/dump" +export RTMFIX=$CRTM_FIX + + +# Machine specific paths used everywhere + +# USER specific paths +# export HOMEDIR="/gpfs/dell2/emc/modeling/noscrub/$USER" + export HOMEDIR=$EXPDIR/HOMEDIR +# export STMP="/gpfs/dell3/stmp/$USER" + export STMP=$DATAROOT +# export PTMP="/gpfs/dell3/ptmp/$USER" + export PTMP=$ROTDIR +# export NOSCRUB="/gpfs/dell2/emc/modeling/noscrub/$USER" + export NOSCRUB=$EXPDIR/NOSCRUB + + # Base 
directories for various builds + export BASE_GIT="/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git" + + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="YES" +export DO_GEMPAK="YES" +export DO_AWIPS="YES" +export WAFSF="YES" # WAFS products + +# NO for retrospective parallel; YES for real-time parallel +export REALTIME="YES" + + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="$HOMEgfs/fix/fix_gsi" +export HOMEfv3gfs="$HOMEgfs/sorc/fv3gfs.fd" +export HOMEpost="$HOMEgfs" +export HOMEobsproc_prep="$BASE_GIT/obsproc/obsproc_prep.iss-46886.fv3gfs" +export HOMEobsproc_network="$BASE_GIT/obsproc/obsproc_global.iss-46886.fv3gfs" +export BASE_VERIF="$BASE_GIT/verif/global/tags/vsdb" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_CMD="chgrp rstprod" +export NEMSIOGET="$HOMEgfs/exec/nemsio_get" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="$HOMEgfs/env" +export BASE_JOB="$HOMEgfs/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2018080600 +export EDATE=2039123100 +export assim_freq=6 +export PSLOT="rtecffv3" +export EXPDIR="$EXPDIR" +export ROTDIR="$ROTDIR" +export ROTDIR_DUMP="YES" +export DUMP_SUFFIX="" +export RUNDIR="$DATAROOT" +export ARCDIR="$NOSCRUB/archive/$PSLOT" +export ICSDIR="/gpfs/dell2/ptmp/$USER/FV3ICS" +export ATARDIR="/NCEPDEV/$HPSS_PROJECT/1year/$USER/$machine/scratch/$PSLOT" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" +export RUN=${RUN:-${CDUMP:-"gfs"}} +export COMINatmos=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos 
+export COMOUTatmos=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos +export COMINwave=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave +export COMOUTwave=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDDBN=${SENDDBN:-"YES"} +export SENDDBN_NTC=${SENDDBN_NTC:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="C384" + +# Surface cycle update frequency +if [[ "$CDUMP" == "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "$CDUMP" == "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 + +# GFS cycle info +export gfs_cyc=4 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=384 +export FHMAX_GFS_06=384 +export FHMAX_GFS_12=384 +export FHMAX_GFS_18=384 +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_$cyc}) + +export FHOUT_GFS=3 +export FHMAX_HF_GFS=120 +export FHOUT_HF_GFS=1 + +# frequency for saving restart files. set to 6,12,24,48 etc +export restart_interval_gfs=12 + + +# I/O QUILTING, true--use Write Component; false--use GFDL FMS +# if quilting=true, choose OUTPUT_GRID as cubed_sphere_grid in netcdf or gaussian_grid +# if gaussian_grid, set OUTPUT_FILE for nemsio or netcdf +# WRITE_DOPOST=true, use inline POST +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export OUTPUT_FILE="nemsio" +export WRITE_DOPOST=".true." 
+ +# IAU related parameters +export DOIAU="NO" +export IAUFHRS=6 +export IAU_FHROT=`echo $IAUFHRS | cut -c1` +export IAU_DELTHRS=6 +export DOIAU_ENKF="NO" +export IAUFHRS_ENKF=6 +export IAU_DELTHRS_ENKF=6 +if [[ "$SDATE" = "$CDATE" ]]; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# run GLDAS to spin up land ICs +export DO_GLDAS=YES +export gldas_cyc=00 + +# run wave component +export DO_WAVE=YES + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=11 + +# Shared parameters +# Hybrid related +export DOHYBVAR="YES" +export NMEM_ENKF=80 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".false." + +# EnKF output frequency +if [ $DOHYBVAR = "YES" ]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst + +export DONST="YES" +if [ $DONST = "YES" ]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'delz_inc','clwmr_inc','icmr_inc'" + + +# Archiving options +export DELETE_COM_IN_ARCHIVE_JOB=YES +export HPSSARCH="NO" # save data to HPSS archive +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=1 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + + +echo "END: config.base" diff --git a/FV3GFSwfm/cycemcv16/config.earc b/FV3GFSwfm/cycemcv16/config.earc new file mode 100755 index 0000000000..7cb1de235f --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.earc @@ -0,0 +1,17 @@ +#!/bin/ksh -x + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. 
$EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/cycemcv16/config.ecen b/FV3GFSwfm/cycemcv16/config.ecen new file mode 100755 index 0000000000..c9609e3ff8 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.ecen @@ -0,0 +1,21 @@ +#!/bin/ksh -x + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/cycemcv16/config.echgres b/FV3GFSwfm/cycemcv16/config.echgres new file mode 100755 index 0000000000..cbf176c92f --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.echgres @@ -0,0 +1,11 @@ +#!/bin/ksh -x + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/cycemcv16/config.ediag b/FV3GFSwfm/cycemcv16/config.ediag new file mode 100755 index 0000000000..192b5d0b48 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.ediag @@ -0,0 +1,11 @@ +#!/bin/ksh -x + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. 
$EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/cycemcv16/config.efcs b/FV3GFSwfm/cycemcv16/config.efcs new file mode 100755 index 0000000000..5f7a5bf169 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.efcs @@ -0,0 +1,94 @@ +#!/bin/ksh -x + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# Source model specific information that is resolution dependent +. $EXPDIR/config.fv3 $CASE_ENKF + +# Get task specific resources +. $EXPDIR/config.resources efcs + +export npe_fv3=$npe_efcs + +if [ $QUILTING = ".true." ]; then + export npe_fv3=$(echo " $npe_fv3 + $WRITE_GROUP * $WRTTASK_PER_GROUP" | bc) + export npe_efcs=$npe_fv3 +fi + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsd" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="6 -1" + if [[ "$SDATE" = "$CDATE" ]]; then export restart_interval="3 -1"; fi +fi + +export OUTPUT_FILETYPES="$OUTPUT_FILE" +if [[ "$OUTPUT_FILE" == "netcdf" ]]; then + export ichunk2d=0; export jchunk2d=0 + export ichunk3d=0; export jchunk3d=0; export kchunk3d=0 + RESTILE=`echo $CASE_ENKF |cut -c 2-` + if [[ "$machine" == "WCOSS_DELL_P3" ]]; then + if [ $RESTILE -ge 384 ]; then + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + export ichunk2d=$((4*RESTILE)) + export jchunk2d=$((2*RESTILE)) + export ichunk3d=$((4*RESTILE)) + export jchunk3d=$((2*RESTILE)) + export kchunk3d=1 + else + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + fi + fi + if [[ "$machine" == "HERA" ]]; then + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf_parallel' " + if [ $RESTILE -le 192 ]; then + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + fi + fi +fi + +# wave model +export cplwav=.false. + +echo "END: config.efcs" diff --git a/FV3GFSwfm/cycemcv16/config.eobs b/FV3GFSwfm/cycemcv16/config.eobs new file mode 100755 index 0000000000..8fa99c10fb --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.eobs @@ -0,0 +1,31 @@ +#!/bin/ksh -x + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/cycemcv16/config.epos b/FV3GFSwfm/cycemcv16/config.epos new file mode 100755 index 0000000000..441a1ff995 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.epos @@ -0,0 +1,20 @@ +#!/bin/ksh -x + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/cycemcv16/config.esfc b/FV3GFSwfm/cycemcv16/config.esfc new file mode 100755 index 0000000000..53cbb09175 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.esfc @@ -0,0 +1,19 @@ +#!/bin/ksh -x + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. 
+ +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/cycemcv16/config.eupd b/FV3GFSwfm/cycemcv16/config.eupd new file mode 100755 index 0000000000..0e9d42e093 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.eupd @@ -0,0 +1,34 @@ +#!/bin/ksh -x + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. 
+ +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/cycemcv16/config.fcst b/FV3GFSwfm/cycemcv16/config.fcst new file mode 100755 index 0000000000..0428b7ec56 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.fcst @@ -0,0 +1,350 @@ +#!/bin/ksh -x + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Source model specific information that is resolution dependent +. $EXPDIR/config.fv3 $CASE + +# Get task specific resources +. $EXPDIR/config.resources fcst + +if [ $DONST = "YES" ]; then + . $EXPDIR/config.nsst +fi + +export FORECASTSH="$HOMEgfs/scripts/exglobal_fcst_nemsfv3gfs.sh" +export FCSTEXECDIR="$HOMEgfs/exec" +#export FCSTEXEC="global_fv3gfs.x" +export FCSTEXEC="global_fv3gfs_ccpp.x" +export npe_fv3=$npe_fcst + +if [[ "$CDUMP" == "gfs" ]] ; then + export npe_fv3=$npe_fcst_gfs + export layout_x=$layout_x_gfs + export layout_y=$layout_y_gfs + export WRITE_GROUP=$WRITE_GROUP_GFS + export WRTTASK_PER_GROUP=$WRTTASK_PER_GROUP_GFS +fi + +if [ $QUILTING = ".true." ]; then + export npe_fv3=$(echo " $npe_fv3 + $WRITE_GROUP * $WRTTASK_PER_GROUP" | bc) + export npe_fcst=$npe_fv3 + export npe_fcst_gfs=$(echo " $npe_fcst_gfs + $WRITE_GROUP_GFS * $WRTTASK_PER_GROUP_GFS" | bc) +fi + +if [ $DO_WAVE = "YES" ] ; then + export npe_fcst=$((npe_fcst + npe_wav)) + if [ "$WAVE_CDUMP" = "gfs" -o "$WAVE_CDUMP" = "both" ]; then + export npe_fcst_gfs=$((npe_fcst_gfs + npe_wav_gfs)) + if [ "$CDUMP" = "gfs" ]; then npe_wav=$npe_wav_gfs ; fi + fi +fi + +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +export do_ugwp=".false." +export do_tofd=".true." 
+export launch_level=$(echo "$LEVS/2.35" |bc) + +# Sponge layer settings for L127 +if [ $LEVS = "128" ]; then + if [ "$CDUMP" = "gdas" ]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 + fi + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export n_sponge=42 + fi +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".true." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi + +tbf="" +if [ $satmedmf = ".true." ]; then tbf="_satmedmf" ; fi + +# Land surface model. (3--RUCLSM, landice=F;) (2--NoahMP, landice=F); (1--Noah, landice=T) +if [ $CCPP_SUITE = "FV3_GSD_v0" ]; then + export lsm=3 + export lsoil_lsm=9 +else + export lsm=1 +fi + +if [ $lsm -eq 2 -o $lsm -eq 3 ]; then + export lheatstrg=".false." + export landice=".false." +else + export lheatstrg=".false." + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export landice=".false." ##JKH + else + export landice=".true." + fi +fi + +# Radiation options +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export IAER=111 ## JKH + export iovr_lw=1 ## JKH + export iovr_sw=1 ## JKH + export icliq_sw=1 ## JKH +else + export IAER=5111 ;#spectral band mapping method for aerosol optical properties + export iovr_lw=3 ;#de-correlation length cloud overlap method (Barker, 2008) + export iovr_sw=3 ;#de-correlation length cloud overlap method (Barker, 2008) + export icliq_sw=2 ;#cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +fi + +# CCPP configuration +export output_1st_tstep_rst=".false." 
#JKH + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + export imfshalcnv=2 +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [ $imp_physics -eq 99 ]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}" + export nwat=2 + +elif [ $imp_physics -eq 6 ]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}" + export nwat=6 + +elif [ $imp_physics -eq 8 ]; then # Thompson + export nwat=6 + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gsd" + export ltaerosol=.true. + export lradar=.true. + + ## GSD namelist changes + export cal_pre=".false." + export random_clds=".false." + export effr_in=.true. + export ttendlim=0.005 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + else + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson${tbf}" + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + +else + echo "Unknown microphysics option, ABORT!" 
+ +fi +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +export OUTPUT_FILETYPES="$OUTPUT_FILE" +if [[ "$OUTPUT_FILE" == "netcdf" ]]; then + export ichunk2d=0; export jchunk2d=0 + export ichunk3d=0; export jchunk3d=0; export kchunk3d=0 + RESTILE=`echo $CASE |cut -c 2-` + if [[ "$machine" == "WCOSS_DELL_P3" ]]; then + if [ $RESTILE -ge 768 ]; then + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf_parallel' " + export ichunk3d=$((4*RESTILE)) + export jchunk3d=$((2*RESTILE)) + export kchunk3d=1 + else + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + fi + fi + if [[ "$machine" == "HERA" ]]; then + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf_parallel' " + if [ $RESTILE -le 192 ]; then + export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + fi + fi +fi + + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" == "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsd" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. 
+ # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [ $DOIAU = "YES" ]; then + export restart_interval="6 9" + if [[ "$SDATE" = "$CDATE" ]]; then export restart_interval="3 6"; fi + fi + + # Choose coupling with wave + if [ $DO_WAVE = "YES" ]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" == "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + if [ $CCPP_SUITE = "FV3_GSD_v0" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_gsd_ruc" + elif [ $CCPP_SUITE = "FV3_GSD_noah" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_gsd" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + fi + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_orig" + fi + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [ $restart_interval_gfs -le 0 ]; then + export restart_interval=0 + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + + # Choose coupling with wave + if [ $DO_WAVE = "YES" -a "$WAVE_CDUMP" != "gdas" ]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
+ + # Write each restart file in 16 small files to save time + export io_layout="4,4" + + # Set number of layers from the top over + # which two delta-Z filter is applied + export n_sponge=42 + + # Debug load balancing + #export KEEPDATA="YES" + #export ESMF_RUNTIME_PROFILE=ON + #export ESMF_RUNTIME_PROFILE_OUTPUT=SUMMARY + +fi + + +# Regrid tiles to global Gaussian grid in NEMSIO +export REGRID_NEMSIO_SH="$HOMEgfs/ush/fv3gfs_regrid_nemsio.sh" +if [ $DONST = YES ]; then + export REGRID_NEMSIO_TBL="$HOMEgfs/parm/parm_fv3diag/variable_table_da.txt" +else + export REGRID_NEMSIO_TBL="$HOMEgfs/parm/parm_fv3diag/variable_table_da_nonsst.txt" +fi + +# Remap tiles to global latlon grid in NetCDF +export REMAPSH="$HOMEgfs/ush/fv3gfs_remap.sh" +export master_grid="0p25deg" # 1deg 0p5deg 0p25deg 0p125deg etc +export npe_remap=$((npe_fcst < 240 ? npe_fcst : 240)) + +# Global latlon NetCDF to nemsio utility parameters +export NC2NEMSIOSH="$HOMEgfs/ush/fv3gfs_nc2nemsio.sh" + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/cycemcv16/config.fv3 b/FV3GFSwfm/cycemcv16/config.fv3 new file mode 100755 index 0000000000..7006374d36 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.fv3 @@ -0,0 +1,156 @@ +#!/bin/ksh -x + +########## config.fv3 ########## +# FV3 model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3 for a given resolution +# User can over-ride after sourcing this config file + +if [ $# -ne 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "C48 C96 C192 C384 C768 C1152 C3072" + exit 1 + +fi + +case_in=$1 + +echo "BEGIN: config.fv3" + + +if [[ "$machine" = "WCOSS_DELL_P3" ]]; then + export npe_node_max=28 +elif [[ "$machine" = "WCOSS_C" ]]; then + export npe_node_max=24 +elif [[ "$machine" = "THEIA" ]]; then + export npe_node_max=24 +elif [[ "$machine" = "JET" ]]; then + export npe_node_max=24 +elif [[ "$machine" = "HERA" ]]; then + export npe_node_max=40 +fi + + +# (Standard) Model resolution dependent variables +case $case_in in + "C48") + export DELTIM=450 + export layout_x=2 + export layout_y=4 + export layout_x_gfs=2 + export layout_y_gfs=4 + export npe_wav=14 + export npe_wav_gfs=14 + export nth_fv3=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP=$npe_node_max + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_GFS=$npe_node_max + export WRTIOBUF="4M" + ;; + "C96") + export DELTIM=450 + export layout_x=4 + export layout_y=4 + export layout_x_gfs=4 + export layout_y_gfs=4 + export npe_wav=14 + export npe_wav_gfs=14 + export nth_fv3=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP=$npe_node_max + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_GFS=$npe_node_max + export WRTIOBUF="4M" + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export npe_wav=21 + export npe_wav_gfs=21 + export nth_fv3=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP=$npe_node_max + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_GFS=$npe_node_max + export WRTIOBUF="8M" + ;; + "C384") + export DELTIM=240 + export layout_x=8 + export layout_y=8 + export layout_x_gfs=6 + export layout_y_gfs=6 + export npe_wav=35 + export 
npe_wav_gfs=35 + export nth_fv3=1 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP=$npe_node_max + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_GFS=$npe_node_max + export WRTIOBUF="16M" + ;; + "C768") + export DELTIM=225 ## JKH + export layout_x=12 + export layout_y=8 + export layout_x_gfs=16 + export layout_y_gfs=12 + export npe_wav=70 + export npe_wav_gfs=70 + export nth_fv3=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP=$(echo "2*$npe_node_max" |bc) + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_GFS=$(echo "2*$npe_node_max" |bc) + export WRTIOBUF="32M" + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export npe_wav=140 + export npe_wav_gfs=140 + export nth_fv3=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP=$(echo "2*$npe_node_max" |bc) + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_GFS=$(echo "2*$npe_node_max" |bc) + export WRTIOBUF="48M" + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export npe_wav=140 + export npe_wav_gfs=140 + export nth_fv3=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP=$(echo "3*$npe_node_max" |bc) + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_GFS=$(echo "3*$npe_node_max" |bc) + export WRTIOBUF="64M" + ;; + *) + echo "grid $case_in not supported, ABORT!" 
+ exit 1 + ;; +esac + +echo "END: config.fv3" diff --git a/FV3GFSwfm/cycemcv16/config.fv3ic b/FV3GFSwfm/cycemcv16/config.fv3ic new file mode 100755 index 0000000000..eaed3892ea --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.fv3ic @@ -0,0 +1,19 @@ +#!/bin/ksh -x + +########## config.fv3ic ########## +# Convert GFS initial conditions into FV3 initial conditions + +echo "BEGIN: config.fv3ic" + +# Task and thread configuration +export wtime_fv3ic="00:30:00" +export npe_fv3ic=1 +export npe_node_fv3ic=1 +export nth_fv3ic=${NTHREADS_CHGRES:-24} +if [ $machine = HERA ]; then + export npe_fv3ic=4 + export npe_node_fv3ic=4 + export nth_fv3ic=1 +fi + +echo "END: config.fv3ic" diff --git a/FV3GFSwfm/cycemcv16/config.gempak b/FV3GFSwfm/cycemcv16/config.gempak new file mode 100755 index 0000000000..647f474e90 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.gempak @@ -0,0 +1,13 @@ +#!/bin/ksh -x + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +export GEMPAKSH=$HOMEgfs/jobs/JGFS_GEMPAK + +echo "END: config.gempak" diff --git a/FV3GFSwfm/cycemcv16/config.getic b/FV3GFSwfm/cycemcv16/config.getic new file mode 100755 index 0000000000..a754454264 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.getic @@ -0,0 +1,20 @@ +#!/bin/ksh -x + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +# We should just be supporting the OPSGFS only +export ics_from="opsgfs" # initial conditions from opsgfs or pargfs + +# Provide a parallel experiment name and path to HPSS archive +if [ $ics_from = "pargfs" ]; then + export parexp="prnemsrn" + export HPSS_PAR_PATH="/5year/NCEPDEV/emc-global/emc.glopara/WCOSS_C/$parexp" +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/cycemcv16/config.gldas b/FV3GFSwfm/cycemcv16/config.gldas new file mode 100755 index 0000000000..456d205c9b --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.gldas @@ -0,0 +1,16 @@ +#!/bin/ksh -x + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR + + +echo "END: config.gldas" diff --git a/FV3GFSwfm/cycemcv16/config.metp b/FV3GFSwfm/cycemcv16/config.metp new file mode 100755 index 0000000000..aaea79db62 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.metp @@ -0,0 +1,69 @@ +#!/bin/ksh -x + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. 
$EXPDIR/config.resources metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus, Verify grid-to-grid, and/or grid-to-obs, and/or precipitation options +#---------------------------------------------------------- + +if [ "$CDUMP" = "gfs" ] ; then + if [ $RUN_GRID2GRID_STEP1 = "YES" -o $RUN_GRID2OBS_STEP1 = "YES" -o $RUN_PRECIP_STEP1 = "YES" ]; then + export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd + export VERIF_GLOBALSH=$HOMEverif_global/ush/run_verif_global_in_global_workflow.sh + ## INPUT DATA SETTINGS + export model_list=$PSLOT + export model_data_dir_list=$ARCDIR/.. + export model_fileformat_list="pgbf{lead?fmt=%H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" + export model_hpssdir_list=$ATARDIR/.. + export get_data_from_hpss="NO" + export hpss_walltime="10" + ## OUTPUT SETTINGS + export OUTPUTROOT=$RUNDIR/$CDUMP/$CDATE/vrfy/metplus_exp + export model_arch_dir_list=$ARCDIR/.. 
+ export make_met_data_by="VALID" + export gather_by="VSDB" + ## DATE SETTINGS + export VRFYBACK_HRS="24" + ## METPLUS SETTINGS + export METplus_verbosity="INFO" + export MET_verbosity="2" + export log_MET_output_to_METplus="yes" + ## FORECAST VERIFICATION SETTINGS + export fhr_min=$FHMIN_GFS + export fhr_max=$FHMAX_GFS + # GRID-TO-GRID STEP 1 + export g2g1_type_list="anom pres sfc" + export g2g1_anl_name="self_anl" + export g2g1_anl_fileformat_list="pgbanl.gfs.{valid?fmt=%Y%m%d%H}.grib2" + export g2g1_grid="G002" + # GRID-TO-OBS STEP 1 + export g2o1_type_list="upper_air conus_sfc" + export g2o1_obtype_upper_air="ADPUPA" + export g2o1_grid_upper_air="G003" + export g2o1_fhr_out_upper_air="6" + export g2o1_obtype_conus_sfc="ONLYSF ADPUPA" + export g2o1_grid_conus_sfc="G104" + export g2o1_fhr_out_conus_sfc="3" + export g2o1_prepbufr_data_runhpss="YES" + # PRECIP STEP 1 + export precip1_obtype="ccpa" + export precip1_accum_length="24" + export precip1_model_bucket_list="06" + export precip1_model_varname_list="APCP" + export precip1_model_fileformat_list="pgbf{lead?fmt=%H}.gfs.{init?fmt=%Y%m%d%H}.grib2" + export precip1_grid="G211" + fi +fi + +echo "END: config.metp" diff --git a/FV3GFSwfm/cycemcv16/config.nsst b/FV3GFSwfm/cycemcv16/config.nsst new file mode 100755 index 0000000000..0bf2792474 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.nsst @@ -0,0 +1,45 @@ +#!/bin/ksh -x + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export NST_SPINUP=1 +else + export NST_SPINUP=0 +fi +#if [[ "$CDATE" = $SDATE ]]; then +# export NST_SPINUP=1 +#fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +if [[ "$CCPP_SUITE" == 
"FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export NST_RESV=1 +else + export NST_RESV=0 +fi + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +if [[ "$CCPP_SUITE" == "FV3_GSD_v0" || "$CCPP_SUITE" == "FV3_GSD_noah" ]] ; then + export ZSEA2=5 +else + export ZSEA2=0 +fi + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/cycemcv16/config.post b/FV3GFSwfm/cycemcv16/config.post new file mode 100755 index 0000000000..4e33da0cbe --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.post @@ -0,0 +1,46 @@ +#!/bin/ksh -x + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=binarynemsiompiio +if [ $OUTPUT_FILE = "netcdf" ]; then + export MODEL_OUT_FORM=netcdfpara +fi + +# Post driver job that calls gfs_nceppost.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_NCEPPOST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_nceppost.sh" +export POSTGPEXEC="$HOMEpost/exec/gfs_ncep_post" +export GOESF=YES # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [ $machine = "WCOSS_DELL_P3" ]; then + export npe_dwn=28 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/cycemcv16/config.postsnd b/FV3GFSwfm/cycemcv16/config.postsnd new file mode 100755 index 0000000000..9c68e5284e --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.postsnd @@ -0,0 +1,13 @@ +#!/bin/ksh -x + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_POSTSND + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/cycemcv16/config.prep b/FV3GFSwfm/cycemcv16/config.prep new file mode 100755 index 0000000000..c9dbe8743a --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.prep @@ -0,0 +1,55 @@ +#!/bin/ksh -x + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export DO_MAKEPREPBUFR="YES" # if NO, will copy prepbufr from globaldump + + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/extropcy_qc_reloc.sh" +export SENDCOM=YES + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/fix_am +export USHRELO=${HOMERELO}/ush + + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for start of GFS v16 parallels +if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 +fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +#NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +##if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +## export PRVT=$EXPDIR/prepobs_errtable.global +##fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/cycemcv16/config.prepbufr b/FV3GFSwfm/cycemcv16/config.prepbufr new file mode 100755 index 0000000000..c90a732c41 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.prepbufr @@ -0,0 +1,17 @@ +#!/bin/ksh -x + +########## config.prepbufr ########## +# PREPBUFR specific configuration + +echo "BEGIN: config.prepbufr" + +# Get task specific resources +. 
$EXPDIR/config.resources prepbufr + +# Set variables + +if [ $machine = "HERA" ]; then + export GESROOT=/scratch1/NCEPDEV/rstprod # set by module prod_envir on WCOSS_C +fi + +echo "END: config.prepbufr" diff --git a/FV3GFSwfm/cycemcv16/config.resources b/FV3GFSwfm/cycemcv16/config.resources new file mode 100755 index 0000000000..0ca8ca03a1 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.resources @@ -0,0 +1,395 @@ +#!/bin/ksh -x + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" + echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "$machine" = "WCOSS_DELL_P3" ]]; then + export npe_node_max=28 + if [ "$QUEUE" = "dev2" -o "$QUEUE" = "devonprod2" -o "$QUEUE" = "devmax2" ]; then # WCOSS Dell 3.5 + export npe_node_max=40 + fi +elif [[ "$machine" = "WCOSS_C" ]]; then + export npe_node_max=24 +elif [[ "$machine" = "JET" ]]; then + export npe_node_max=24 +elif [[ "$machine" = "HERA" ]]; then + export npe_node_max=40 +fi + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=4" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=10 + export nth_waveinit=1 + export npe_node_waveinit=$(echo "$npe_node_max / $nth_waveinit" | bc) + export NTASKS=${npe_waveinit} + +elif [ $step = "waveprep" ]; then + + export 
wtime_waveprep="00:30:00" + export npe_waveprep=65 + export nth_waveprep=1 + export npe_node_waveprep=$(echo "$npe_node_max / $nth_waveprep" | bc) + export NTASKS=${npe_waveprep} + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="06:00:00" + export npe_wavepostsbs=10 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$(echo "$npe_node_max / $nth_wavepostsbs" | bc) + export NTASKS=${npe_wavepostsbs} + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="03:00:00" + export npe_wavepostbndpnt=56 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=$(echo "$npe_node_max / $nth_wavepostbndpnt" | bc) + export NTASKS=${npe_wavepostbndpnt} + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="03:00:00" + export npe_wavepostpnt=56 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=$(echo "$npe_node_max / $nth_wavepostpnt" | bc) + export NTASKS=${npe_wavepostpnt} + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="01:00:00" + export npe_wavegempak=$npe_node_max + export nth_wavegempak=1 + export npe_node_wavegempak=$(echo "$npe_node_max / $nth_wavegempak" | bc) + export NTASKS=${npe_wavegempak} + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:30:00" + export npe_waveawipsbulls=$npe_node_max + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=${npe_waveawipsbulls} + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="00:30:00" + export npe_waveawipsgridded=$npe_node_max + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=${npe_waveawipsgridded} + +elif [ $step = "anal" ]; then + + export wtime_anal="02:00:00" + export npe_anal=800 + export nth_anal=4 + if [ $CASE = "C384" ]; then + export npe_anal=160 + export nth_anal=10 + fi + if [ $CASE = "C192" -o $CASE = "C96" -o 
$CASE = "C48" ]; then export npe_anal=84; fi + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_anal=7; fi + export npe_node_anal=$(echo "$npe_node_max / $nth_anal" | bc) + export nth_cycle=$npe_node_max + if [[ "$machine" == "WCOSS_C" ]]; then export memory_anal="3072M"; fi + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="02:00:00" + export npe_analcalc=127 + export nth_analcalc=1 + export npe_node_analcalc=$npe_node_max + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_analcalc=127 ; fi + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="02:00:00" + export npe_analdiag=112 + export nth_analdiag=1 + export npe_node_analdiag=$npe_node_max + if [[ "$machine" == "WCOSS_C" ]]; then export memory_analdiag="3072M"; fi + +elif [ $step = "gldas" ]; then + + export wtime_gldas="02:00:00" + export npe_gldas=96 + export nth_gldas=1 + export npe_node_gldas=$npe_node_max + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=24 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_gldas=112 ; fi + if [[ "$machine" == "WCOSS_C" ]]; then export memory_gldas="3072M"; fi + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:00:00" + export wtime_fcst_gfs="08:00:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export npe_node_fcst=$(echo "$npe_node_max / $nth_fcst" | bc) + if [[ "$machine" == "WCOSS_C" ]]; then export memory_fcst="1024M"; fi + +elif [ $step = "post" ]; then + + export wtime_post="02:00:00" + export wtime_post_gfs="06:00:00" + export npe_post=48 + export nth_post=1 + export npe_node_post=12 + export npe_node_dwn=$npe_node_max + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_node_post=14 ; fi + if [[ "$machine" == "WCOSS_C" ]]; then export memory_post="3072M"; fi + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=1 + 
export nth_wafs=1 + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=1 + export nth_wafsgcip=1 + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=1 + export npe_node_wafsgrib2=1 + export nth_wafsgrib2=1 + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=1 + export nth_wafsblending=1 + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=1 + export npe_node_wafsgrib20p25=1 + export nth_wafsgrib20p25=1 + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=1 + export nth_wafsblending0p25=1 + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "$machine" == "WCOSS_C" ]]; then + export memory_vrfy="3072M" + elif [[ "$machine" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + if [[ "$machine" == "WCOSS_C" ]]; then + export memory_metp="3072M" + elif [[ "$machine" == "THEIA" ]]; then + export memory_metp="16384M" + fi + +elif [ $step = "echgres" ]; then + + export wtime_echgres="01:00:00" + export npe_echgres=3 + export nth_echgres=$npe_node_max + export npe_node_echgres=1 + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=2048M" + +elif [ 
$step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:30:00" + export wtime_eomg="01:00:00" + if [ $CASE = "C768" ]; then + export npe_eobs=100 + elif [ $CASE = "C384" ]; then + export npe_eobs=42 + elif [ $CASE = "C192" ]; then + export npe_eobs=28 + elif [ $CASE = "C96" -o $CASE = "C48" ]; then + export npe_eobs=14 + fi + export nth_eobs=2 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_eobs=7; fi + export npe_node_eobs=$(echo "$npe_node_max / $nth_eobs" | bc) + if [[ "$machine" == "WCOSS_C" ]]; then export memory_eobs="3072M"; fi + +elif [ $step = "ediag" ]; then + + export wtime_ediag="02:00:00" + export npe_ediag=56 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + if [[ "$machine" == "WCOSS_C" ]]; then export memory_ediag="3072M"; fi + +elif [ $step = "eupd" ]; then + + export wtime_eupd="01:30:00" + if [ $CASE = "C768" ]; then + export npe_eupd=540 + export nth_eupd=6 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then + export nth_eupd=9 + fi + elif [ $CASE = "C384" ]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then + export nth_eupd=9 + fi + if [[ "$machine" = "HERA" ]]; then + export npe_eupd=84 + export nth_eupd=10 + fi + elif [ $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then + export npe_eupd=42 + export nth_eupd=2 + fi + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + if [[ "$machine" == "WCOSS_C" ]]; then + export memory_eupd="3072M" + fi + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:30:00" + export npe_ecen=80 + export nth_ecen=6 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_ecen=7; fi + if [ $CASE = "C384" -o $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then export nth_ecen=2; fi + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + if [[ "$machine" == "WCOSS_C" ]]; then export memory_ecen="3072M"; fi + +elif [ $step = "esfc" ]; then + + export wtime_esfc="03:00:00" + export 
npe_esfc=80 + export npe_node_esfc=$npe_node_max + export nth_esfc=1 + export nth_cycle=$nth_esfc + if [[ "$machine" == "WCOSS_C" ]]; then export memory_esfc="3072M"; fi + +elif [ $step = "efcs" ]; then + + export wtime_efcs="03:00:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + if [[ "$machine" == "WCOSS_C" ]]; then export memory_efcs="254M"; fi + +elif [ $step = "epos" ]; then + + export wtime_epos="03:00:00" + export npe_epos=80 + export nth_epos=6 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_epos=7; fi + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + if [[ "$machine" == "WCOSS_C" ]]; then export memory_epos="254M"; fi + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=1 + export npe_node_postsnd=4 + export npe_postsndcfp=9 + export npe_node_postsndcfp=3 + if [ $OUTPUT_FILE == "nemsio" ]; then + export npe_postsnd=13 + export npe_node_postsnd=4 + fi + if [[ "$machine" = "HERA" ]]; then export npe_node_postsnd=2; fi + if [[ "$machine" == "WCOSS_C" ]]; then export memory_postsnd="254M"; fi + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=4 + export npe_node_awips=4 + export nth_awips=2 + if [[ "$machine" == "WCOSS_DELL_P3" ]]; then + export npe_awips=2 + export npe_node_awips=2 + export nth_awips=1 + fi + if [[ "$machine" == "WCOSS_C" ]]; then export memory_awips="2048M"; fi + +elif [ $step = "gempak" ]; then + + export wtime_gempak="02:00:00" + export npe_gempak=17 + export npe_node_gempak=4 + export nth_gempak=3 + if [[ "$machine" == "WCOSS_C" ]]; then export memory_gempak="254M"; fi + +else + + echo "Invalid step = $step, ABORT!" 
+ exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/cycemcv16/config.vrfy b/FV3GFSwfm/cycemcv16/config.vrfy new file mode 100755 index 0000000000..796e293008 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.vrfy @@ -0,0 +1,193 @@ +#!/bin/ksh -x + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. $EXPDIR/config.resources vrfy + +export VDUMP="gfs" # Verifying dump +export CDUMPFCST="gdas" # Fit-to-obs with GDAS/GFS prepbufr +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis + +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYFITS="YES" # Fit to observations +export VSDB_STEP1="YES" # Populate VSDB database +export VSDB_STEP2="NO" +export VRFYG2OBS="YES" # Grid to observations, see note below if turning ON +export VRFYPRCP="YES" # Precip threat scores +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#------------------------------------------------- +# Fit to Observations +#------------------------------------------------- + +if [ $VRFYFITS = "YES" ]; then + + export PRVT=$HOMEgfs/fix/fix_gsi/prepobs_errtable.global + export HYBLEVS=$HOMEgfs/fix/fix_am/global_hyblev.l${LEVS}.txt + export CUE2RUN=$QUEUE + + export VBACKUP_FITS=24 + + export CONVNETC="NO" + if [ ${netcdf_diag:-".false."} = ".true." 
]; then + export CONVNETC="YES" + fi + + if [ $machine = "WCOSS_C" ]; then + export fitdir="$BASE_GIT/verif/global/parafits.fv3nems/batrun" + export PREPQFITSH="$fitdir/subfits_cray_nems" + elif [ $machine = "WCOSS_DELL_P3" ]; then + export fitdir="$BASE_GIT/verif/global/Fit2Obs/ncf-vqc/batrun" + export PREPQFITSH="$fitdir/subfits_dell_nems" + elif [ $machine = "HERA" ]; then + #export fitdir="$BASE_GIT/Fit2Obs/batrun" + export fitdir="$BASE_GIT/verif/global/Fit2Obs/ncf-vqc/batrun" + export PREPQFITSH="$fitdir/subfits_hera_slurm" + fi + +fi + + +#---------------------------------------------------------- +# VSDB STEP1, Verify Precipipation and Grid To Obs options +#---------------------------------------------------------- +# All these call $VSDBSH + +if [ "$CDUMP" = "gfs" ] ; then + ddd=`echo $CDATE |cut -c 1-8` + #if [ $ddd -eq 5 -o $ddd -eq 10 ]; then export VSDB_STEP2 = "YES" ;fi + + if [ $VSDB_STEP1 = "YES" -o $VSDB_STEP2 = "YES" -o $VRFYPRCP = "YES" -o $VRFYG2OBS = "YES" ]; then + export BACKDATEVSDB=24 # execute vsdbjob for the previous day + export VBACKUP_PRCP=24 # back up for QPF verification data + export vsdbsave="$NOSCRUB/archive/vsdb_data" # place to save vsdb database + export vsdbhome=$BASE_VERIF # location of global verification scripts + export VSDBSH="$vsdbhome/vsdbjob.sh" # VSDB job script + export vlength=$FHMAX_GFS # verification length + export vhr_rain=$FHMAX_GFS # verification length for precip + export ftyplist="pgbq" # verif. 
files used for computing QPF ETS scores + export ptyplist="PRATE" # precip types in GRIB: PRATE or APCP + export anltype="gfs" # default=gfs, analysis type (gfs or gdas) for verification + export rain_bucket=6 # prate in pgb files is 6-hr accumulated + + export VSDB_START_DATE="$SDATE" # starting date for vsdb maps + export webhost="emcrzdm.ncep.noaa.gov" # webhost(rzdm) computer + export webhostid="$USER" # webhost(rzdm) user name + export SEND2WEB="NO" # whether or not to send maps to webhost + export WEBDIR="/home/people/emc/www/htdocs/gmb/${webhostid}/vsdb/$PSLOT" + export mdlist="gfs $PSLOT " # exps (up to 10) to compare in maps + fi +fi + + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [ $VRFYRAD = "YES" -o $VRFYMINMON = "YES" -o $VRFYOZN = "YES" ]; then + + export envir="para" + + # Radiance Monitoring + if [[ "$VRFYRAD" == "YES" && "$CDUMP" == "$CDFNL" ]] ; then + + export RADMON_SUFFIX=$PSLOT + export TANKverf="$NOSCRUB/monitor/radmon" + export VRFYRADSH="$HOMEgfs/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "$VRFYMINMON" = "YES" ]] ; then + + export MINMON_SUFFIX=$PSLOT + export M_TANKverf="$NOSCRUB/monitor/minmon" + if [[ "$CDUMP" = "gdas" ]] ; then + export VRFYMINSH="$HOMEgfs/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "$CDUMP" = "gfs" ]] ; then + export VRFYMINSH="$HOMEgfs/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "$VRFYOZN" == "YES" && "$CDUMP" == "$CDFNL" ]] ; then + + export HOMEgfs_ozn="$HOMEgfs" + export OZNMON_SUFFIX=$PSLOT + export TANKverf_ozn="$NOSCRUB/monitor/oznmon" + export VRFYOZNSH="$HOMEgfs/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=v1.1.15.1 +if [ $machine = "WCOSS_DELL_P3" ] ; then + 
export ens_tracker_ver=v1.1.15.3 +fi +export HOMEens_tracker=$BASE_GIT/tracker/ens_tracker.${ens_tracker_ver} + + +if [ "$VRFYTRAK" = "YES" ]; then + + export TRACKERSH="$HOMEgfs/jobs/JGFS_CYCLONE_TRACKER" + if [ "$CDUMP" = "gdas" ]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=$FHMAX + else + export FHOUT_CYCLONE=6 + export FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + fi + if [ $machine = "HERA" ]; then + export COMROOTp1="/scratch1/NCEPDEV/global/glopara/com" + export COMINsyn=${COMINsyn:-${COMROOTp1}/gfs/prod/syndat} + else + export COMINsyn=${COMINsyn:-${COMROOT}/gfs/prod/syndat} + fi +fi + + +if [[ "$VRFYGENESIS" == "YES" && "$CDUMP" == "gfs" ]]; then + + export GENESISSH="$HOMEgfs/jobs/JGFS_CYCLONE_GENESIS" +fi + +if [[ "$VRFYFSU" == "YES" && "$CDUMP" == "gfs" ]]; then + + export GENESISFSU="$HOMEgfs/jobs/JGFS_FSU_GENESIS" +fi + +if [[ "$RUNMOS" == "YES" && "$CDUMP" == "gfs" ]]; then + + if [ $machine = "WCOSS_C" ] ; then + export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.cray" + elif [ $machine = "WCOSS_DELL_P3" ] ; then + export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.dell" + elif [ $machine = "HERA" ] ; then + export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on $machine!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/cycemcv16/config.wafs b/FV3GFSwfm/cycemcv16/config.wafs new file mode 100755 index 0000000000..dafded38a1 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.wafs @@ -0,0 +1,15 @@ +#!/bin/ksh -x + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export fcsthrs=120 +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/cycemcv16/config.wafsblending b/FV3GFSwfm/cycemcv16/config.wafsblending new file mode 100755 index 0000000000..dfd7d1715e --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.wafsblending @@ -0,0 +1,14 @@ +#!/bin/ksh -x + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/cycemcv16/config.wafsblending0p25 b/FV3GFSwfm/cycemcv16/config.wafsblending0p25 new file mode 100755 index 0000000000..28a2de90ff --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#!/bin/ksh -x + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/cycemcv16/config.wafsgcip b/FV3GFSwfm/cycemcv16/config.wafsgcip new file mode 100755 index 0000000000..5c166a541b --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.wafsgcip @@ -0,0 +1,15 @@ +#!/bin/ksh -x + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/cycemcv16/config.wafsgrib2 b/FV3GFSwfm/cycemcv16/config.wafsgrib2 new file mode 100755 index 0000000000..27b137cd8c --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.wafsgrib2 @@ -0,0 +1,15 @@ +#!/bin/ksh -x + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/cycemcv16/config.wafsgrib20p25 b/FV3GFSwfm/cycemcv16/config.wafsgrib20p25 new file mode 100755 index 0000000000..8b55333c00 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#!/bin/ksh -x + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/cycemcv16/config.wave b/FV3GFSwfm/cycemcv16/config.wave new file mode 100755 index 0000000000..df37ff49d3 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.wave @@ -0,0 +1,121 @@ +#!/bin/ksh -x + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${CDUMP}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD='gnh_10m aoc_9km gsh_15m' +export waveGRDN='1 2 3' # gridnumber for ww3_multi +export waveGRDG='10 20 30' # gridgroup for ww3_multi + +# ESMF input grid +export waveesmfGRD='glox_10m' # input grid + +# Grids for input fields +export WAVEICE_DID=sice +export WAVEICE_FID=glix_10m +export WAVECUR_DID=rtofs +export WAVECUR_FID=glix_10m +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=points +export waveinterpGRD='glo_15mxt' # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavesbsGRD='' # side-by-side grids generated as wave model runs, writes to com +export wavepostGRD='gnh_10m aoc_9km gsh_15m' # Native grids that will be post-processed (grib2) + +# CDATE +export CDATE=${PDY}${cyc} + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export 
FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} + +# gridded and point output rate +export DTFLD_WAV=`expr $FHOUT_HF_WAV \* 3600` +export DTPNT_WAV=3600 +export FHINCP_WAV=`expr $DTPNT_WAV / 3600` + +# Selected output parameters (gridded) +export OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + WAVNCYC=4 + WAVHCYC=6 + FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days + WAVHCYC=`expr 24 / ${gfs_cyc}` +else + WAVHCYC=0 + FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi +export FHMAX_WAV_CUR WAVHCYC WAVNCYC + +# Restart timing business +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export RSTTYPE_WAV='T' # generate second tier of restart files + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + export RSTTYPE_WAV='F' # generate second tier of restart files + if [ $rst_dt_gfs -gt 0 ]; then export RSTTYPE_WAV='T' ; fi + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM 
= -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=`echo $RUNMEM | grep -o '..$'` +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +export WW3ICEINP='YES' +export WW3CURINP='YES' + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +echo "END: config.wave" diff --git a/FV3GFSwfm/cycemcv16/config.waveawipsbulls b/FV3GFSwfm/cycemcv16/config.waveawipsbulls new file mode 100755 index 0000000000..ec39bfb646 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.waveawipsbulls @@ -0,0 +1,17 @@ +#!/bin/ksh -x + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" +export COMPONENT=${COMPONENT:-wave} +export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" +export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/cycemcv16/config.waveawipsgridded b/FV3GFSwfm/cycemcv16/config.waveawipsgridded new file mode 100755 index 0000000000..7f2972bb24 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.waveawipsgridded @@ -0,0 +1,17 @@ +#!/bin/ksh -x + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" +export COMPONENT=${COMPONENT:-wave} +export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" +export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/cycemcv16/config.wavegempak b/FV3GFSwfm/cycemcv16/config.wavegempak new file mode 100755 index 0000000000..ec1f59d25c --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.wavegempak @@ -0,0 +1,16 @@ +#!/bin/ksh -x + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" +export COMPONENT=${COMPONENT:-wave} +export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" +export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT/gempak" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/cycemcv16/config.waveinit b/FV3GFSwfm/cycemcv16/config.waveinit new file mode 100755 index 0000000000..93960e5e25 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.waveinit @@ -0,0 +1,14 @@ +#!/bin/ksh -x + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/cycemcv16/config.wavepostbndpnt b/FV3GFSwfm/cycemcv16/config.wavepostbndpnt new file mode 100755 index 0000000000..5ec96a697f --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#!/bin/ksh -x + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/cycemcv16/config.wavepostpnt b/FV3GFSwfm/cycemcv16/config.wavepostpnt new file mode 100755 index 0000000000..276ca230a6 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.wavepostpnt @@ -0,0 +1,11 @@ +#!/bin/ksh -x + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/cycemcv16/config.wavepostsbs b/FV3GFSwfm/cycemcv16/config.wavepostsbs new file mode 100755 index 0000000000..b9051776e3 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/config.wavepostsbs @@ -0,0 +1,28 @@ +#!/bin/ksh -x + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="gnh_10m" +export WAV_SUBGRB="WAV_ATLO_GRB WAV_EPAC_GRB WAV_WCST_GRB" +export WAV_ATLO_GRB="0 6 0 0 0 0 0 0 301 331 0 0 55000000 260000000 48 0 310000000 166667 166667 0 atlocn 0p16" +export WAV_EPAC_GRB="0 6 0 0 0 0 0 0 511 301 0 0 30000002 130000000 48 -20000000 215000000 166667 166667 0 epacif 0p16" +export WAV_WCST_GRB="0 6 0 0 0 0 0 0 241 151 0 0 50000000 210000000 48 25000000 250000000 166667 166667 0 wcoast 0p16" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/cycemcv16/config.waveprep b/FV3GFSwfm/cycemcv16/config.waveprep new file mode 100755 index 0000000000..aaf8675283 --- /dev/null +++ 
b/FV3GFSwfm/cycemcv16/config.waveprep @@ -0,0 +1,46 @@ +#!/bin/ksh -x + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +export HOUR_INC=3 # This value should match with the one used in + # the wind update script +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV='3' +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' + +# Wind interval for standalone file-based runs +# Output stride +export WAV_WND_HOUR_INC=1 # This value should match with the one used in + # the wind update script +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} # Defaults to 3h for GEFSv12 +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} # Constant DT for GFSv16 from getgo +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/cycemcv16/cycemcv16.crontab b/FV3GFSwfm/cycemcv16/cycemcv16.crontab new file mode 100644 index 0000000000..4df2ec1998 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/cycemcv16.crontab @@ -0,0 +1,6 @@ + +#################### cycemcv16 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.2/bin/rocotorun -d /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycemcv16/cycemcv16.db -w 
/scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycemcv16/cycemcv16.xml +################################################################# + diff --git a/FV3GFSwfm/cycemcv16/cycemcv16.xml b/FV3GFSwfm/cycemcv16/cycemcv16.xml new file mode 100644 index 0000000000..bc1dc820a2 --- /dev/null +++ b/FV3GFSwfm/cycemcv16/cycemcv16.xml @@ -0,0 +1,1076 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1:ppn=4:tpp=1"> + + + + + 80:ppn=10:tpp=4"> + + + + + 4:ppn=40:tpp=1"> + + + + + 3:ppn=40:tpp=1"> + + + + + + + 81:ppn=10:tpp=4"> + + + + + 4:ppn=12:tpp=1"> + + + + + 3:ppn=1:tpp=1"> + + + + + + + 1:ppn=1:tpp=1"> + + + + + + 5:ppn=20:tpp=2"> + + + + + 2:ppn=40:tpp=1"> + + + + + 90:ppn=6:tpp=6"> + + + + + 3:ppn=1:tpp=40"> + + + + + 14:ppn=6:tpp=6"> + + + + + 2:ppn=40:tpp=1"> + + + + + + 11:ppn=40:tpp=1"> + + + + + 14:ppn=6:tpp=6"> + + + + + + 1:ppn=1:tpp=1"> + + + + + + 1:ppn=4:tpp=1"> + + + + + 80:ppn=10:tpp=4"> + + + + + 4:ppn=40:tpp=1"> + + + + + 155:ppn=10:tpp=4"> + + + + + 4:ppn=12:tpp=1"> + + + + + 1:ppn=1:tpp=1"> + + + + + + + 1:ppn=1:tpp=1"> + + + + + + 20:ppn=2:tpp=1"> + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + &SDATE; &SDATE; 06:00:00 + &SDATE; &EDATE; 06:00:00 + &SDATE; &EDATE; 06:00:00 + &SDATE_GFS; &EDATE_GFS; &INTERVAL_GFS; + + + + &JOBS_DIR;/prep.sh + + &PSLOT;_gdasprep_@H + &ACCOUNT; + &QUEUE_PREP_GDAS; + &RESOURCES_PREP_GDAS; + &WALLTIME_PREP_GDAS; + + &NATIVE_PREP_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasprep.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + &ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009.nc + &DMPDIR;/gdas.@Y@m@d/@H/gdas.t@Hz.updated.status.tm00.bufr_d + + + + + + + + + &JOBS_DIR;/anal.sh + + &PSLOT;_gdasanal_@H + &ACCOUNT; + &QUEUE_ANAL_GDAS; + &RESOURCES_ANAL_GDAS; + &WALLTIME_ANAL_GDAS; + + &NATIVE_ANAL_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasanal.log + + SLURM_SETYES + 
RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + + + + + + + + + &JOBS_DIR;/analcalc.sh + + &PSLOT;_gdasanalcalc_@H + &ACCOUNT; + &QUEUE_ANALCALC_GDAS; + &RESOURCES_ANALCALC_GDAS; + &WALLTIME_ANALCALC_GDAS; + + &NATIVE_ANALCALC_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasanalcalc.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + &ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.loginc.txt + + + + + + + + + + &JOBS_DIR;/analdiag.sh + + &PSLOT;_gdasanaldiag_@H + &ACCOUNT; + &QUEUE_ANALDIAG_GDAS; + &RESOURCES_ANALDIAG_GDAS; + &WALLTIME_ANALDIAG_GDAS; + + &NATIVE_ANALDIAG_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasanaldiag.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + &ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.loginc.txt + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gdasfcst_@H + &ACCOUNT; + &QUEUE_FCST_GDAS; + &RESOURCES_FCST_GDAS; + &WALLTIME_FCST_GDAS; + + &NATIVE_FCST_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasfcst.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + + + + + + + + 000 001 002 003 004 005 006 007 008 009 010 + anl f000 f001 f002 f003 f004 f005 f006 f007 f008 f009 + anl f000 f001 f002 f003 f004 f005 f006 f007 f008 f009 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gdaspost#grp#_@H + &ACCOUNT; + &QUEUE_POST_GDAS; + &RESOURCES_POST_GDAS; + &WALLTIME_POST_GDAS; + + &NATIVE_POST_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdaspost#grp#.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + FHRGRP#grp# + FHRLST#lst# + ROTDIR&ROTDIR; + + + + &ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gdasvrfy_@H + &ACCOUNT; + &QUEUE_VRFY_GDAS; + 
&RESOURCES_VRFY_GDAS; + &WALLTIME_VRFY_GDAS; + &MEMORY_VRFY_GDAS; + &NATIVE_VRFY_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasvrfy.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gdasarch_@H + &ACCOUNT; + &QUEUE_ARCH_GDAS; + &PARTITION_ARCH_GDAS; + &RESOURCES_ARCH_GDAS; + &WALLTIME_ARCH_GDAS; + &MEMORY_ARCH_GDAS; + &NATIVE_ARCH_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasarch.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgdas + PDY@Y@m@d + cyc@H + + + + + &ARCHIVE_TO_HPSS;YES + + + + + + + + &JOBS_DIR;/eobs.sh + + &PSLOT;_gdaseobs_@H + &ACCOUNT; + &QUEUE_EOBS_GDAS; + &RESOURCES_EOBS_GDAS; + &WALLTIME_EOBS_GDAS; + + &NATIVE_EOBS_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdaseobs.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + + + + + + + + + &JOBS_DIR;/ediag.sh + + &PSLOT;_gdasediag_@H + &ACCOUNT; + &QUEUE_EDIAG_GDAS; + &RESOURCES_EDIAG_GDAS; + &WALLTIME_EDIAG_GDAS; + + &NATIVE_EDIAG_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasediag.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + + + + + + &JOBS_DIR;/eupd.sh + + &PSLOT;_gdaseupd_@H + &ACCOUNT; + &QUEUE_EUPD_GDAS; + &RESOURCES_EUPD_GDAS; + &WALLTIME_EUPD_GDAS; + + &NATIVE_EUPD_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdaseupd.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + + + + + + 000 001 002 + f003 f006 f009 + f003 f006 f009 + + + + &JOBS_DIR;/ecen.sh + + &PSLOT;_gdasecen#grp#_@H + &ACCOUNT; + &QUEUE_ECEN_GDAS; + &RESOURCES_ECEN_GDAS; + &WALLTIME_ECEN_GDAS; + + &NATIVE_ECEN_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasecen#grp#.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + 
CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + FHRGRP#grp# + FHRLST#lst# + + + + + &ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.loganl.txt + + + + + + + + + + + + + &JOBS_DIR;/esfc.sh + + &PSLOT;_gdasesfc_@H + &ACCOUNT; + &QUEUE_ESFC_GDAS; + &RESOURCES_ESFC_GDAS; + &WALLTIME_ESFC_GDAS; + + &NATIVE_ESFC_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasesfc.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + &ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.loganl.txt + + + + + + + + + + + 01 02 03 04 05 06 07 08 09 10 + + + + &JOBS_DIR;/efcs.sh + + &PSLOT;_gdasefcs#grp#_@H + &ACCOUNT; + &QUEUE_EFCS_GDAS; + &RESOURCES_EFCS_GDAS; + &WALLTIME_EFCS_GDAS; + + &NATIVE_EFCS_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasefcs#grp#.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + ENSGRP#grp# + + + + + + + + + + + + + + + + + + &JOBS_DIR;/echgres.sh + + &PSLOT;_gdasechgres_@H + &ACCOUNT; + &QUEUE_ECHGRES_GDAS; + &RESOURCES_ECHGRES_GDAS; + &WALLTIME_ECHGRES_GDAS; + + &NATIVE_ECHGRES_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasechgres.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + + + + + + + + + + + + + 000 001 002 003 004 005 006 + f003 f004 f005 f006 f007 f008 f009 + f003 f004 f005 f006 f007 f008 f009 + + + + &JOBS_DIR;/epos.sh + + &PSLOT;_gdasepos#grp#_@H + &ACCOUNT; + &QUEUE_EPOS_GDAS; + &RESOURCES_EPOS_GDAS; + &WALLTIME_EPOS_GDAS; + + &NATIVE_EPOS_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasepos#grp#.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + FHRGRP#grp# + FHRLST#lst# + + + + + + + + + + + + 00 01 02 + + + + &JOBS_DIR;/earc.sh + + &PSLOT;_gdasearc#grp#_@H + &ACCOUNT; + &QUEUE_EARC_GDAS; + &PARTITION_EARC_GDAS; + &RESOURCES_EARC_GDAS; + &WALLTIME_EARC_GDAS; + &MEMORY_EARC_GDAS; + 
&NATIVE_EARC_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/gdasearc#grp#.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + CDUMPgdas + ENSGRP#grp# + + + + + + + + + + + + &JOBS_DIR;/prep.sh + + &PSLOT;_gfsprep_@H + &ACCOUNT; + &QUEUE_PREP_GFS; + &RESOURCES_PREP_GFS; + &WALLTIME_PREP_GFS; + + &NATIVE_PREP_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsprep.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + &ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009.nc + &DMPDIR;/gfs.@Y@m@d/@H/gfs.t@Hz.updated.status.tm00.bufr_d + + + + + + + + &JOBS_DIR;/anal.sh + + &PSLOT;_gfsanal_@H + &ACCOUNT; + &QUEUE_ANAL_GFS; + &RESOURCES_ANAL_GFS; + &WALLTIME_ANAL_GFS; + + &NATIVE_ANAL_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsanal.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + + + + + + + + + &JOBS_DIR;/analcalc.sh + + &PSLOT;_gfsanalcalc_@H + &ACCOUNT; + &QUEUE_ANALCALC_GFS; + &RESOURCES_ANALCALC_GFS; + &WALLTIME_ANALCALC_GFS; + + &NATIVE_ANALCALC_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsanalcalc.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + &ROTDIR;/gfs.@Y@m@d/@H/atmos/gfs.t@Hz.loginc.txt + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + &ACCOUNT; + &QUEUE_FCST_GFS; + &RESOURCES_FCST_GFS; + &WALLTIME_FCST_GFS; + + &NATIVE_FCST_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + + + + + + + + 000 001 002 003 004 005 006 007 008 009 010 011 012 013 014 015 016 017 018 019 020 021 + anl f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + anl f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 
f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + &ACCOUNT; + &QUEUE_POST_GFS; + &RESOURCES_POST_GFS; + &WALLTIME_POST_GFS; + + &NATIVE_POST_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + FHRGRP#grp# + FHRLST#lst# + ROTDIR&ROTDIR; + + + + &ROTDIR;/gfs.@Y@m@d/@H/atmos/gfs.t@Hz.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + &ACCOUNT; + &QUEUE_VRFY_GFS; + &RESOURCES_VRFY_GFS; + &WALLTIME_VRFY_GFS; + &MEMORY_VRFY_GFS; + &NATIVE_VRFY_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + + + + + + &JOBS_DIR;/postsnd.sh + + &PSLOT;_gfspostsnd_@H + &ACCOUNT; + &QUEUE_POSTSND_GFS; + &RESOURCES_POSTSND_GFS; + &WALLTIME_POSTSND_GFS; + + &NATIVE_POSTSND_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfspostsnd.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + &ACCOUNT; + &QUEUE_ARCH_GFS; + &PARTITION_ARCH_GFS; + &RESOURCES_ARCH_GFS; + &WALLTIME_ARCH_GFS; + &MEMORY_ARCH_GFS; + &NATIVE_ARCH_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + SLURM_SETYES + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + + + + &ARCHIVE_TO_HPSS;YES + + + + + + + + sleep 1 + + &PSLOT;_firstcyc_@H + &ACCOUNT; + &QUEUE; + &PARTITION_ARCH; + &RESOURCES_ARCH_GDAS; + &WALLTIME_ARCH_GDAS; + &NATIVE_ARCH_GDAS; + + &ROTDIR;/logs/@Y@m@d@H/firstcyc.log + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + + + + + diff --git a/FV3GFSwfm/cycemcv16/logs/jlogfile b/FV3GFSwfm/cycemcv16/logs/jlogfile new file mode 100644 index 0000000000..c1aa286d9a --- /dev/null +++ b/FV3GFSwfm/cycemcv16/logs/jlogfile @@ -0,0 +1,45 @@ +10/02 
07:50:00Z efcs.61055-HAS BEGUN on h2c19 +10/02 08:24:58Z efcs.218369-HAS BEGUN on h3c54 +10/02 08:25:03Z efcs.259420-HAS BEGUN on h1c02 +10/02 08:26:08Z fcst.292736-HAS BEGUN on h1c49 +10/02 08:26:17Z efcs.305370-HAS BEGUN on h1c19 +10/02 08:26:27Z efcs.30799-HAS BEGUN on h2c30 +10/02 08:26:41Z efcs.80733-HAS BEGUN on h11c01 +10/02 08:26:47Z efcs.151318-HAS BEGUN on h19c08 +10/02 08:26:51Z efcs.94097-HAS BEGUN on h3c21 +10/02 08:27:01Z efcs.9549-HAS BEGUN on h1c51 +10/02 08:27:09Z efcs.14095-HAS BEGUN on h14c16 +10/02 14:10:43Z efcs.174644-HAS BEGUN on h4c40 +10/02 14:10:44Z efcs.39702-HAS BEGUN on h3c27 +10/02 14:10:44Z efcs.112171-HAS BEGUN on h8c02 +10/02 14:10:44Z efcs.270960-HAS BEGUN on h1c20 +10/02 14:10:45Z efcs.196532-HAS BEGUN on h32m08 +10/02 14:10:46Z efcs.106635-HAS BEGUN on h2c44 +10/02 14:10:54Z efcs.299541-HAS BEGUN on h3c07 +10/02 14:11:52Z efcs.129544-HAS BEGUN on h2c41 +10/02 14:12:12Z efcs.98178-HAS BEGUN on h1c21 +10/02 14:12:23Z efcs.173042-HAS BEGUN on h19c56 +10/02 15:41:15Z efcs.112171-ENDED NORMALLY. +10/02 15:41:58Z efcs.174644-ENDED NORMALLY. +10/02 15:42:40Z efcs.270960-ENDED NORMALLY. +10/02 15:42:41Z efcs.196532-ENDED NORMALLY. +10/02 15:43:01Z efcs.39702-ENDED NORMALLY. +10/02 15:43:27Z efcs.106635-ENDED NORMALLY. +10/02 15:43:39Z efcs.299541-ENDED NORMALLY. +10/02 15:44:26Z efcs.129544-ENDED NORMALLY. +10/02 15:45:03Z efcs.98178-ENDED NORMALLY. +10/02 15:45:57Z efcs.173042-ENDED NORMALLY. +10/02 15:50:44Z epos.220223-HAS BEGUN on h10c26 +10/02 15:52:03Z epos.225919-HAS BEGUN on h5c21 +10/02 15:52:11Z epos.48748-HAS BEGUN on h4c39 +10/02 15:52:46Z epos.301162-HAS BEGUN on h3c05 +10/02 15:52:47Z epos.210953-HAS BEGUN on h11c04 +10/02 15:52:48Z epos.302742-HAS BEGUN on h13c01 +10/02 15:52:48Z epos.1937-HAS BEGUN on h4c01 +10/02 15:54:38Z epos.220223-ENDED NORMALLY. +10/02 15:56:06Z epos.225919-ENDED NORMALLY. +10/02 15:56:15Z epos.48748-ENDED NORMALLY. +10/02 15:56:49Z epos.210953-ENDED NORMALLY. 
+10/02 15:56:49Z epos.1937-ENDED NORMALLY. +10/02 15:56:51Z epos.302742-ENDED NORMALLY. +10/02 15:56:53Z epos.301162-ENDED NORMALLY. diff --git a/FV3GFSwfm/cycemcv16/run_cmds b/FV3GFSwfm/cycemcv16/run_cmds new file mode 100644 index 0000000000..8a2b1291bd --- /dev/null +++ b/FV3GFSwfm/cycemcv16/run_cmds @@ -0,0 +1,5 @@ +rocotorun -w /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycemcv16/cycemcv16.xml -d /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycemcv16/cycemcv16.db +rocotostat -w /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycemcv16/cycemcv16.xml -d /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd_ccpp_v16b_restructure/FV3GFSwfm/cycemcv16/cycemcv16.db | m + + + diff --git a/INFO b/INFO index 4d3ce15906..e07b8bb398 100644 --- a/INFO +++ b/INFO @@ -1,3 +1,73 @@ +Oct 02, 2020 +------------ + - do cycled run with GFS physics (emcv16beta) and GSD physics (GSDnoah) + -- GFS Physics: EnKF forecasts completed successfully + GDAS forecast crashed with floating invalid error + + in calwxtbg, jsta,jend= 261 280 im= 3072 + in calwxtbg,me= 13 iseed= 253654519 + in calwxtbg, jsta,jend= 161 180 im= 3072 + in calwxtbg,me= 8 iseed= 253654519 + forrtl: error (65): floating invalid + Image PC Routine Line Source + global_fv3gfs_ccp 0000000007A7439E Unknown Unknown Unknown + libpthread-2.17.s 00002B269C96F5F0 Unknown Unknown Unknown + global_fv3gfs_ccp 00000000073D9C53 calwxt_bourg_post 109 CALWXT_BOURG.f + global_fv3gfs_ccp 000000000737854B surfce_ 4368 SURFCE.f + global_fv3gfs_ccp 000000000731949B process_ 99 PROCESS.f + global_fv3gfs_ccp 0000000002B4FD91 post_gfs_mp_post_ 197 post_gfs.F90 + + -- GSD Physics: EnKF forecasts failed with floating invalid error + forrtl: error (65): floating invalid + Image PC Routine Line Source + global_fv3gfs_ccp 0000000007A7439E Unknown Unknown Unknown + libpthread-2.17.s 00002B9C436735F0 Unknown Unknown Unknown + 
global_fv3gfs_ccp 0000000002CED931 module_write_netc 383 module_write_netcdf_parallel.F90 + global_fv3gfs_ccp 0000000002AF05A7 module_wrt_grid_c 1533 module_wrt_grid_comp.F90 + libesmf.so 00002B9C3F2EDED1 _ZN5ESMCI6FTable1 2010 ESMCI_FTable.C + libesmf.so 00002B9C3F2F1AB6 ESMCI_FTableCallE 746 ESMCI_FTable.C + libesmf.so 00002B9C3F7DD78A _ZN5ESMCI2VM5ente 1178 ESMCI_VM.C + libesmf.so 00002B9C3F2EF507 c_esmc_ftablecall 898 ESMCI_FTable.C + + GDAS forecast crashed with floating invalid error + + in calwxtbg, jsta,jend= 261 280 im= 3072 + in calwxtbg,me= 13 iseed= 253654519 + in calwxtbg, jsta,jend= 161 180 im= 3072 + in calwxtbg,me= 8 iseed= 253654519 + forrtl: error (65): floating invalid + Image PC Routine Line Source + global_fv3gfs_ccp 0000000007A7439E Unknown Unknown Unknown + libpthread-2.17.s 00002B269C96F5F0 Unknown Unknown Unknown + global_fv3gfs_ccp 00000000073D9C53 calwxt_bourg_post 109 CALWXT_BOURG.f + global_fv3gfs_ccp 000000000737854B surfce_ 4368 SURFCE.f + global_fv3gfs_ccp 000000000731949B process_ 99 PROCESS.f + global_fv3gfs_ccp 0000000002B4FD91 post_gfs_mp_post_ 197 post_gfs.F90 + + +Oct 01, 2020 +------------ + - added aerosol variables to ICs created from chgres_cube (C768/C384, L64) + +Sep 17, 2020 +============ + - updated to feature/gfsv16b 15Sep20 branch, ce1c782 + (new component restructure) + +Sep 14, 2020 +============ + - updated to feature/gfsv16b 13Aug2020 branch, 9b923ac + - using 13Aug2020 gsd/develop branch, eff83ae + - test cycled run: C768/C384, 40 members, GSDnoah + + get ICs for gdas and enkf + htar -xvf /NCEPPROD/hpssprod/runhistory/rh2019/201909/20190930/gpfs_dell1_nco_ops_com_gfs_prod_gdas.20190930_18.gdas_restart.tar + + ln -s /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsd-ccpp-dev/FV3GFSrun/cycGSD_C768/enkf_18Z_20190930.tar + forach mem ( `seq -f %03g 1 40` ) + tar -xvf tarx enkf_18Z_20190930.tar enkfgdas.20190930/mem${ff} + end + May 19, 2020 ============ - updated to feature/gfsv16b 18May2020 branch, 9f7df9d diff 
--git a/jobs/rocoto/aeroic.sh_bash b/jobs/rocoto/aeroic.ksh similarity index 75% rename from jobs/rocoto/aeroic.sh_bash rename to jobs/rocoto/aeroic.ksh index d39a8d1bbf..669cea77c7 100755 --- a/jobs/rocoto/aeroic.sh_bash +++ b/jobs/rocoto/aeroic.ksh @@ -1,4 +1,22 @@ -#!/bin/bash +#!/bin/ksh -x + +############################################################### +## Abstract: +## Append aerosol variables, ice_aero and liq_aero, to gfs_data FV3 initial condition files +## RUN_ENVIR : runtime environment (emc | nco) +## HOMEgfs : /full/path/to/workflow +## EXPDIR : /full/path/to/config/files +## CDATE : current date (YYYYMMDDHH) +## CDUMP : cycle name (gdas / gfs) +## PDY : current date (YYYYMMDD) +## cyc : current cycle (HH) +############################################################### + +############################################################### +# Source FV3GFS workflow modules +. $HOMEgfs/ush/load_fv3gfs_modules.sh +status=$? +[[ $status -ne 0 ]] && exit $status ############################################################### # Source relevant configs @@ -11,15 +29,6 @@ done # initialize AERO_DIR=${HOMEgfs}/sorc/aeroconv -module purge -module load intel/18.0.5.274 -module load hdf5/1.10.5 -module load netcdf/4.7.0 -module load grib_api/1.26.1 -module load contrib -module load anaconda/2.3.0 -module load nco/4.7.0 -module load ncl/6.5.0 export LD_PRELOAD=$AERO_DIR/thirdparty/lib/libjpeg.so export PYTHONPATH=$AERO_DIR/thirdparty/lib/python2.7/site-packages:$PYTHONPATH diff --git a/jobs/rocoto/aeroic.sh b/jobs/rocoto/aeroic.sh index 669cea77c7..d548fefd3a 100755 --- a/jobs/rocoto/aeroic.sh +++ b/jobs/rocoto/aeroic.sh @@ -1,22 +1,4 @@ -#!/bin/ksh -x - -############################################################### -## Abstract: -## Append aerosol variables, ice_aero and liq_aero, to gfs_data FV3 initial condition files -## RUN_ENVIR : runtime environment (emc | nco) -## HOMEgfs : /full/path/to/workflow -## EXPDIR : /full/path/to/config/files -## CDATE : current 
date (YYYYMMDDHH) -## CDUMP : cycle name (gdas / gfs) -## PDY : current date (YYYYMMDD) -## cyc : current cycle (HH) -############################################################### - -############################################################### -# Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status +#!/bin/bash ############################################################### # Source relevant configs @@ -28,19 +10,30 @@ for config in $configs; do done # initialize -AERO_DIR=${HOMEgfs}/sorc/aeroconv +AERO_DIR=${HOMEgfs}/sorc/aeroconv.fd +module purge +module load intel/18.0.5.274 +module load hdf5/1.10.5 +module load netcdf/4.7.0 +module load grib_api/1.26.1 +module use /contrib/anaconda/modulefiles +module load anaconda/2.3.0 +module load nco/4.7.0 +module load ncl/6.5.0 export LD_PRELOAD=$AERO_DIR/thirdparty/lib/libjpeg.so +export PATH=$AERO_DIR/thirdparty/bin:$PATH +export LD_LIBRARY_PATH=$AERO_DIR/thirdparty/lib:$AERO_DIR/thirdparty/lib64:$LD_LIBRARY_PATH export PYTHONPATH=$AERO_DIR/thirdparty/lib/python2.7/site-packages:$PYTHONPATH module use -a /scratch1/BMC/gmtb/software/modulefiles/intel-18.0.5.274/impi-2018.0.4 module load cdo/1.7.2 +# set case from environment variable passed in!! echo echo "FIXfv3 = $FIXfv3" echo "CDATE = $CDATE" -echo "CDUMP = $CDUMP" -echo "CASE = $CASE" echo "AERO_DIR = $AERO_DIR" +echo "AEROCASE = $AEROCASE" echo "FV3ICS_DIR = $FV3ICS_DIR" echo @@ -49,10 +42,10 @@ export DATA="$DATAROOT/aerofv3ic$$" [[ -d $DATA ]] && rm -rf $DATA mkdir -p $DATA/INPUT cd $DATA - -export OUTDIR="$ICSDIR/$CDATE/$CDUMP/$CASE/INPUT" +echo entering $DATA.... 
# link files +ln -sf ${AERO_DIR}/thirdparty ln -sf ${FV3ICS_DIR}/gfs_ctrl.nc INPUT ln -sf ${FV3ICS_DIR}/gfs_data.tile1.nc INPUT ln -sf ${FV3ICS_DIR}/gfs_data.tile2.nc INPUT @@ -60,12 +53,12 @@ ln -sf ${FV3ICS_DIR}/gfs_data.tile3.nc INPUT ln -sf ${FV3ICS_DIR}/gfs_data.tile4.nc INPUT ln -sf ${FV3ICS_DIR}/gfs_data.tile5.nc INPUT ln -sf ${FV3ICS_DIR}/gfs_data.tile6.nc INPUT -ln -sf ${FIXfv3}/${CASE}/${CASE}_grid_spec.tile1.nc INPUT/grid_spec.tile1.nc -ln -sf ${FIXfv3}/${CASE}/${CASE}_grid_spec.tile2.nc INPUT/grid_spec.tile2.nc -ln -sf ${FIXfv3}/${CASE}/${CASE}_grid_spec.tile3.nc INPUT/grid_spec.tile3.nc -ln -sf ${FIXfv3}/${CASE}/${CASE}_grid_spec.tile4.nc INPUT/grid_spec.tile4.nc -ln -sf ${FIXfv3}/${CASE}/${CASE}_grid_spec.tile5.nc INPUT/grid_spec.tile5.nc -ln -sf ${FIXfv3}/${CASE}/${CASE}_grid_spec.tile6.nc INPUT/grid_spec.tile6.nc +ln -sf ${FIXfv3}/${AEROCASE}/${AEROCASE}_grid_spec.tile1.nc INPUT/grid_spec.tile1.nc +ln -sf ${FIXfv3}/${AEROCASE}/${AEROCASE}_grid_spec.tile2.nc INPUT/grid_spec.tile2.nc +ln -sf ${FIXfv3}/${AEROCASE}/${AEROCASE}_grid_spec.tile3.nc INPUT/grid_spec.tile3.nc +ln -sf ${FIXfv3}/${AEROCASE}/${AEROCASE}_grid_spec.tile4.nc INPUT/grid_spec.tile4.nc +ln -sf ${FIXfv3}/${AEROCASE}/${AEROCASE}_grid_spec.tile5.nc INPUT/grid_spec.tile5.nc +ln -sf ${FIXfv3}/${AEROCASE}/${AEROCASE}_grid_spec.tile6.nc INPUT/grid_spec.tile6.nc ln -sf ${AERO_DIR}/INPUT/QNWFA_QNIFA_SIGMA_MONTHLY.dat.nc INPUT cp ${AERO_DIR}/int2nc_to_nggps_ic_* ./ @@ -76,8 +69,11 @@ yyyymmdd=`echo $CDATE | cut -c1-8` # Move output data echo "copying updated files to $FV3ICS_DIR...." cp -p $DATA/OUTPUT/gfs*nc $FV3ICS_DIR +status=$? 
+[[ $status -ne 0 ]] && exit $status +touch ${FV3ICS_DIR}/aero_done ############################################################### # Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATA ; fi -exit 0 +#if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATA ; fi +#exit 0 diff --git a/sorc/aero_extract.sh b/sorc/aero_extract.sh index 203b93e6d6..7d5b99d947 100755 --- a/sorc/aero_extract.sh +++ b/sorc/aero_extract.sh @@ -5,7 +5,14 @@ ## ./aero_extract.sh ## -machine=hera +if [[ -d /scratch2 ]] ; then + machine=hera +elif [[ -d /lfs4 ]] ; then + machine=jet +else + echo "Can't run on this platform!" + exit 1 +fi cd aeroconv.fd # thirdparty directory diff --git a/sorc/build_fv3_ccpp_debug.sh b/sorc/build_fv3_ccpp_debug.sh new file mode 100755 index 0000000000..d6f1130788 --- /dev/null +++ b/sorc/build_fv3_ccpp_debug.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash +set -eux + +source ./machine-setup.sh > /dev/null 2>&1 +cwd=`pwd` + +USE_PREINST_LIBS=${USE_PREINST_LIBS:-"true"} +if [ $USE_PREINST_LIBS = true ]; then + export MOD_PATH=/scratch3/NCEPDEV/nwprod/lib/modulefiles +else + export MOD_PATH=${cwd}/lib/modulefiles +fi + +# Check final exec folder exists +if [ ! -d "../exec" ]; then + mkdir ../exec +fi + +if [ $target = hera ]; then target=hera.intel ; fi + +cd fv3gfs_ccpp.fd/ +FV3=$( pwd -P )/FV3 +cd tests/ +./compile.sh "$FV3" "$target" "DEBUG=Y CCPP=Y 32BIT=Y STATIC=Y SUITES=FV3_GFS_v15,FV3_GSD_noah,FV3_GFS_v16beta,FV3_GSD_v0" +mv -f fv3.exe ../NEMS/exe/global_fv3gfs_ccpp.x