diff --git a/CMakeLists.txt b/CMakeLists.txt index 23470e23b..82500be16 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -41,7 +41,6 @@ option(GFS "Enable building GFS-only utilities" OFF) # When building the GFS, the following need not be built if(GFS) message(STATUS "Building utilities specific to the GFS") - set(FRENCTOOLS OFF CACHE BOOL "Disable building fre-nctools.fd" FORCE) set(GRIDTOOLS OFF CACHE BOOL "Disable building grid_tools.fd" FORCE) set(OROG_MASK_TOOLS OFF CACHE BOOL "Disable building orog_mask_tools.fd" FORCE) set(SFC_CLIMO_GEN OFF CACHE BOOL "Disable building sfc_climo_gen.fd" FORCE) diff --git a/README.md b/README.md index fa857f1f8..ce4bbbe07 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ It also uses the following repositories: ## Installing -On Orion, Hercules, Jet, Hera and WCOSS2 do the following: +On Orion, Hercules, Jet, Hera, S4, Gaea and WCOSS2 do the following: 1) Set the 'fixed' directories using the `link_fixdirs.sh` script in `./fix`. Usage: `./link_fixdirs.sh $RUN_ENVIR $machine`, diff --git a/build_all.sh b/build_all.sh index 3d4bb0805..eb57c7b2d 100755 --- a/build_all.sh +++ b/build_all.sh @@ -15,10 +15,11 @@ else readonly DIR_ROOT=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )" && pwd -P) fi +source "${DIR_ROOT}/sorc/machine-setup.sh" + # User Options target=${target:-"NULL"} compiler=${compiler:-"intel"} -PW_CSP=${PW_CSP:-} # TODO: This is an implementation from EPIC and consistent with the UFS WM build system. if [[ "$target" == "linux.*" || "$target" == "macosx.*" ]]; then unset -f module @@ -27,17 +28,10 @@ if [[ "$target" == "linux.*" || "$target" == "macosx.*" ]]; then set -x else set +x - source "${DIR_ROOT}/sorc/machine-setup.sh" - if [[ "${target}" == "noaacloud" ]]; then - #TODO: This will need to be revisited once the EPIC supported-stacks come online. - #TODO: This is a hack due to how the spack-stack module files are generated; there may be a better way to do this. 
- source /contrib/global-workflow/spack-stack/envs/spack_2021.0.3.env - else - module use "${DIR_ROOT}/modulefiles" - fi + module use "${DIR_ROOT}/modulefiles" module load "build.$target.$compiler" > /dev/null module list - set -x + set -x fi # Ensure the submodules have been initialized. diff --git a/driver_scripts/driver_grid.hercules.sh b/driver_scripts/driver_grid.hercules.sh index cd4afee51..c76366005 100644 --- a/driver_scripts/driver_grid.hercules.sh +++ b/driver_scripts/driver_grid.hercules.sh @@ -154,8 +154,8 @@ fi #----------------------------------------------------------------------- export home_dir=$SLURM_SUBMIT_DIR/.. -export TEMP_DIR=/work/noaa/stmp/$LOGNAME/fv3_grid.$gtype -export out_dir=/work/noaa/stmp/$LOGNAME/my_grids +export TEMP_DIR=/work2/noaa/stmp/$LOGNAME/fv3_grid.$gtype +export out_dir=/work2/noaa/stmp/$LOGNAME/my_grids #----------------------------------------------------------------------- # Should not need to change anything below here. diff --git a/fix/link_fixdirs.sh b/fix/link_fixdirs.sh index 00c17a10b..3fb91a1b2 100755 --- a/fix/link_fixdirs.sh +++ b/fix/link_fixdirs.sh @@ -9,7 +9,7 @@ set -ex # 'nco' (copies data). # # $machine - is the machine. 
Choices are: -# 'wcoss2', 'hera', 'jet', 'orion', 'hercules', 's4' +# 'wcoss2', 'hera', 'jet', 'orion', 'hercules', 's4', 'gaea' RUN_ENVIR=${1} machine=${2} @@ -17,7 +17,7 @@ machine=${2} if [ $# -lt 2 ]; then set +x echo '***ERROR*** must specify two arguements: (1) RUN_ENVIR, (2) machine' - echo ' Syntax: link_fv3gfs.sh ( nco | emc ) ( wcoss2 | hera | jet | orion | hercules | s4 )' + echo ' Syntax: link_fv3gfs.sh ( nco | emc ) ( wcoss2 | hera | jet | orion | hercules | s4 | gaea )' exit 1 fi @@ -28,10 +28,10 @@ if [ $RUN_ENVIR != emc -a $RUN_ENVIR != nco ]; then exit 1 fi -if [ $machine != wcoss2 -a $machine != hera -a $machine != jet -a $machine != orion -a $machine != s4 -a $machine != hercules ]; then +if [ $machine != wcoss2 -a $machine != hera -a $machine != jet -a $machine != orion -a $machine != s4 -a $machine != hercules -a $machine != gaea ]; then set +x echo '***ERROR*** unsupported machine' - echo 'Syntax: link_fv3gfs.sh ( nco | emc ) ( wcoss2 | hera | jet | orion | hercules | s4 )' + echo 'Syntax: link_fv3gfs.sh ( nco | emc ) ( wcoss2 | hera | jet | orion | hercules | s4 | gaea )' exit 1 fi @@ -54,6 +54,8 @@ elif [ $machine = "wcoss2" ]; then FIX_DIR="/lfs/h2/emc/global/noscrub/emc.global/FIX/fix" elif [ $machine = "s4" ]; then FIX_DIR="/data/prod/glopara/fix" +elif [ $machine = "gaea" ]; then + FIX_DIR="/gpfs/f5/epic/proj-shared/global/glopara/data/fix" fi am_ver=${am_ver:-20220805} diff --git a/modulefiles/build.gaea.intel b/modulefiles/build.gaea.intel deleted file mode 100644 index d1e278e36..000000000 --- a/modulefiles/build.gaea.intel +++ /dev/null @@ -1,22 +0,0 @@ -#%Module##################################################### -## Build module for Gaea -############################################################# - -module switch intel/18.0.6.288 - -module load git/2.26.0 -module load cmake/3.17.0 - -# hpc-stack installed as a flat install at: -# /ncrc/home2/Rahul.Mahajan/dev/opt - -setenv ESMFMKFILE 
/ncrc/home2/Rahul.Mahajan/dev/opt/lib/esmf.mk - -prepend-path PATH /ncrc/home2/Rahul.Mahajan/dev/opt/bin -prepend-path CMAKE_PREFIX_PATH /ncrc/home2/Rahul.Mahajan/dev/opt -prepend-path LD_LIBRARY_PATH /ncrc/home2/Rahul.Mahajan/dev/opt/lib -prepend-path LD_LIBRARY_PATH /ncrc/home2/Rahul.Mahajan/dev/opt/lib64 - -setenv CMAKE_C_COMPILER cc -setenv CMAKE_CXX_COMPILER CC -setenv CMAKE_Fortran_COMPILER ftn diff --git a/modulefiles/build.gaea.intel.lua b/modulefiles/build.gaea.intel.lua new file mode 100644 index 000000000..2898c3a29 --- /dev/null +++ b/modulefiles/build.gaea.intel.lua @@ -0,0 +1,66 @@ +help([[ +Load environment to compile UFS_UTILS on Gaea using Intel +]]) + +prepend_path("MODULEPATH", "/sw/rdtn/modulefiles") +load("hsi") + +prepend_path("MODULEPATH", "/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core") + +stack_intel_ver=os.getenv("stack_intel_ver") or "2023.1.0" +load(pathJoin("stack-intel", stack_intel_ver)) + +stack_cray_mpich_ver=os.getenv("stack_cray_mpich_ver") or "8.1.25" +load(pathJoin("stack-cray-mpich", stack_cray_mpich_ver)) + +cmake_ver=os.getenv("cmake_ver") or "3.23.1" +load(pathJoin("cmake", cmake_ver)) + +bacio_ver=os.getenv("bacio_ver") or "2.4.1" +load(pathJoin("bacio", bacio_ver)) + +g2_ver=os.getenv("g2_ver") or "3.4.5" +load(pathJoin("g2", g2_ver)) + +ip_ver=os.getenv("ip_ver") or "4.3.0" +load(pathJoin("ip", ip_ver)) + +nemsio_ver=os.getenv("nemsio_ver") or "2.5.4" +load(pathJoin("nemsio", nemsio_ver)) + +sp_ver=os.getenv("sp_ver") or "2.5.0" +load(pathJoin("sp", sp_ver)) + +w3emc_ver=os.getenv("w3emc_ver") or "2.10.0" +load(pathJoin("w3emc", w3emc_ver)) + +-- Uncomment when CHGRES_ALL is ON +--sfcio_ver=os.getenv("sfcio_ver") or "1.4.1" +--load(pathJoin("sfcio", sfcio_ver)) + +sigio_ver=os.getenv("sigio_ver") or "2.3.2" +load(pathJoin("sigio", sigio_ver)) + +zlib_ver=os.getenv("zlib_ver") or "1.2.13" +load(pathJoin("zlib", zlib_ver)) + +png_ver=os.getenv("png_ver") or "1.6.37" 
+load(pathJoin("libpng", png_ver)) + +netcdf_c_ver=os.getenv("netcdf_c_ver") or "4.9.2" +load(pathJoin("netcdf-c", netcdf_c_ver)) + +netcdf_fortran_ver=os.getenv("netcdf_fortran_ver") or "4.6.1" +load(pathJoin("netcdf-fortran", netcdf_fortran_ver)) + +nccmp_ver=os.getenv("nccmp_ver") or "1.9.0.1" +load(pathJoin("nccmp", nccmp_ver)) + +esmf_ver=os.getenv("esmf_ver") or "8.6.0" +load(pathJoin("esmf", esmf_ver)) + +nco_ver=os.getenv("nco_ver") or "5.0.6" +load(pathJoin("nco", nco_ver)) + +whatis("Description: UFS_UTILS build environment") + diff --git a/modulefiles/build.noaacloud.intel.lua b/modulefiles/build.noaacloud.intel.lua index a10eeda27..eb6b7a6e0 100644 --- a/modulefiles/build.noaacloud.intel.lua +++ b/modulefiles/build.noaacloud.intel.lua @@ -2,59 +2,21 @@ help([[ Load environment to compile UFS_UTILS on NOAA CSPs using Intel ]]) -cmake_ver=os.getenv("cmake_ver") or "3.16.1" -load(pathJoin("cmake", cmake_ver)) - -hpc_intel_ver=os.getenv("hpc_intel_ver") or "2021.3.0" -load(pathJoin("intel", hpc_intel_ver)) - -impi_ver=os.getenv("impi_ver") or "2021.3.0" -load(pathJoin("impi", impi_ver)) - -bacio_ver=os.getenv("bacio_ver") or "2.4.1" -load(pathJoin("bacio", bacio_ver)) - -g2_ver=os.getenv("g2_ver") or "3.4.5" -load(pathJoin("g2", g2_ver)) - -ip_ver=os.getenv("ip_ver") or "4.0.0" -load(pathJoin("ip", ip_ver)) - -nemsio_ver=os.getenv("nemsio_ver") or "2.5.4" -load(pathJoin("nemsio", nemsio_ver)) +prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core") -sp_ver=os.getenv("sp_ver") or "2.5.0" -load(pathJoin("sp", sp_ver)) +stack_intel_ver=os.getenv("stack_intel_ver") or "2021.3.0" +load(pathJoin("stack-intel", stack_intel_ver)) -w3emc_ver=os.getenv("w3emc_ver") or "2.9.2" -load(pathJoin("w3emc", w3emc_ver)) +stack_impi_ver=os.getenv("stack_impi_ver") or "2021.3.0" +load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) --- Uncomment when CHGRES_ALL is ON ---sfcio_ver=os.getenv("sfcio_ver") or "1.4.1" 
---load(pathJoin("sfcio", sfcio_ver)) - -sigio_ver=os.getenv("sigio_ver") or "2.3.2" -load(pathJoin("sigio", sigio_ver)) - -zlib_ver=os.getenv("zlib_ver") or "1.2.11" -load(pathJoin("zlib", zlib_ver)) - -png_ver=os.getenv("png_ver") or "1.6.35" -load(pathJoin("libpng", png_ver)) - -hdf5_ver=os.getenv("hdf5_ver") or "1.10.6" -load(pathJoin("hdf5", hdf5_ver)) - -netcdf_ver=os.getenv("netcdf_ver") or "4.6.1" -load(pathJoin("netcdf", netcdf_ver)) - -nccmp_ver=os.getenv("nccmp_ver") or "1.8.9.0" -load(pathJoin("nccmp", nccmp_ver)) +cmake_ver=os.getenv("cmake_ver") or "3.23.1" +load(pathJoin("cmake", cmake_ver)) -esmf_ver=os.getenv("esmf_ver") or "8.4.0b08" -load(pathJoin("esmf", esmf_ver)) +load("common4noaacloud") -nco_ver=os.getenv("nco_ver") or "4.9.1" -load(pathJoin("nco", nco_ver)) +setenv("CC", "mpiicc") +setenv("CXX", "mpiicpc") +setenv("FC", "mpiifort") -whatis("Description: UFS_UTILS build environment") +whatis("Description: UFS_UTILS build environment on NOAA Cloud") diff --git a/modulefiles/common4noaacloud.lua b/modulefiles/common4noaacloud.lua new file mode 100644 index 000000000..a7028dd9d --- /dev/null +++ b/modulefiles/common4noaacloud.lua @@ -0,0 +1,60 @@ +help([[ +Load environment to compile UFS_UTILS on NOAA CSPs using Intel +]]) + +cmake_ver=os.getenv("cmake_ver") or "3.16.1" +load(pathJoin("cmake", cmake_ver)) + +hpc_intel_ver=os.getenv("hpc_intel_ver") or "2021.3.0" +load(pathJoin("intel", hpc_intel_ver)) + +impi_ver=os.getenv("impi_ver") or "2021.3.0" +load(pathJoin("impi", impi_ver)) + +bacio_ver=os.getenv("bacio_ver") or "2.4.1" +load(pathJoin("bacio", bacio_ver)) + +g2_ver=os.getenv("g2_ver") or "3.4.5" +load(pathJoin("g2", g2_ver)) + +ip_ver=os.getenv("ip_ver") or "4.3.0" +load(pathJoin("ip", ip_ver)) + +nemsio_ver=os.getenv("nemsio_ver") or "2.5.4" +load(pathJoin("nemsio", nemsio_ver)) + +sp_ver=os.getenv("sp_ver") or "2.5.0" +load(pathJoin("sp", sp_ver)) + +w3emc_ver=os.getenv("w3emc_ver") or "2.10.0" +load(pathJoin("w3emc", w3emc_ver)) + 
+-- Uncomment when CHGRES_ALL is ON +--sfcio_ver=os.getenv("sfcio_ver") or "1.4.1" +--load(pathJoin("sfcio", sfcio_ver)) + +sigio_ver=os.getenv("sigio_ver") or "2.3.2" +load(pathJoin("sigio", sigio_ver)) + +zlib_ver=os.getenv("zlib_ver") or "1.2.13" +load(pathJoin("zlib", zlib_ver)) + +png_ver=os.getenv("png_ver") or "1.6.37" +load(pathJoin("libpng", png_ver)) + +hdf5_ver=os.getenv("hdf5_ver") or "1.10.6" +load(pathJoin("hdf5", hdf5_ver)) + +netcdf_ver=os.getenv("netcdf_ver") or "4.6.1" +load(pathJoin("netcdf", netcdf_ver)) + +nccmp_ver=os.getenv("nccmp_ver") or "1.9.0.1" +load(pathJoin("nccmp", nccmp_ver)) + +esmf_ver=os.getenv("esmf_ver") or "8.6.0" +load(pathJoin("esmf", esmf_ver)) + +nco_ver=os.getenv("nco_ver") or "4.9.1" +load(pathJoin("nco", nco_ver)) + +whatis("Description: UFS_UTILS build environment") diff --git a/reg_tests/global_cycle/C192.lndincsoilnoahmp.sh b/reg_tests/global_cycle/C192.gsi_lndincsoilnoahmp.sh similarity index 73% rename from reg_tests/global_cycle/C192.lndincsoilnoahmp.sh rename to reg_tests/global_cycle/C192.gsi_lndincsoilnoahmp.sh index 58b25d26f..bd16365a3 100755 --- a/reg_tests/global_cycle/C192.lndincsoilnoahmp.sh +++ b/reg_tests/global_cycle/C192.gsi_lndincsoilnoahmp.sh @@ -29,8 +29,6 @@ export OCNRES=99999 export COMIN=$HOMEreg/input_data_noahmp -export LND_SOI_FILE=$COMIN/sfcincr_gsi - export JCAP=1534 export LONB=3072 export LATB=1536 @@ -40,6 +38,7 @@ export use_ufo=.true. export DO_SFCCYCLE=".FALSE." export DO_LNDINC=".TRUE." +export DO_SOI_INC_GSI=".true." export VERBOSE=YES export CYCLVARS=FSNOL=-2.,FSNOS=99999., @@ -49,7 +48,7 @@ $HOMEgfs/ush/global_cycle_driver.sh iret=$? if [ $iret -ne 0 ]; then set +x - echo "<<< C192 LANDINC SOIL NOAHMP CYCLE TEST FAILED. >>>" + echo "<<< C192 GSI based LANDINC SOIL NOAHMP CYCLE TEST FAILED. 
>>>" exit $iret fi @@ -60,7 +59,19 @@ for files in *tile*.nc do if [ -f $files ]; then echo CHECK $files - $NCCMP -dmfqS $files $HOMEreg/baseline_data/c192.lndincsoilnoahmp/$files + $NCCMP -dmfqS $files $HOMEreg/baseline_data/c192.gsi_lndincsoilnoahmp/$files + iret=$? + if [ $iret -ne 0 ]; then + test_failed=1 + fi + fi +done + +for files in *gaussian_interp* +do + if [ -f $files ]; then + echo CHECK $files + $NCCMP -dmfqS $files $HOMEreg/baseline_data/c192.gsi_lndincsoilnoahmp/$files iret=$? if [ $iret -ne 0 ]; then test_failed=1 @@ -72,7 +83,7 @@ set +x if [ $test_failed -ne 0 ]; then echo echo "**********************************************" - echo "<<< C192 LANDINC SOIL-NOAHMP CYCLE TEST FAILED. >>>" + echo "<<< C192 GSI based LANDINC SOIL-NOAHMP CYCLE TEST FAILED. >>>" echo "**********************************************" if [ "$UPDATE_BASELINE" = "TRUE" ]; then $HOMEgfs/reg_tests/update_baseline.sh $HOMEreg "c192.lndincsoilnoahmp" $commit_num @@ -80,7 +91,7 @@ if [ $test_failed -ne 0 ]; then else echo echo "*****************************************" - echo "<<< C192 LANDINC SOIL-NOAHMP CYCLE TEST PASSED. >>>" + echo "<<< C192 GSI based LANDINC SOIL-NOAHMP CYCLE TEST PASSED. >>>" echo "*****************************************" fi diff --git a/reg_tests/global_cycle/C192.jedi_lndincsoilnoahmp.sh b/reg_tests/global_cycle/C192.jedi_lndincsoilnoahmp.sh new file mode 100755 index 000000000..ebbc868e7 --- /dev/null +++ b/reg_tests/global_cycle/C192.jedi_lndincsoilnoahmp.sh @@ -0,0 +1,88 @@ +#!/bin/bash + +#------------------------------------------------------------------ +# Run global_cycle for a C192 case to test the ingest and +# application of soil moisture and temperature increments +# on the cubed-sphere grid into Noah-MP restarts, which +# yields (almost) identical results as compared with the GSI case +# given the same day of increments on two different grids. +# Compare output to a baseline set of files using the 'nccmp' +# utility. 
+#------------------------------------------------------------------ + +set -x + +NCCMP=${NCCMP:-$(which nccmp)} + +export MAX_TASKS_CY=6 + +export HOMEgfs=$NWPROD + +export FIXgfs=$HOMEreg/fix + +export CYCLEXEC=$HOMEgfs/exec/global_cycle + +export CDATE=2019073000 +export FHOUR=00 +export DELTSFC=6 + +export CASE=C192 +export OCNRES=99999 + +export COMIN=$HOMEreg/input_data_noahmp + +export JCAP=1534 +export LONB=3072 +export LATB=1536 + +export DONST="NO" +export use_ufo=.true. + +export DO_SFCCYCLE=".FALSE." +export DO_LNDINC=".TRUE." +export DO_SOI_INC_JEDI=".true." + +export VERBOSE=YES +export CYCLVARS=FSNOL=-2.,FSNOS=99999., + +$HOMEgfs/ush/global_cycle_driver.sh + +iret=$? +if [ $iret -ne 0 ]; then + set +x + echo "<<< C192 JEDI based LANDINC SOIL NOAHMP CYCLE TEST FAILED. >>>" + exit $iret +fi + +test_failed=0 + +cd $DATA +for files in *tile*.nc +do + if [ -f $files ]; then + echo CHECK $files + $NCCMP -dmfqS $files $HOMEreg/baseline_data/c192.jedi_lndincsoilnoahmp/$files + iret=$? + if [ $iret -ne 0 ]; then + test_failed=1 + fi + fi +done + +set +x +if [ $test_failed -ne 0 ]; then + echo + echo "**********************************************" + echo "<<< C192 JEDI based LANDINC SOIL-NOAHMP CYCLE TEST FAILED. >>>" + echo "**********************************************" + if [ "$UPDATE_BASELINE" = "TRUE" ]; then + $HOMEgfs/reg_tests/update_baseline.sh $HOMEreg "c192.jedi_lndincsoilnoahmp" $commit_num + fi +else + echo + echo "*****************************************" + echo "<<< C192 JEDI based LANDINC SOIL-NOAHMP CYCLE TEST PASSED. 
>>>" + echo "*****************************************" +fi + +exit diff --git a/reg_tests/global_cycle/C768.lndincsnow.sh b/reg_tests/global_cycle/C768.lndincsnow.sh index b6455ebd7..a32239d5e 100755 --- a/reg_tests/global_cycle/C768.lndincsnow.sh +++ b/reg_tests/global_cycle/C768.lndincsnow.sh @@ -31,7 +31,8 @@ export FNSNOA=$COMIN/gdas.t00z.snogrb_t1534.3072.1536 export FNACNA=$COMIN/gdas.t00z.seaice.5min.blend.grb export NST_FILE=$COMIN/gdas.t00z.dtfanl.nc -export DO_SNO_INC=.true. # must be lower-case. +export DO_SNO_INC_JEDI=.true. # must be lower-case. +export DO_SOI_INC_JEDI=.false. export JCAP=1534 export LONB=3072 export LATB=1536 diff --git a/reg_tests/global_cycle/driver.hera.sh b/reg_tests/global_cycle/driver.hera.sh index db23cc6f1..b0f4f938c 100755 --- a/reg_tests/global_cycle/driver.hera.sh +++ b/reg_tests/global_cycle/driver.hera.sh @@ -64,8 +64,8 @@ TEST1=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_C LOG_FILE=consistency.log02 export DATA="${DATA_DIR}/test2" export COMOUT=$DATA -TEST2=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c192.lndincsoilnoahmp \ - -o $LOG_FILE -e $LOG_FILE ./C192.lndincsoilnoahmp.sh) +TEST2=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c192.gsi_lndincsoilnoahmp \ + -o $LOG_FILE -e $LOG_FILE ./C192.gsi_lndincsoilnoahmp.sh) LOG_FILE=consistency.log03 export DATA="${DATA_DIR}/test3" @@ -79,10 +79,16 @@ export COMOUT=$DATA TEST4=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c48.noahmp.frac \ -o $LOG_FILE -e $LOG_FILE ./C48.noahmp.fracgrid.sh) +LOG_FILE=consistency.log05 +export DATA="${DATA_DIR}/test5" +export COMOUT=$DATA +TEST5=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c192.jedi_lndincsoilnoahmp \ + -o $LOG_FILE -e $LOG_FILE ./C192.jedi_lndincsoilnoahmp.sh) + LOG_FILE=consistency.log sbatch --nodes=1 -t 0:01:00 -A 
$PROJECT_CODE -J chgres_summary -o $LOG_FILE -e $LOG_FILE \ --open-mode=append -q $QUEUE -d\ - afterok:$TEST1:$TEST2:$TEST3:$TEST4 << EOF + afterok:$TEST1:$TEST2:$TEST3:$TEST4:$TEST5 << EOF #!/bin/bash grep -a '<<<' ${LOG_FILE}* > summary.log EOF diff --git a/reg_tests/global_cycle/driver.hercules.sh b/reg_tests/global_cycle/driver.hercules.sh index b522ad1bc..db1cf8431 100755 --- a/reg_tests/global_cycle/driver.hercules.sh +++ b/reg_tests/global_cycle/driver.hercules.sh @@ -64,8 +64,8 @@ TEST1=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_C LOG_FILE=consistency.log02 export DATA="${DATA_DIR}/test2" export COMOUT=$DATA -TEST2=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c192.lndincsoilnoahmp \ - -o $LOG_FILE -e $LOG_FILE ./C192.lndincsoilnoahmp.sh) +TEST2=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c192.gsi_lndincsoilnoahmp \ + -o $LOG_FILE -e $LOG_FILE ./C192.gsi_lndincsoilnoahmp.sh) LOG_FILE=consistency.log03 export DATA="${DATA_DIR}/test3" @@ -79,10 +79,16 @@ export COMOUT=$DATA TEST4=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c48.noahmp.frac \ -o $LOG_FILE -e $LOG_FILE ./C48.noahmp.fracgrid.sh) +LOG_FILE=consistency.log05 +export DATA="${DATA_DIR}/test5" +export COMOUT=$DATA +TEST5=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c192.jedi_lndincsoilnoahmp \ + -o $LOG_FILE -e $LOG_FILE ./C192.jedi_lndincsoilnoahmp.sh) + LOG_FILE=consistency.log sbatch --nodes=1 -t 0:01:00 -A $PROJECT_CODE -J chgres_summary -o $LOG_FILE -e $LOG_FILE \ --open-mode=append -q $QUEUE -d\ - afterok:$TEST1:$TEST2:$TEST3:$TEST4 << EOF + afterok:$TEST1:$TEST2:$TEST3:$TEST4:$TEST5 << EOF #!/bin/bash grep -a '<<<' ${LOG_FILE}* > summary.log EOF diff --git a/reg_tests/global_cycle/driver.jet.sh b/reg_tests/global_cycle/driver.jet.sh index d680dc023..e88d8b21b 100755 --- 
a/reg_tests/global_cycle/driver.jet.sh +++ b/reg_tests/global_cycle/driver.jet.sh @@ -62,8 +62,8 @@ TEST1=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_C LOG_FILE=consistency.log02 export DATA="${DATA_DIR}/test2" export COMOUT=$DATA -TEST2=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c192.lndincsoilnoahmp \ - --partition=xjet -o $LOG_FILE -e $LOG_FILE ./C192.lndincsoilnoahmp.sh) +TEST2=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c192.gsi_lndincsoilnoahmp \ + --partition=xjet -o $LOG_FILE -e $LOG_FILE ./C192.gsi_lndincsoilnoahmp.sh) LOG_FILE=consistency.log03 export DATA="${DATA_DIR}/test3" @@ -77,10 +77,16 @@ export COMOUT=$DATA TEST4=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c48.noahmp.frac \ --partition=xjet -o $LOG_FILE -e $LOG_FILE ./C48.noahmp.fracgrid.sh) +LOG_FILE=consistency.log05 +export DATA="${DATA_DIR}/test5" +export COMOUT=$DATA +TEST5=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c192.jedi_lndincsoilnoahmp \ + -o $LOG_FILE -e $LOG_FILE ./C192.jedi_lndincsoilnoahmp.sh) + LOG_FILE=consistency.log sbatch --partition=xjet --nodes=1 -t 0:01:00 -A $PROJECT_CODE -J summary -o $LOG_FILE -e $LOG_FILE \ --open-mode=append -q $QUEUE -d\ - afterok:$TEST1:$TEST2:$TEST3:$TEST4 << EOF + afterok:$TEST1:$TEST2:$TEST3:$TEST4:$TEST5 << EOF #!/bin/bash grep -a '<<<' ${LOG_FILE}* > ./summary.log EOF diff --git a/reg_tests/global_cycle/driver.orion.sh b/reg_tests/global_cycle/driver.orion.sh index d3f4f6415..472608dcf 100755 --- a/reg_tests/global_cycle/driver.orion.sh +++ b/reg_tests/global_cycle/driver.orion.sh @@ -62,8 +62,8 @@ TEST1=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_C LOG_FILE=consistency.log02 export DATA="${DATA_DIR}/test2" export COMOUT=$DATA -TEST2=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A 
$PROJECT_CODE -q $QUEUE -J c192.lndincsoilnoahmp \ - -o $LOG_FILE -e $LOG_FILE ./C192.lndincsoilnoahmp.sh) +TEST2=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c192.gsi_lndincsoilnoahmp \ + -o $LOG_FILE -e $LOG_FILE ./C192.gsi_lndincsoilnoahmp.sh) LOG_FILE=consistency.log03 export DATA="${DATA_DIR}/test3" @@ -77,10 +77,16 @@ export COMOUT=$DATA TEST4=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c48.noahmp.frac \ -o $LOG_FILE -e $LOG_FILE ./C48.noahmp.fracgrid.sh) +LOG_FILE=consistency.log05 +export DATA="${DATA_DIR}/test5" +export COMOUT=$DATA +TEST5=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c192.jedi_lndincsoilnoahmp \ + -o $LOG_FILE -e $LOG_FILE ./C192.jedi_lndincsoilnoahmp.sh) + LOG_FILE=consistency.log sbatch --nodes=1 -t 0:01:00 -A $PROJECT_CODE -J chgres_summary -o $LOG_FILE -e $LOG_FILE \ --open-mode=append -q $QUEUE -d\ - afterok:$TEST1:$TEST2:$TEST3:$TEST4 << EOF + afterok:$TEST1:$TEST2:$TEST3:$TEST4:$TEST5 << EOF #!/bin/bash grep -a '<<<' ${LOG_FILE}* > summary.log EOF diff --git a/reg_tests/global_cycle/driver.wcoss2.sh b/reg_tests/global_cycle/driver.wcoss2.sh index 077896cd3..1655a4bb4 100755 --- a/reg_tests/global_cycle/driver.wcoss2.sh +++ b/reg_tests/global_cycle/driver.wcoss2.sh @@ -69,7 +69,7 @@ TEST1=$(qsub -V -o ${LOG_FILE}01 -e ${LOG_FILE}01 -q $QUEUE -A $PROJECT_CODE -l export DATA="${DATA_DIR}/test2" export COMOUT=$DATA TEST2=$(qsub -V -o ${LOG_FILE}02 -e ${LOG_FILE}02 -q $QUEUE -A $PROJECT_CODE -l walltime=00:05:00 \ - -N c192.lndincsoilnoahmp -l select=1:ncpus=12:mem=8GB $PWD/C192.lndincsoilnoahmp.sh) + -N c192.gsi_lndincsoilnoahmp -l select=1:ncpus=12:mem=8GB $PWD/C192.gsi_lndincsoilnoahmp.sh) export DATA="${DATA_DIR}/test3" export COMOUT=$DATA @@ -81,8 +81,13 @@ export COMOUT=$DATA TEST4=$(qsub -V -o ${LOG_FILE}04 -e ${LOG_FILE}04 -q $QUEUE -A $PROJECT_CODE -l walltime=00:05:00 \ -N c48.noahmp.frac -l 
select=1:ncpus=12:mem=8GB $PWD/C48.noahmp.fracgrid.sh) +export DATA="${DATA_DIR}/test5" +export COMOUT=$DATA +TEST5=$(qsub -V -o ${LOG_FILE}05 -e ${LOG_FILE}05 -q $QUEUE -A $PROJECT_CODE -l walltime=00:05:00 \ + -N c192.jedi_lndincsoilnoahmp -l select=1:ncpus=12:mem=8GB $PWD/C192.jedi_lndincsoilnoahmp.sh) + qsub -V -o ${LOG_FILE} -e ${LOG_FILE} -q $QUEUE -A $PROJECT_CODE -l walltime=00:01:00 \ - -N cycle_summary -l select=1:ncpus=1:mem=100MB -W depend=afterok:$TEST1:$TEST2:$TEST3:$TEST4 << EOF + -N cycle_summary -l select=1:ncpus=1:mem=100MB -W depend=afterok:$TEST1:$TEST2:$TEST3:$TEST4:$TEST5 << EOF #!/bin/bash cd $reg_dir grep -a '<<<' ${LOG_FILE}?? | grep -v echo > summary.log diff --git a/reg_tests/grid_gen/driver.hera.sh b/reg_tests/grid_gen/driver.hera.sh index 66673ab34..00dbc1e79 100755 --- a/reg_tests/grid_gen/driver.hera.sh +++ b/reg_tests/grid_gen/driver.hera.sh @@ -103,19 +103,26 @@ TEST5=$(sbatch --parsable --ntasks-per-node=24 --nodes=1 -t 0:07:00 -A $PROJECT_ -o $LOG_FILE5 -e $LOG_FILE5 ./esg.regional.pct.cat.sh) #----------------------------------------------------------------------------- -# Regional GSL gravity wave drag test. +# Regional GSL gravity wave drag test. This test is run with varying +# thread counts. #----------------------------------------------------------------------------- +export nthreads=12 LOG_FILE6=${LOG_FILE}06 -TEST6=$(sbatch --parsable --ntasks-per-node=24 --nodes=1 -t 0:07:00 -A $PROJECT_CODE -q $QUEUE -J reg.gsl.gwd \ +TEST6=$(sbatch --parsable --ntasks-per-node=12 --nodes=1 -t 0:07:00 -A $PROJECT_CODE -q $QUEUE -J reg.gsl.gwd.12 \ -o $LOG_FILE6 -e $LOG_FILE6 ./regional.gsl.gwd.sh) +export nthreads=24 +LOG_FILE7=${LOG_FILE}07 +TEST7=$(sbatch --parsable --ntasks-per-node=24 --nodes=1 -t 0:07:00 -A $PROJECT_CODE -q $QUEUE -J reg.gsl.gwd.24 \ + -o $LOG_FILE7 -e $LOG_FILE7 ./regional.gsl.gwd.sh) + #----------------------------------------------------------------------------- # Create summary log. 
#----------------------------------------------------------------------------- sbatch --nodes=1 -t 0:01:00 -A $PROJECT_CODE -J grid_summary -o $LOG_FILE -e $LOG_FILE \ - --open-mode=append -q $QUEUE -d afterok:$TEST1:$TEST2:$TEST3:$TEST4:$TEST5:$TEST6 << EOF + --open-mode=append -q $QUEUE -d afterok:$TEST1:$TEST2:$TEST3:$TEST4:$TEST5:$TEST6:$TEST7 << EOF #!/bin/bash grep -a '<<<' ${LOG_FILE}* > $SUM_FILE EOF diff --git a/reg_tests/grid_gen/driver.hercules.sh b/reg_tests/grid_gen/driver.hercules.sh index 5f4a81478..b5da4c807 100755 --- a/reg_tests/grid_gen/driver.hercules.sh +++ b/reg_tests/grid_gen/driver.hercules.sh @@ -100,19 +100,27 @@ TEST5=$(sbatch --parsable --ntasks-per-node=24 --nodes=1 -t 0:10:00 -A $PROJECT_ -o $LOG_FILE5 -e $LOG_FILE5 ./esg.regional.pct.cat.sh) #----------------------------------------------------------------------------- -# Regional grid with GSL gravity wave drag fields. +# Regional grid with GSL gravity wave drag fields. Run with varying +# thread counts. #----------------------------------------------------------------------------- +export nthreads=12 LOG_FILE6=${LOG_FILE}06 -TEST6=$(sbatch --parsable --ntasks-per-node=24 --nodes=1 -t 0:10:00 -A $PROJECT_CODE -q $QUEUE -J reg.gsl.gwd \ +TEST6=$(sbatch --parsable --ntasks-per-node=12 --nodes=1 -t 0:10:00 -A $PROJECT_CODE -q $QUEUE -J reg.gsl.gwd.12 \ -o $LOG_FILE6 -e $LOG_FILE6 ./regional.gsl.gwd.sh) + +export nthreads=24 +LOG_FILE7=${LOG_FILE}07 +TEST7=$(sbatch --parsable --ntasks-per-node=24 --nodes=1 -t 0:10:00 -A $PROJECT_CODE -q $QUEUE -J reg.gsl.gwd.24 \ + -o $LOG_FILE7 -e $LOG_FILE7 ./regional.gsl.gwd.sh) + #----------------------------------------------------------------------------- # Create summary log. 
#----------------------------------------------------------------------------- sbatch --nodes=1 -t 0:01:00 -A $PROJECT_CODE -J grid_summary -o $LOG_FILE -e $LOG_FILE \ - -q $QUEUE -d afterok:$TEST1:$TEST2:$TEST3:$TEST4:$TEST5:$TEST6 << EOF + -q $QUEUE -d afterok:$TEST1:$TEST2:$TEST3:$TEST4:$TEST5:$TEST6:$TEST7 << EOF #!/bin/bash grep -a '<<<' ${LOG_FILE}* > $SUM_FILE EOF diff --git a/reg_tests/grid_gen/driver.jet.sh b/reg_tests/grid_gen/driver.jet.sh index 2a4c76a1f..e8d024fba 100755 --- a/reg_tests/grid_gen/driver.jet.sh +++ b/reg_tests/grid_gen/driver.jet.sh @@ -101,13 +101,20 @@ TEST5=$(sbatch --parsable --ntasks-per-node=24 --nodes=1 -t 0:07:00 -A $PROJECT_ --partition=xjet -o $LOG_FILE5 -e $LOG_FILE5 ./esg.regional.pct.cat.sh) #----------------------------------------------------------------------------- -# Regional GSL gravity wave drag. +# Regional GSL gravity wave drag. Run with varying +# thread counts. #----------------------------------------------------------------------------- +export nthreads=12 LOG_FILE6=${LOG_FILE}06 -TEST6=$(sbatch --parsable --ntasks-per-node=24 --nodes=1 -t 0:07:00 -A $PROJECT_CODE -q $QUEUE -J reg.gsl.gwd \ +TEST6=$(sbatch --parsable --ntasks-per-node=12 --nodes=1 -t 0:07:00 -A $PROJECT_CODE -q $QUEUE -J reg.gsl.gwd.12 \ --partition=xjet -o $LOG_FILE6 -e $LOG_FILE6 ./regional.gsl.gwd.sh) +export nthreads=24 +LOG_FILE7=${LOG_FILE}07 +TEST7=$(sbatch --parsable --ntasks-per-node=24 --nodes=1 -t 0:07:00 -A $PROJECT_CODE -q $QUEUE -J reg.gsl.gwd.24 \ + --partition=xjet -o $LOG_FILE7 -e $LOG_FILE7 ./regional.gsl.gwd.sh) + #----------------------------------------------------------------------------- # Create summary log. 
#----------------------------------------------------------------------------- diff --git a/reg_tests/grid_gen/driver.orion.sh b/reg_tests/grid_gen/driver.orion.sh index 03ff3ed6c..18fda2e69 100755 --- a/reg_tests/grid_gen/driver.orion.sh +++ b/reg_tests/grid_gen/driver.orion.sh @@ -99,19 +99,26 @@ TEST5=$(sbatch --parsable --ntasks-per-node=24 --nodes=1 -t 0:10:00 -A $PROJECT_ -o $LOG_FILE5 -e $LOG_FILE5 ./esg.regional.pct.cat.sh) #----------------------------------------------------------------------------- -# Regional grid with GSL gravity wave drag fields. +# Regional grid with GSL gravity wave drag fields. Run with varying +# thread counts. #----------------------------------------------------------------------------- +export nthreads=12 LOG_FILE6=${LOG_FILE}06 -TEST6=$(sbatch --parsable --ntasks-per-node=24 --nodes=1 -t 0:10:00 -A $PROJECT_CODE -q $QUEUE -J reg.gsl.gwd \ +TEST6=$(sbatch --parsable --ntasks-per-node=12 --nodes=1 -t 0:10:00 -A $PROJECT_CODE -q $QUEUE -J reg.gsl.gwd.12 \ -o $LOG_FILE6 -e $LOG_FILE6 ./regional.gsl.gwd.sh) +export nthreads=24 +LOG_FILE7=${LOG_FILE}07 +TEST7=$(sbatch --parsable --ntasks-per-node=24 --nodes=1 -t 0:10:00 -A $PROJECT_CODE -q $QUEUE -J reg.gsl.gwd.24 \ + -o $LOG_FILE7 -e $LOG_FILE7 ./regional.gsl.gwd.sh) + #----------------------------------------------------------------------------- # Create summary log. 
#----------------------------------------------------------------------------- sbatch --nodes=1 -t 0:01:00 -A $PROJECT_CODE -J grid_summary -o $LOG_FILE -e $LOG_FILE \ - -q $QUEUE -d afterok:$TEST1:$TEST2:$TEST3:$TEST4:$TEST5:$TEST6 << EOF + -q $QUEUE -d afterok:$TEST1:$TEST2:$TEST3:$TEST4:$TEST5:$TEST6:$TEST7 << EOF #!/bin/bash grep -a '<<<' ${LOG_FILE}* > $SUM_FILE EOF diff --git a/reg_tests/grid_gen/driver.wcoss2.sh b/reg_tests/grid_gen/driver.wcoss2.sh index 35c077f34..4e381c797 100755 --- a/reg_tests/grid_gen/driver.wcoss2.sh +++ b/reg_tests/grid_gen/driver.wcoss2.sh @@ -104,19 +104,26 @@ TEST5=$(qsub -V -o $LOG_FILE5 -e $LOG_FILE5 -q $QUEUE -A $PROJECT_CODE -l wallti -N esg.regional.pct.cat -l select=1:ncpus=30:mem=40GB $PWD/esg.regional.pct.cat.sh) #----------------------------------------------------------------------------- -# Regional GSL gravity wave drag test. +# Regional GSL gravity wave drag test. Run with varying +# thread counts. #----------------------------------------------------------------------------- +export nthreads=15 LOG_FILE6=${LOG_FILE}06 TEST6=$(qsub -V -o $LOG_FILE6 -e $LOG_FILE6 -q $QUEUE -A $PROJECT_CODE -l walltime=00:07:00 \ - -N rsg.gsl.gwd -l select=1:ncpus=30:mem=40GB $PWD/regional.gsl.gwd.sh) + -N reg.gsl.gwd.15 -l select=1:ncpus=15:mem=40GB $PWD/regional.gsl.gwd.sh) + +export nthreads=30 +LOG_FILE7=${LOG_FILE}07 +TEST7=$(qsub -V -o $LOG_FILE7 -e $LOG_FILE7 -q $QUEUE -A $PROJECT_CODE -l walltime=00:07:00 \ + -N reg.gsl.gwd -l select=1:ncpus=30:mem=40GB $PWD/regional.gsl.gwd.sh) #----------------------------------------------------------------------------- # Create summary log. 
#----------------------------------------------------------------------------- qsub -V -o ${LOG_FILE} -e ${LOG_FILE} -q $QUEUE -A $PROJECT_CODE -l walltime=00:02:00 \ - -N grid_summary -l select=1:ncpus=1:mem=100MB -W depend=afterok:$TEST1:$TEST2:$TEST3:$TEST4:$TEST5:$TEST6 << EOF + -N grid_summary -l select=1:ncpus=1:mem=100MB -W depend=afterok:$TEST1:$TEST2:$TEST3:$TEST4:$TEST5:$TEST6:$TEST7 << EOF #!/bin/bash cd ${this_dir} grep -a '<<<' ${LOG_FILE}* | grep -v echo > $SUM_FILE diff --git a/reg_tests/grid_gen/regional.gsl.gwd.sh b/reg_tests/grid_gen/regional.gsl.gwd.sh index 311116c1d..5ef2c8047 100755 --- a/reg_tests/grid_gen/regional.gsl.gwd.sh +++ b/reg_tests/grid_gen/regional.gsl.gwd.sh @@ -8,8 +8,11 @@ set -x -export TEMP_DIR=${WORK_DIR}/regional.gsl.gwd.work -export out_dir=${WORK_DIR}/regional.gsl.gwd +nthreads=${nthreads:-6} +export OMP_NUM_THREADS=$nthreads + +export TEMP_DIR=${WORK_DIR}/regional.gsl.gwd.${nthreads}.work +export out_dir=${WORK_DIR}/regional.gsl.gwd.${nthreads} export gtype=regional_esg export make_gsl_orog=true # Create GSL gravity wave drag fields @@ -34,7 +37,7 @@ $home_dir/ush/fv3gfs_driver_grid.sh iret=$? if [ $iret -ne 0 ]; then set +x - echo "<<< REGIONAL GSL GWD TEST FAILED. <<<" + echo "<<< REGIONAL ${nthreads} THREAD GSL GWD TEST FAILED. <<<" exit $iret fi @@ -61,12 +64,12 @@ done set +x if [ $test_failed -ne 0 ]; then - echo "<<< REGIONAL GSL GWD TEST FAILED. >>>" + echo "<<< REGIONAL ${nthreads} THREAD GSL GWD TEST FAILED. >>>" if [ "$UPDATE_BASELINE" = "TRUE" ]; then $home_dir/reg_tests/update_baseline.sh "${HOMEreg}/.." "regional.gsl.gwd" $commit_num fi else - echo "<<< REGIONAL GSL GWD TEST PASSED. >>>" + echo "<<< REGIONAL ${nthreads} THREAD GSL GWD TEST PASSED. >>>" fi exit 0 diff --git a/sorc/global_cycle.fd/cycle.f90 b/sorc/global_cycle.fd/cycle.f90 index 5191a5ffa..ef6221eae 100644 --- a/sorc/global_cycle.fd/cycle.f90 +++ b/sorc/global_cycle.fd/cycle.f90 @@ -42,11 +42,14 @@ !! file. !! 
- $NST_FILE Gaussian GSI file which contains NSST !! TREF increments -!! - $LND_SOI_FILE.$NNN Gaussian GSI file which contains soil state +!! - $sfcincr_gsi.$NNN Gaussian GSI file which contains soil state !! increments -!! - xainc.$NNN The cubed-sphere increment file (contains +!! - snow_xainc.$NNN The cubed-sphere snow increment file (contains !! increments calculated by JEDI on the native !! model grid). +!! - soil_xainc.$NNN The cubed-sphere soil increment file (contains +!! soil temperature and soil moisture increments +!! calculated by JEDI on the native model grid). !! !! OUTPUT FILES: !! - fnbgso.$NNN The updated sfc/nsst restart file. @@ -70,8 +73,17 @@ !! -DO_SFCCYCLE Call sfccycle routine to update surface fields !! -DO_LNDINC Read in land increment files, and add increments to !! relevant states. -!! -DO_SOI_INC Do land increments to soil states. -!! -DO_SNO_INC Do land increments to snow states. +!! NOTE: We do not have a GSI snow analysis +!! -DO_SOI_INC_GSI Do land increments to soil states on Gaussian grids. +!! -DO_SOI_INC_JEDI Do land increments to soil states on cubed-sphere tiles. +!! -DO_SNO_INC_JEDI Do land increments to snow states on cubed-sphere tiles +!! (Noah land model only). +!! -LSOIL_INCR Number of soil layers (from top) to apply soil increments to. +!! LSOIL_INCR is currently set to 3 by default. +!! Extra cautions are needed on layer#3 across permafrost regions due to +!! over sensitivity of moisture change when temperature approaches tfreez. +!! Please feel free to contact Yuan Xue (yuan.xue@noaa.gov) for further +!! concerns regarding this issue. !! - ISOT Use statsgo soil type when '1'. Use zobler when '0'. !! - IVEGSRC Use igbp veg type when '1'. Use sib when '2'. !! - ZSEA1/2_MM When running with NSST model, this is the lower/ @@ -85,8 +97,6 @@ !! (max_tasks-1). !! -NST_FILE path/name of the gaussian GSI file which contains NSST !! TREF increments. -!! -LND_SOI_FILE path/name of the gaussian GSI file which contains soil -!! 
state increments. !! !! -2005-02-03: Iredell for global_analysis !! -2014-11-30: xuli add nst_anl @@ -304,8 +314,9 @@ SUBROUTINE SFCDRV(LUGB, IDIM,JDIM,LENSFC,LSOIL,DELTSFC, & USE READ_WRITE_DATA use machine USE MPI - USE LAND_INCREMENTS, ONLY: ADD_INCREMENT_SOIL, & - ADD_INCREMENT_SNOW, & + USE LAND_INCREMENTS, ONLY: GAUSSIAN_TO_FV3_INTERP, & + ADD_INCREMENT_SOIL, & + ADD_JEDI_INCREMENT_SNOW, & CALCULATE_LANDINC_MASK, & APPLY_LAND_DA_ADJUSTMENTS_SOIL, & APPLY_LAND_DA_ADJUSTMENTS_SND, & @@ -327,7 +338,7 @@ SUBROUTINE SFCDRV(LUGB, IDIM,JDIM,LENSFC,LSOIL,DELTSFC, & CHARACTER(LEN=5) :: TILE_NUM CHARACTER(LEN=500) :: NST_FILE - CHARACTER(LEN=500) :: LND_SOI_FILE + CHARACTER(LEN=500) :: GSI_SOI_FILE,JEDI_SOI_FILE,JEDI_SNO_FILE CHARACTER(LEN=4) :: INPUT_NML_FILE(SZ_NML) INTEGER :: I, IERR @@ -375,22 +386,26 @@ SUBROUTINE SFCDRV(LUGB, IDIM,JDIM,LENSFC,LSOIL,DELTSFC, & real, dimension(lensfc) :: tf_clm_tile,tf_trd_tile,sal_clm_tile INTEGER :: veg_type_landice INTEGER, DIMENSION(LENSFC) :: STC_UPDATED, SLC_UPDATED + REAL, DIMENSION(LENSFC,LSOIL) :: STCINC, SLCINC - LOGICAL :: FILE_EXISTS, DO_SOI_INC, DO_SNO_INC + LOGICAL :: FILE_EXISTS, DO_SOI_INC_GSI, DO_SOI_INC_JEDI, DO_SNO_INC_JEDI CHARACTER(LEN=3) :: RANKCH + INTEGER :: lsoil_incr !-------------------------------------------------------------------------------- ! NST_FILE is the path/name of the gaussian GSI file which contains NSST ! increments. !-------------------------------------------------------------------------------- - NAMELIST/NAMSFCD/ NST_FILE, LND_SOI_FILE, DO_SNO_INC + NAMELIST/NAMSFCD/ NST_FILE, lsoil_incr, DO_SNO_INC_JEDI, DO_SOI_INC_JEDI, DO_SOI_INC_GSI DATA NST_FILE/'NULL'/ - DATA LND_SOI_FILE/'NULL'/ - DO_SNO_INC = .FALSE. - DO_SOI_INC = .FALSE. + DO_SNO_INC_JEDI = .FALSE. + DO_SOI_INC_GSI = .FALSE. + DO_SOI_INC_JEDI = .FALSE. + lsoil_incr = 3 !default + SIG1T = 0.0 ! Not a dead start! @@ -455,15 +470,19 @@ SUBROUTINE SFCDRV(LUGB, IDIM,JDIM,LENSFC,LSOIL,DELTSFC, & IF (DO_LNDINC) THEN ! 
identify variables to be updated, and allocate arrays. - IF (TRIM(LND_SOI_FILE) .NE. "NULL") THEN - DO_SOI_INC = .TRUE. + IF (DO_SOI_INC_GSI .and. DO_SOI_INC_JEDI) THEN + PRINT* + PRINT*, 'FATAL ERROR: Can not do gsi and jedi soil updates at the same time, choose one!' + CALL MPI_ABORT(MPI_COMM_WORLD, 15, IERR) + ENDIF + IF (DO_SOI_INC_GSI .or. DO_SOI_INC_JEDI) THEN PRINT* - PRINT*," APPLYING SOIL INCREMENTS FROM THE GSI" + PRINT*," APPLYING SOIL INCREMENTS FROM GSI OR JEDI" ALLOCATE(STC_BCK(LENSFC, LSOIL), SMC_BCK(LENSFC, LSOIL), SLC_BCK(LENSFC,LSOIL)) ALLOCATE(LANDINC_MASK_FG(LENSFC)) ENDIF ! FOR NOW, CODE SO CAN DO BOTH, BUT MIGHT NEED TO THINK ABOUT THIS SOME MORE. - IF (DO_SNO_INC) THEN + IF (DO_SNO_INC_JEDI) THEN ! ideally, would check here that sfcsub snow DA update is not also requested ! but latter is controlled by fnsol, which is read in within that routine. ! should be done at script level. @@ -484,7 +503,7 @@ SUBROUTINE SFCDRV(LUGB, IDIM,JDIM,LENSFC,LSOIL,DELTSFC, & ! READ THE INPUT SURFACE DATA ON THE CUBED-SPHERE TILE. !-------------------------------------------------------------------------------- - CALL READ_DATA(LSOIL,LENSFC,DO_NSST,.false.,IS_NOAHMP=IS_NOAHMP, & + CALL READ_DATA(LSOIL,LENSFC,DO_NSST,DO_SNO_INC_JEDI,DO_SOI_INC_JEDI,.false.,IS_NOAHMP=IS_NOAHMP, & TSFFCS=TSFFCS,SMCFCS=SMCFCS, & SWEFCS=SWEFCS,STCFCS=STCFCS,TG3FCS=TG3FCS,ZORFCS=ZORFCS, & CVFCS=CVFCS, CVBFCS=CVBFCS,CVTFCS=CVTFCS,ALBFCS=ALBFCS, & @@ -501,7 +520,7 @@ SUBROUTINE SFCDRV(LUGB, IDIM,JDIM,LENSFC,LSOIL,DELTSFC, & call MPI_ABORT(MPI_COMM_WORLD, 18, IERR) ENDIF - IF (IS_NOAHMP .AND. DO_SNO_INC) THEN + IF (IS_NOAHMP .AND. DO_SNO_INC_JEDI) THEN print *, 'FATAL ERROR: Snow increment update does not work with NOAH_MP.' call MPI_ABORT(MPI_COMM_WORLD, 29, IERR) ENDIF @@ -540,7 +559,7 @@ SUBROUTINE SFCDRV(LUGB, IDIM,JDIM,LENSFC,LSOIL,DELTSFC, & ! 
CALCULATE MASK FOR LAND INCREMENTS IF (DO_LNDINC) & - CALL CALCULATE_LANDINC_MASK(SMCFCS(:,1),SWEFCS, VETFCS, & + CALL CALCULATE_LANDINC_MASK(SWEFCS, VETFCS, SOTFCS, & LENSFC,VEG_TYPE_LANDICE, LANDINC_MASK) !-------------------------------------------------------------------------------- @@ -662,24 +681,35 @@ SUBROUTINE SFCDRV(LUGB, IDIM,JDIM,LENSFC,LSOIL,DELTSFC, & ENDIF !-------------------------------------------------------------------------------- -! READ IN AND APPLY LAND INCREMENTS FROM THE GSI +! READ IN AND APPLY LAND INCREMENTS FROM THE GSI/JEDI !-------------------------------------------------------------------------------- IF (DO_LNDINC) THEN ! SNOW INCREMENTS ! do snow first, as temperature updates will use snow analaysis - IF (DO_SNO_INC) THEN + IF (DO_SNO_INC_JEDI) THEN ! updates are made to snow depth only over land (and not-land ice). ! SWE is then updated from the snow depth analysis, using the model ! forecast density + ! make sure incr. files exist + WRITE(RANKCH, '(I3.3)') (MYRANK+1) + JEDI_SNO_FILE = "snow_xainc." // RANKCH + + INQUIRE(FILE=trim(JEDI_SNO_FILE), EXIST=file_exists) + IF (.not. file_exists) then + print *, 'FATAL ERROR: snow increment (fv3 grid) update requested, & + but file does not exist : ', trim(jedi_sno_file) + call MPI_ABORT(MPI_COMM_WORLD, 10, IERR) + ENDIF + !-------------------------------------------------------------------------------- ! read increments in !-------------------------------------------------------------------------------- ! Only coded for DA on native model grid (would always be the case for cycling DA) - CALL READ_DATA(LSOIL,LENSFC,.false.,.true.,SNDFCS=SND_INC) + CALL READ_DATA(LSOIL,LENSFC,.false.,.true.,.false.,.true.,SNDFCS=SND_INC) !-------------------------------------------------------------------------------- ! 
add increments to state vars @@ -689,7 +719,7 @@ SUBROUTINE SFCDRV(LUGB, IDIM,JDIM,LENSFC,LSOIL,DELTSFC, & SND_BCK = SNDFCS SWE_BCK = SWEFCS - CALL ADD_INCREMENT_SNOW(SND_INC,LANDINC_MASK,LENSFC,SNDFCS) + CALL ADD_JEDI_INCREMENT_SNOW(SND_INC,LANDINC_MASK,LENSFC,SNDFCS) !-------------------------------------------------------------------------------- ! make any necessary adjustments to dependent variables @@ -698,72 +728,102 @@ SUBROUTINE SFCDRV(LUGB, IDIM,JDIM,LENSFC,LSOIL,DELTSFC, & CALL APPLY_LAND_DA_ADJUSTMENTS_SND(LSM, LENSFC, LANDINC_MASK, SWE_BCK, SND_BCK, & SNDFCS, SWEFCS) + ENDIF ! jedi snow increments + + !re-calculate soilsnow mask if snow has been updated. + LANDINC_MASK_FG = LANDINC_MASK + + IF (DO_SFCCYCLE .OR. DO_SNO_INC_JEDI) THEN + CALL CALCULATE_LANDINC_MASK(SWEFCS, VETFCS, SOTFCS, LENSFC, & + VEG_TYPE_LANDICE, LANDINC_MASK) ENDIF + ! store background states + STC_BCK = STCFCS + SMC_BCK = SMCFCS + SLC_BCK = SLCFCS + ! SOIL INCREMENTS - IF (DO_SOI_INC) THEN + IF (DO_SOI_INC_GSI) THEN !-------------------------------------------------------------------------------- - ! re-calculate soilsnow mask if snow has been updated. + ! read increments in !-------------------------------------------------------------------------------- + ! make sure incr. files exist + WRITE(RANKCH, '(I3.3)') (MYRANK+1) + GSI_SOI_FILE = "sfcincr_gsi." // RANKCH + + INQUIRE(FILE=trim(GSI_SOI_FILE), EXIST=file_exists) + IF (.not. file_exists) then + print *, 'FATAL ERROR: gsi soil increment (gaussian grid) update requested, & + but file does not exist : ', trim(gsi_soi_file) + call MPI_ABORT(MPI_COMM_WORLD, 10, IERR) + ENDIF - LANDINC_MASK_FG = LANDINC_MASK + CALL READ_GSI_DATA(GSI_SOI_FILE, 'LND', LSOIL=LSOIL) - IF (DO_SFCCYCLE .OR. DO_SNO_INC) THEN - CALL CALCULATE_LANDINC_MASK(SMCFCS(:,1),SWEFCS, VETFCS, LENSFC, & - VEG_TYPE_LANDICE, LANDINC_MASK ) - ENDIF + !-------------------------------------------------------------------------------- + ! 
interpolate increments to cubed sphere tiles + !-------------------------------------------------------------------------------- + + CALL GAUSSIAN_TO_FV3_INTERP(LSOIL_INCR,RLA,RLO,& + STCINC,SLCINC,LANDINC_MASK,LENSFC,LSOIL,IDIM,JDIM,LSM,MYRANK) + + !-------------------------------------------------------------------------------- + ! save interpolated increments + !-------------------------------------------------------------------------------- + CALL WRITE_DATA(LENSFC,IDIM,JDIM,LSOIL,DO_NSST,.true.,NSST, & + STCINC=STCINC,SLCINC=SLCINC) + + ENDIF ! end reading and interpolating gsi soil increments + + IF (DO_SOI_INC_JEDI) THEN !-------------------------------------------------------------------------------- ! read increments in !-------------------------------------------------------------------------------- - + ! make sure incr. files exist WRITE(RANKCH, '(I3.3)') (MYRANK+1) + JEDI_SOI_FILE = "soil_xainc." // RANKCH - LND_SOI_FILE = trim(LND_SOI_FILE) // "." // RANKCH - - INQUIRE(FILE=trim(LND_SOI_FILE), EXIST=file_exists) + INQUIRE(FILE=trim(JEDI_SOI_FILE), EXIST=file_exists) IF (.not. file_exists) then - print *, 'FATAL ERROR: land increment update requested, but file does not exist: ', & - trim(lnd_soi_file) + print *, 'FATAL ERROR: soil increment (fv3 grid) update requested, but file & + does not exist: ', trim(jedi_soi_file) call MPI_ABORT(MPI_COMM_WORLD, 10, IERR) ENDIF - CALL READ_GSI_DATA(LND_SOI_FILE, 'LND', LSOIL=LSOIL) + CALL READ_DATA(LSOIL,LENSFC,.false.,.false.,.true., & + .true.,IS_NOAHMP=IS_NOAHMP, & + STCINC=STCINC,SLCINC=SLCINC) + + ENDIF ! end reading jedi soil increments + + IF (DO_SOI_INC_GSI .or. DO_SOI_INC_JEDI) THEN !-------------------------------------------------------------------------------- ! add increments to state vars !-------------------------------------------------------------------------------- - ! when applying increments, will often need to adjust other land states in response - ! to the changes made. 
Need to store bacground, apply the increments, then make - ! secondart adjustments. When updating more than one state, be careful about the - ! order if increments and secondary adjustments. - - ! store background states - STC_BCK = STCFCS - SMC_BCK = SMCFCS - SLC_BCK = SLCFCS ! below updates [STC/SMC/STC]FCS to hold the analysis - CALL ADD_INCREMENT_SOIL(RLA,RLO,STCFCS,SMCFCS,SLCFCS,STC_UPDATED, SLC_UPDATED, & - LANDINC_MASK,LANDINC_MASK_FG,LENSFC,LSOIL,IDIM,JDIM,LSM,MYRANK) + CALL ADD_INCREMENT_SOIL(LSOIL_INCR,STCINC,SLCINC,STCFCS,SMCFCS,SLCFCS,STC_UPDATED, & + SLC_UPDATED,LANDINC_MASK,LANDINC_MASK_FG,LENSFC,LSOIL,LSM,MYRANK) !-------------------------------------------------------------------------------- ! make any necessary adjustments to dependent variables !-------------------------------------------------------------------------------- + CALL APPLY_LAND_DA_ADJUSTMENTS_SOIL(LSOIL_INCR, LSM, ISOT, IVEGSRC,LENSFC, LSOIL, & + SOTFCS, LANDINC_MASK_FG, STC_BCK, STCFCS, SMCFCS, SLCFCS, STC_UPDATED, & + SLC_UPDATED,ZSOIL) - CALL APPLY_LAND_DA_ADJUSTMENTS_SOIL(LSM, ISOT, IVEGSRC,LENSFC, LSOIL, & - SOTFCS, LANDINC_MASK_FG, STC_BCK, STCFCS, SMCFCS, SLCFCS, STC_UPDATED, & - SLC_UPDATED,ZSOIL) - ENDIF ! soil increments + ENDIF ! end applying soil increments and making adjustments !-------------------------------------------------------------------------------- ! clean up !-------------------------------------------------------------------------------- - ! to do - save and write out STC_INC? 
(soil temperature increments) IF(ALLOCATED(LANDINC_MASK_FG)) DEALLOCATE(LANDINC_MASK_FG) IF(ALLOCATED(LANDINC_MASK)) DEALLOCATE(LANDINC_MASK) IF(ALLOCATED(STC_BCK)) DEALLOCATE(STC_BCK) @@ -780,14 +840,14 @@ SUBROUTINE SFCDRV(LUGB, IDIM,JDIM,LENSFC,LSOIL,DELTSFC, & IF (LSM==LSM_NOAHMP) THEN - CALL WRITE_DATA(LENSFC,IDIM,JDIM,LSOIL,DO_NSST,NSST,VEGFCS=VEGFCS, & + CALL WRITE_DATA(LENSFC,IDIM,JDIM,LSOIL,DO_NSST,.false.,NSST,VEGFCS=VEGFCS, & SLCFCS=SLCFCS,SMCFCS=SMCFCS,STCFCS=STCFCS,& SICFCS=SICFCS,SIHFCS=SIHFCS) ELSEIF (LSM==LSM_NOAH) THEN CALL WRITE_DATA(LENSFC,IDIM,JDIM,LSOIL, & - DO_NSST,NSST,SLIFCS=SLIFCS,TSFFCS=TSFFCS,VEGFCS=VEGFCS, & + DO_NSST,.false.,NSST,SLIFCS=SLIFCS,TSFFCS=TSFFCS,VEGFCS=VEGFCS, & SWEFCS=SWEFCS,TG3FCS=TG3FCS,ZORFCS=ZORFCS, & ALBFCS=ALBFCS,ALFFCS=ALFFCS,CNPFCS=CNPFCS, & F10M=F10M,T2M=T2M,Q2M=Q2M,VETFCS=VETFCS, & diff --git a/sorc/global_cycle.fd/land_increments.f90 b/sorc/global_cycle.fd/land_increments.f90 index b9738250c..3cff0c163 100644 --- a/sorc/global_cycle.fd/land_increments.f90 +++ b/sorc/global_cycle.fd/land_increments.f90 @@ -6,8 +6,9 @@ module land_increments private + public gaussian_to_fv3_interp public add_increment_soil - public add_increment_snow + public add_jedi_increment_snow public calculate_landinc_mask public apply_land_da_adjustments_soil public apply_land_da_adjustments_snd @@ -18,42 +19,32 @@ module land_increments !! copied from GFS_typedefs.F90 ! control state for soil analysis: - integer, parameter :: lsoil_incr=3 !< number of layers to add incrments to real, parameter :: tfreez=273.16 !< con_t0c in physcons contains !> Read in gsi file with soil state increments (on the gaussian - !! grid), interpolate increments to the cubed-sphere tile, and - !! add to the soil states. Adapted from adjust_nsst. - !! Currently only coded for soil temperature. Soil moisture will - !! need the model soil moisture paramaters for regridding. - !! - !! Does not make a temperature update if snow differ - !! 
between fg and anal (allow correction of snow to - !! address temperature error first), or if snow is present - !! (will eventually updating of snow temperature in this case) + !! grid), interpolate increments to the cubed-sphere tile + !! Adapted from adjust_nsst. !! !! @param[inout] RLA Latitude on the cubed-sphere tile !! @param[inout] RLO Longitude on the cubed-sphere tile - !! @param[inout] STC_STATE Soil temperature state vector - !! @param[inout] SMC_STATE Soil moisture (liquid plus solid) state vector - !! @param[inout] SLC_STATE Liquid soil moisture state vector - !! @param[out] stc_updated Integer to record whether STC in each grid cell was updated - !! @param[out] slc_updated Integer to record whether SMC in each grid cell was updated !! @param[in] SOILSNOW_TILE Land mask for increments on the cubed-sphere tile - !! @param[in] SOILSNOW_FG_TILE First guess land mask for increments on the cubed-sphere tile !! @param[in] LENSFC Number of land points on a tile !! @param[in] LSOIL Number of soil layers + !! @param[in] LSOIL_INCR Number of soil layers (from top) to apply soil increments to !! @param[in] IDIM 'I' dimension of a tile !! @param[in] JDIM 'J' dimension of a tile !! @param[in] lsm Integer flag indicating which land model is used (1-Noah, 2-Noah-MP) !! @param[in] MYRANK MPI rank number + !! @param[out] stcinc Soil temperature increments on the cubed-sphere tile + !! @param[out] slcinc Liquid soil moisture increments on the cubed-sphere tile !! !! @author Clara Draper. @date March 2021 + !! @author Yuan Xue. 
@date Mar 2024 -subroutine add_increment_soil(rla,rlo,stc_state,smc_state,slc_state,stc_updated, slc_updated, & - soilsnow_tile,soilsnow_fg_tile,lensfc,lsoil,idim,jdim,lsm, myrank) +subroutine gaussian_to_fv3_interp(lsoil_incr,rla,rlo, & + stcinc,slcinc,soilsnow_tile,lensfc,lsoil,idim,jdim,lsm, myrank) use utils use gdswzd_mod @@ -63,18 +54,17 @@ subroutine add_increment_soil(rla,rlo,stc_state,smc_state,slc_state,stc_updated, implicit none - integer, intent(in) :: lensfc, lsoil, idim, jdim, myrank, lsm + integer, intent(in) :: lsoil_incr, lensfc, lsoil, idim, jdim, myrank, lsm - integer, intent(in) :: soilsnow_tile(lensfc), soilsnow_fg_tile(lensfc) + integer, intent(in) :: soilsnow_tile(lensfc) real, intent(inout) :: rla(lensfc), rlo(lensfc) - real, intent(inout) :: stc_state(lensfc, lsoil) - real, intent(inout) :: slc_state(lensfc, lsoil) - real, intent(inout) :: smc_state(lensfc, lsoil) - integer, intent(out) :: stc_updated(lensfc), slc_updated(lensfc) + + real, intent(out) :: stcinc(lensfc,lsoil) + real, intent(out) :: slcinc(lensfc,lsoil) integer :: iopt, nret, kgds_gaus(200) integer :: igaus, jgaus, ij - integer :: mask_tile, mask_fg_tile + integer :: mask_tile integer :: itile, jtile integer :: j, ierr integer :: igausp1, jgausp1 @@ -90,12 +80,9 @@ subroutine add_increment_soil(rla,rlo,stc_state,smc_state,slc_state,stc_updated, real, allocatable :: dum2d(:,:), lats_rad(:), lons_rad(:) real, allocatable :: agrid(:,:,:), s2c(:,:,:) - integer :: k, nother, nsnowupd, nnosoilnear - integer :: nstcupd, nslcupd, nfrozen, nfrozen_upd - logical :: gaus_has_soil, soil_freeze, soil_ice + integer :: k + logical :: gaus_has_soil - stc_updated=0 - slc_updated=0 ! this produces the same lat/lon as can be read in from the file kgds_gaus = 0 @@ -122,12 +109,8 @@ subroutine add_increment_soil(rla,rlo,stc_state,smc_state,slc_state,stc_updated, upd_slc=.true. 
endif - print* - print*,'adjust soil using gsi increments on gaussian grid' - print*,'updating soil temps', upd_stc - print*,'updating soil moisture', upd_slc - print*,'adjusting first ', lsoil_incr, ' surface layers only' - + print*,' Start interpolating first ', lsoil_incr, ' surface layers only' + !---------------------------------------------------------------------- ! call gdswzd to compute the lat/lon of each gsi gaussian grid point. !---------------------------------------------------------------------- @@ -189,35 +172,14 @@ subroutine add_increment_soil(rla,rlo,stc_state,smc_state,slc_state,stc_updated, lons_rad, lats_rad, id1, id2, jdc, s2c, agrid ) deallocate(lons_rad, lats_rad, agrid) - ! - ! initialize variables for counts statitics to be zeros - ! - - ! - nother = 0 ! grid cells not land - nsnowupd = 0 ! grid cells with snow (temperature not yet updated) - nnosoilnear = 0 ! grid cells where model has soil, but 4 closest gaus grids don't - ! (no update made here) - nslcupd = 0 ! grid cells that are updated - nstcupd = 0 ! grid cells that are updated - nfrozen = 0 ! not update as frozen soil - nfrozen_upd = 0 ! not update as frozen soil + ! initialize matrix + stcinc = 0.0 + slcinc = 0.0 ij_loop : do ij = 1, lensfc mask_tile = soilsnow_tile(ij) - mask_fg_tile = soilsnow_fg_tile(ij) - - !---------------------------------------------------------------------- - ! mask: 1 - soil, 2 - snow, 0 - land-ice, -1 - not land - !---------------------------------------------------------------------- - - if (mask_tile <= 0) then ! skip if neither soil nor snow - nother = nother + 1 - cycle ij_loop - endif - ! get i,j index on array of (idim,jdim) from known ij @@ -225,15 +187,6 @@ subroutine add_increment_soil(rla,rlo,stc_state,smc_state,slc_state,stc_updated, itile = mod(ij,idim) if (itile==0) itile = idim - !---------------------------------------------------------------------- - ! 
if snow is present before or after snow update, skip soil analysis - !---------------------------------------------------------------------- - - if (mask_fg_tile == 2 .or. mask_tile == 2) then - nsnowupd = nsnowupd + 1 - cycle ij_loop - endif - !---------------------------------------------------------------------- ! do update to soil temperature grid cells, using bilinear interp !---------------------------------------------------------------------- @@ -253,7 +206,6 @@ subroutine add_increment_soil(rla,rlo,stc_state,smc_state,slc_state,stc_updated, soilsnow_gaus(igaus,jgausp1) == 1) gaus_has_soil = .true. if (.not. gaus_has_soil) then - nnosoilnear = nnosoilnear + 1 cycle ij_loop endif @@ -304,8 +256,123 @@ subroutine add_increment_soil(rla,rlo,stc_state,smc_state,slc_state,stc_updated, ! normalize increments do k = 1, lsoil_incr stc_inc(k) = stc_inc(k) / wsum + stcinc(ij,k) = stc_inc(k) slc_inc(k) = slc_inc(k) / wsum + slcinc(ij,k) = slc_inc(k) enddo + + endif ! if soil/snow point + + enddo ij_loop + + write(*,'(a,i2)') ' Finish soil increments interpolation for rank : ', myrank + + deallocate(id1, id2, jdc, s2c) + +end subroutine gaussian_to_fv3_interp + + !> Read in soil state increments (on the cubed-sphere + !! grid),and add to the soil states. Adapted from original add_gsi_increment_soil routine. + !! + !! @param[in] SLCINC Liquid soil moisture increments on the cubed-sphere tile + !! @param[in] STCINC Soil temperature increments on the cubed-sphere tile + !! @param[inout] STC_STATE Soil temperature state vector + !! @param[inout] SMC_STATE Soil moisture (liquid plus solid) state vector + !! @param[inout] SLC_STATE Liquid soil moisture state vector + !! @param[out] stc_updated Integer to record whether STC in each grid cell was updated + !! @param[out] slc_updated Integer to record whether SMC in each grid cell was updated + !! @param[in] SOILSNOW_TILE Land mask for increments on the cubed-sphere tile + !! 
@param[in] SOILSNOW_FG_TILE First guess land mask for increments on the + !! cubed-sphere tile + !! @param[in] LENSFC Number of land points on a tile + !! @param[in] LSOIL Number of soil layers + !! @param[in] LSOIL_INCR Number of soil layers (from top) to apply soil increments to + !! @param[in] lsm Integer flag indicating which land model is used + !! @param[in] MYRANK MPI rank number + !! + !! @author Yuan Xue. @date November 2023 + +subroutine add_increment_soil(lsoil_incr,stcinc,slcinc,stc_state,smc_state,slc_state,stc_updated,& + slc_updated,soilsnow_tile,soilsnow_fg_tile,lensfc,lsoil,lsm,myrank) + + use mpi + + implicit none + + integer, intent(in) :: lsoil_incr, lensfc, lsoil, myrank, lsm + + integer, intent(in) :: soilsnow_tile(lensfc), soilsnow_fg_tile(lensfc) + real, intent(inout) :: stc_state(lensfc, lsoil) + real, intent(inout) :: slc_state(lensfc, lsoil) + real, intent(inout) :: smc_state(lensfc, lsoil) + integer, intent(out) :: stc_updated(lensfc), slc_updated(lensfc) + + + integer :: ij + integer :: mask_tile, mask_fg_tile + logical :: upd_slc, upd_stc + + real :: stcinc(lensfc,lsoil) + real :: slcinc(lensfc,lsoil) + + integer :: k, nother, nsnowupd + integer :: nstcupd, nslcupd, nfrozen, nfrozen_upd + logical :: soil_freeze, soil_ice + + stc_updated=0 + slc_updated=0 + + if (lsm==lsm_noah) then + upd_stc=.true. + upd_slc=.false. ! not coded + elseif (lsm==lsm_noahmp) then + upd_stc=.true. + upd_slc=.true. + endif + + print* + print*,'adjust soil using increments on cubed-sphere tiles' + print*,'updating soil temps', upd_stc + print*,'updating soil moisture', upd_slc + print*,'adjusting first ', lsoil_incr, ' surface layers only' + + ! initialize variables for counts statistics to be zeros + nother = 0 ! grid cells not land + nsnowupd = 0 ! grid cells with snow (temperature not yet updated) + nslcupd = 0 ! grid cells that are updated + nstcupd = 0 ! grid cells that are updated + nfrozen = 0 ! not update as frozen soil + nfrozen_upd = 0 ! 
not update as frozen soil + + ij_loop : do ij = 1, lensfc + + mask_tile = soilsnow_tile(ij) + mask_fg_tile = soilsnow_fg_tile(ij) + + !---------------------------------------------------------------------- + ! mask: 1 - soil, 2 - snow, 0 - land-ice, -1 - not land + !---------------------------------------------------------------------- + + if (mask_tile <= 0) then ! skip if neither soil nor snow + nother = nother + 1 + cycle ij_loop + endif + + !---------------------------------------------------------------------- + ! if snow is present before or after snow update, skip soil analysis + !---------------------------------------------------------------------- + + if (mask_fg_tile == 2 .or. mask_tile == 2) then + nsnowupd = nsnowupd + 1 + cycle ij_loop + endif + + !---------------------------------------------------------------------- + ! do update to soil temperature grid cells + !---------------------------------------------------------------------- + + if (mask_tile == 1) then + !---------------------------------------------------------------------- ! add the interpolated increment to the background !---------------------------------------------------------------------- @@ -318,26 +385,27 @@ subroutine add_increment_soil(rla,rlo,stc_state,smc_state,slc_state,stc_updated, if ( smc_state(ij,k) - slc_state(ij,k) > 0.001 ) soil_ice=.true. if (upd_stc) then - stc_state(ij,k) = stc_state(ij,k) + stc_inc(k) - if (k==1) then + stc_state(ij,k) = stc_state(ij,k) + stcinc(ij,k) + if (k==1) then stc_updated(ij) = 1 nstcupd = nstcupd + 1 endif endif - if ( (stc_state(ij,k) < tfreez) .and. (.not. soil_freeze) .and. (k==1) ) & - nfrozen_upd = nfrozen_upd + 1 + if ( (stc_state(ij,k) < tfreez) .and. (.not. soil_freeze) .and. (k==1) )& + nfrozen_upd = nfrozen_upd + 1 ! do not do updates if this layer or any above is frozen - if ( (.not. soil_freeze ) .and. (.not. soil_ice ) ) then - if (upd_slc) then - if (k==1) then + if ( (.not. soil_freeze ) .and. (.not. 
soil_ice ) ) then + if (upd_slc) then + if (k==1) then nslcupd = nslcupd + 1 slc_updated(ij) = 1 endif - ! apply zero limit here (higher, model-specific limits are later) - slc_state(ij,k) = max(slc_state(ij,k) + slc_inc(k), 0.0) - smc_state(ij,k) = max(smc_state(ij,k) + slc_inc(k), 0.0) + ! apply zero limit here (higher, model-specific limits are + ! later) + slc_state(ij,k) = max(slc_state(ij,k) + slcinc(ij,k), 0.0) + smc_state(ij,k) = max(smc_state(ij,k) + slcinc(ij,k), 0.0) endif else if (k==1) nfrozen = nfrozen+1 @@ -347,22 +415,20 @@ subroutine add_increment_soil(rla,rlo,stc_state,smc_state,slc_state,stc_updated, endif ! if soil/snow point - enddo ij_loop - - write(*,'(a,i2)') 'statistics of grids number processed for rank : ', myrank - write(*,'(a,i8)') ' soil grid total', lensfc - write(*,'(a,i8)') ' soil grid cells slc updated = ',nslcupd - write(*,'(a,i8)') ' soil grid cells stc updated = ',nstcupd - write(*,'(a,i8)') ' soil grid cells not updated, frozen = ',nfrozen - write(*,'(a,i8)') ' soil grid cells update, became frozen = ',nfrozen_upd - write(*,'(a,i8)') ' (not updated) soil grid cells, no soil nearby on gsi grid = ',nnosoilnear - write(*,'(a,i8)') ' (not updated yet) snow grid cells = ', nsnowupd - write(*,'(a,i8)') ' grid cells, without soil or snow = ', nother + enddo ij_loop - deallocate(id1, id2, jdc, s2c) + write(*,'(a,i2)') ' statistics of grids number processed for rank : ', myrank + write(*,'(a,i8)') ' soil grid total', lensfc + write(*,'(a,i8)') ' soil grid cells slc updated = ',nslcupd + write(*,'(a,i8)') ' soil grid cells stc updated = ',nstcupd + write(*,'(a,i8)') ' soil grid cells not updated, frozen = ',nfrozen + write(*,'(a,i8)') ' soil grid cells update, became frozen = ',nfrozen_upd + write(*,'(a,i8)') ' (not updated yet) snow grid cells = ', nsnowupd + write(*,'(a,i8)') ' grid cells, without soil or snow = ', nother end subroutine add_increment_soil + !> Add snow depth increment to model snow depth state, !! 
and limit output to be non-negative. JEDI increments are !! calculated globally, so must be screened to land-only locations @@ -375,7 +441,7 @@ end subroutine add_increment_soil !! !! @author Clara Draper. @date August 2021 -subroutine add_increment_snow(snd_inc,mask,lensfc,snd) +subroutine add_jedi_increment_snow(snd_inc,mask,lensfc,snd) implicit none @@ -393,25 +459,26 @@ subroutine add_increment_snow(snd_inc,mask,lensfc,snd) endif enddo -end subroutine add_increment_snow +end subroutine add_jedi_increment_snow !> Calculate soil mask for land on model grid. !! Output is 1 - soil, 2 - snow-covered, 0 - land ice, -1 not land. !! !! @param[in] lensfc Number of land points for this tile !! @param[in] veg_type_landice Value of vegetion class that indicates land-ice -!! @param[in] smc Model soil moisture. +!! @param[in] stype Soil type !! @param[in] swe Model snow water equivalent !! @param[in] vtype Model vegetation type !! @param[out] mask Land mask for increments !! @author Clara Draper @date March 2021 -subroutine calculate_landinc_mask(smc,swe,vtype,lensfc,veg_type_landice,mask) +!! @author Yuan Xue: introduce stype to make the mask calculation more generic +subroutine calculate_landinc_mask(swe,vtype,stype,lensfc,veg_type_landice,mask) implicit none integer, intent(in) :: lensfc, veg_type_landice - real, intent(in) :: smc(lensfc), swe(lensfc) - real, intent(in) :: vtype(lensfc) + real, intent(in) :: swe(lensfc) + real, intent(in) :: vtype(lensfc),stype(lensfc) integer, intent(out) :: mask(lensfc) integer :: i @@ -420,7 +487,7 @@ subroutine calculate_landinc_mask(smc,swe,vtype,lensfc,veg_type_landice,mask) ! land (but not land-ice) do i=1,lensfc - if (smc(i) .LT. 0.99) then + if (nint(stype(i)) .GT. 0) then if (swe(i) .GT. 0.001) then ! snow covered land mask(i) = 2 else ! non-snow covered land @@ -439,18 +506,19 @@ end subroutine calculate_landinc_mask !! if full for Noah LSM. !! For Noah LSM, copy relevent code blocks from model code (same as has !! 
been done in sfc_sub). -!! For Noah-MP, have inserted place-holders to simply reset the model -!! variables back to the analysis if adjustments are needed. Later, will replace -!! this with appropriate adjustmenets (in summary, for now we do not -!! make STC updates if soils are frozen, and are also not applying the -!! appropriate max. values for SMC). -!! Here: adjust (frozen) soil moisture to be consistent with changes in -!! soil temperature from DA +!! For Noah-MP, the adjustment scheme shown below as of 11/09/2023: +!! Case 1: frozen ==> frozen, recalculate slc following opt_frz=1, smc remains +!! Case 2: unfrozen ==> frozen, recalculate slc following opt_frz=1, smc remains +!! Case 3: frozen ==> unfrozen, melt all soil ice (if any) +!! Case 4: unfrozen ==> unfrozen along with other cases, (e.g., soil temp=tfrz),do nothing +!! Note: For Case 3, Yuan Xue thoroughly evaluated a total of four options and +!! current option is found to be the best as of 11/09/2023 !! @param[in] lsm Integer code for the LSM !! @param[in] isot Integer code for the soil type data set !! @param[in] ivegsrc Integer code for the vegetation type data set !! @param[in] lensfc Number of land points for this tile !! @param[in] lsoil Number of soil layers +!! @param[in] lsoil_incr Number of soil layers (from top) to apply soil increments to !! @param[in] rsoiltype Array of input soil types !! @param[in] mask Mask indicating surface type !! @param[in] stc_bck Background soil temperature states @@ -462,17 +530,17 @@ end subroutine calculate_landinc_mask !! @param[in] zsoil Depth of bottom of each soil layer !! 
@author Clara Draper @date April 2021 -subroutine apply_land_da_adjustments_soil(lsm, isot, ivegsrc,lensfc, & +subroutine apply_land_da_adjustments_soil(lsoil_incr, lsm, isot, ivegsrc,lensfc, & lsoil, rsoiltype, mask, stc_bck, stc_adj, smc_adj, slc_adj, & stc_updated, slc_updated, zsoil) use mpi - use set_soilveg_snippet_mod, only: set_soilveg + use set_soilveg_snippet_mod, only: set_soilveg_noah,set_soilveg_noahmp use sflx_snippet, only: frh2o implicit none - integer, intent(in) :: lsm, lensfc, lsoil, isot, ivegsrc + integer, intent(in) :: lsoil_incr, lsm, lensfc, lsoil, isot, ivegsrc real, intent(in) :: rsoiltype(lensfc) ! soil types, as real integer, intent(in) :: mask(lensfc) real, intent(in) :: stc_bck(lensfc, lsoil) @@ -485,7 +553,7 @@ subroutine apply_land_da_adjustments_soil(lsm, isot, ivegsrc,lensfc, & logical :: frzn_bck, frzn_anl logical :: soil_freeze, soil_ice - integer :: i, l, n_freeze, n_thaw, ierr, n_revert + integer :: i, l, n_freeze, n_thaw, ierr integer :: myrank, soiltype, iret, n_stc, n_slc logical :: upd_slc, upd_stc @@ -495,6 +563,10 @@ subroutine apply_land_da_adjustments_soil(lsm, isot, ivegsrc,lensfc, & real, dimension(30) :: maxsmc, bb, satpsi real, dimension(4) :: dz ! layer thickness + real, parameter :: hfus=0.3336e06 !< latent heat of fusion(j/kg) + real, parameter :: grav=9.80616 !< gravity accel.(m/s2) + real :: smp !< for computing supercooled water + call mpi_comm_rank(mpi_comm_world, myrank, ierr) if (lsm==lsm_noah) then @@ -508,9 +580,9 @@ subroutine apply_land_da_adjustments_soil(lsm, isot, ivegsrc,lensfc, & select case (lsm ) case(lsm_noah) ! 
initialise soil properties - call set_soilveg(isot, ivegsrc, maxsmc, bb, satpsi, iret) + call set_soilveg_noah(isot, ivegsrc, maxsmc, bb, satpsi, iret) if (iret < 0) then - print *, 'FATAL ERROR: problem in set_soilveg' + print *, 'FATAL ERROR: problem in set_soilveg_noah' call mpi_abort(mpi_comm_world, 10, ierr) endif @@ -549,30 +621,39 @@ subroutine apply_land_da_adjustments_soil(lsm, isot, ivegsrc,lensfc, & if (upd_stc) then - print *, 'Reverting frozen noah-mp model stc back to background' - n_revert=0 + call set_soilveg_noahmp(isot, ivegsrc, maxsmc, bb, satpsi, iret) + if (iret < 0) then + print *, 'FATAL ERROR: problem in set_soilveg_noahmp' + call mpi_abort(mpi_comm_world, 10, ierr) + endif + n_stc = 0 n_slc = 0 do i=1,lensfc - if (stc_updated(i) == 1 ) then + if (stc_updated(i) == 1 ) then ! soil-only location n_stc = n_stc+1 - ! remove soil temperature increments if frozen - soil_freeze=.false. - soil_ice=.false. + soiltype = nint(rsoiltype(i)) do l = 1, lsoil_incr - if ( min(stc_bck(i,l),stc_adj(i,l)) < tfreez) soil_freeze=.true. - if ( smc_adj(i,l) - slc_adj(i,l) > 0.001 ) soil_ice=.true. - if ( soil_freeze .or. soil_ice ) then - ! for now, revert update. Later, adjust SMC/SLC for update. - if (l==1) n_revert = n_revert+1 - stc_adj(i,l)=stc_bck(i,l) + !case 1: frz ==> frz, recalculate slc, smc remains + !case 2: unfrz ==> frz, recalculate slc, smc remains + !both cases are considered in the following if case + if (stc_adj(i,l) .LT. tfreez )then + !recompute supercool liquid water,smc_anl remain unchanged + smp = hfus*(tfreez-stc_adj(i,l))/(grav*stc_adj(i,l)) !(m) + slc_new=maxsmc(soiltype)*(smp/satpsi(soiltype))**(-1./bb(soiltype)) + slc_adj(i,l) = max( min( slc_new, smc_adj(i,l)), 0.0 ) + endif + !case 3: frz ==> unfrz, melt all soil ice (if any) + if (stc_adj(i,l) .GT. 
tfreez )then !do not rely on stc_bck + slc_adj(i,l)=smc_adj(i,l) endif enddo - endif + endif enddo endif + if (upd_slc) then dz(1) = -zsoil(1) @@ -604,7 +685,6 @@ subroutine apply_land_da_adjustments_soil(lsm, isot, ivegsrc,lensfc, & write(*,'(a,i8)') ' soil grid total', lensfc write(*,'(a,i8)') ' soil grid cells with slc update', n_slc write(*,'(a,i8)') ' soil grid cells with stc update', n_stc - write(*,'(a,i8)') ' soil grid cells reverted', n_revert end subroutine apply_land_da_adjustments_soil diff --git a/sorc/global_cycle.fd/read_write_data.f90 b/sorc/global_cycle.fd/read_write_data.f90 index 71dabb465..5221f840e 100644 --- a/sorc/global_cycle.fd/read_write_data.f90 +++ b/sorc/global_cycle.fd/read_write_data.f90 @@ -79,6 +79,7 @@ MODULE READ_WRITE_DATA !! @param[in] lensfc Total number of points on a tile. !! @param[in] lsoil Number of soil layers. !! @param[in] do_nsst When true, nsst fields were processed. + !! @param[in] inc_file When true, write out increments to files !! @param[in] nsst Data structure containing nsst fields. !! @param[in] slifcs Land-sea mask. !! @param[in] tsffcs Skin temperature. @@ -114,17 +115,20 @@ MODULE READ_WRITE_DATA !! @param[in] slcfcs Liquid portion of volumetric soil moisture. !! @param[in] smcfcs Total volumetric soil moisture. !! @param[in] stcfcs Soil temperature. + !! @param[in] stcinc Soil temperature increments on the cubed-sphere tiles + !! @param[in] slcinc Liquid soil moisture increments on the cubed-sphere tiles !! !! 
@author George Gayno NOAA/EMC subroutine write_data(lensfc,idim,jdim,lsoil, & - do_nsst,nsst,slifcs,tsffcs,vegfcs,swefcs, & + do_nsst,inc_file,nsst,slifcs,tsffcs,vegfcs,swefcs, & tg3fcs,zorfcs,albfcs,alffcs, & cnpfcs,f10m,t2m,q2m,vetfcs, & sotfcs,ustar,fmm,fhh,sicfcs, & sihfcs,sitfcs,tprcp,srflag, & swdfcs,vmnfcs,vmxfcs,slpfcs, & - absfcs,slcfcs,smcfcs,stcfcs) + absfcs,slcfcs,smcfcs,stcfcs, & + stcinc, slcinc) use mpi @@ -134,6 +138,7 @@ subroutine write_data(lensfc,idim,jdim,lsoil, & integer, intent(in) :: idim, jdim logical, intent(in) :: do_nsst + logical, intent(in) :: inc_file real, intent(in), optional :: slifcs(lensfc),tsffcs(lensfc) real, intent(in), optional :: swefcs(lensfc),tg3fcs(lensfc) @@ -150,10 +155,12 @@ subroutine write_data(lensfc,idim,jdim,lsoil, & real, intent(in), optional :: vmxfcs(lensfc), slpfcs(lensfc) real, intent(in), optional :: absfcs(lensfc), slcfcs(lensfc,lsoil) real, intent(in), optional :: smcfcs(lensfc,lsoil), stcfcs(lensfc,lsoil) + real, intent(in), optional :: stcinc(lensfc,lsoil) + real, intent(in), optional :: slcinc(lensfc,lsoil) type(nsst_data), intent(in) :: nsst - integer :: dim_x, dim_y, dim_time, dims_3d(3) + integer :: dim_x, dim_y, dim_soil, dim_time, dims_3d(3) real :: dum2d(idim,jdim), dum3d(idim,jdim,lsoil) @@ -161,11 +168,14 @@ subroutine write_data(lensfc,idim,jdim,lsoil, & character(len=3) :: rankch integer :: myrank, error, ncid, id_var + integer :: varid_stc, varid_slc call mpi_comm_rank(mpi_comm_world, myrank, error) write(rankch, '(i3.3)') (myrank+1) + if (.NOT.(inc_file)) then + fnbgso = "./fnbgso." // rankch print* @@ -472,6 +482,47 @@ subroutine write_data(lensfc,idim,jdim,lsoil, & call remove_checksum(ncid, id_var) endif + else + + fnbgso = "./gaussian_interp." // rankch + print* + print*,"Write increments onto cubed sphere tiles to: ", trim(fnbgso) + + error=nf90_create(trim(fnbgso),NF90_64BIT_OFFSET,ncid) + CALL netcdf_err(error, 'OPENING FILE: '//trim(fnbgso) ) + + ! Define dimensions in the file. 
+ error = nf90_def_dim(ncid, "xaxis_1", idim, dim_x) + call netcdf_err(error, 'defining xaxis_1') + + error = nf90_def_dim(ncid, "yaxis_1", jdim, dim_y) + call netcdf_err(error, 'defining yaxis_1') + + error = nf90_def_dim(ncid, "soil_levels",lsoil, dim_soil) + call netcdf_err(error, 'defining soil_levels') + + ! Define variables in the file. + error=nf90_def_var(ncid, "slc_inc", NF90_DOUBLE, & + (/dim_x,dim_y,dim_soil/),varid_slc) + call netcdf_err(error, 'defining slc_inc'); + + error=nf90_def_var(ncid, "stc_inc", NF90_DOUBLE, & + (/dim_x,dim_y,dim_soil/),varid_stc) + call netcdf_err(error, 'defining stc_inc'); + + error = nf90_enddef(ncid) + + ! Put variables in the file. + dum3d = reshape(stcinc, (/idim,jdim,lsoil/)) + error = nf90_put_var( ncid, varid_stc, dum3d) + call netcdf_err(error, 'writing stc_inc record' ) + + dum3d = reshape(slcinc, (/idim,jdim,lsoil/)) + error = nf90_put_var( ncid, varid_slc, dum3d) + call netcdf_err(error, 'writing slc_inc record' ) + + endif + if(do_nsst) then error=nf90_inq_varid(ncid, "tref", id_var) @@ -984,8 +1035,11 @@ END SUBROUTINE READ_GSI_DATA !! @param[in] LSOIL Number of soil layers. !! @param[in] LENSFC Total number of points on a tile. !! @param[in] DO_NSST When true, nsst fields are read. + !! @param[in] DO_SNO_INC_JEDI When true, read in snow increment file + !! @param[in] DO_SOI_INC_JEDI When true, read in soil increment file !! @param[in] INC_FILE When true, read from an increment file. !! False reads from a restart file. + !! increments are on the cubed-sphere tiles !! @param[out] IS_NOAHMP When true, process for the Noah-MP LSM. !! @param[out] TSFFCS Skin Temperature. !! @param[out] SMCFCS Total volumetric soil moisture. @@ -1025,8 +1079,13 @@ END SUBROUTINE READ_GSI_DATA !! @param[out] SLMASK Land-sea mask without ice flag. !! @param[out] ZSOIL Soil layer thickness. !! @param[out] NSST Data structure containing nsst fields. + !! @param[in] SLCINC Liquid soil moisture increments on the cubed-sphere tiles + !! 
@param[in] STCINC Soil temperature increments on the cubed-sphere tiles !! @author George Gayno NOAA/EMC - SUBROUTINE READ_DATA(LSOIL,LENSFC,DO_NSST,INC_FILE,IS_NOAHMP, & + !! @author Yuan Xue: add capability to read soil related increments on the + !! cubed-sphere tiles directly + SUBROUTINE READ_DATA(LSOIL,LENSFC,DO_NSST,DO_SNO_INC_JEDI,& + DO_SOI_INC_JEDI,INC_FILE,IS_NOAHMP, & TSFFCS,SMCFCS,SWEFCS,STCFCS, & TG3FCS,ZORFCS, & CVFCS,CVBFCS,CVTFCS,ALBFCS, & @@ -1036,6 +1095,7 @@ SUBROUTINE READ_DATA(LSOIL,LENSFC,DO_NSST,INC_FILE,IS_NOAHMP, & SIHFCS,SICFCS,SITFCS, & TPRCP,SRFLAG,SNDFCS, & VMNFCS,VMXFCS,SLCFCS, & + STCINC,SLCINC, & SLPFCS,ABSFCS,T2M,Q2M,SLMASK, & ZSOIL,NSST) USE MPI @@ -1044,6 +1104,7 @@ SUBROUTINE READ_DATA(LSOIL,LENSFC,DO_NSST,INC_FILE,IS_NOAHMP, & INTEGER, INTENT(IN) :: LSOIL, LENSFC LOGICAL, INTENT(IN) :: DO_NSST, INC_FILE + LOGICAL, INTENT(IN) :: DO_SNO_INC_JEDI, DO_SOI_INC_JEDI LOGICAL, OPTIONAL, INTENT(OUT) :: IS_NOAHMP @@ -1065,6 +1126,8 @@ SUBROUTINE READ_DATA(LSOIL,LENSFC,DO_NSST,INC_FILE,IS_NOAHMP, & REAL, OPTIONAL, INTENT(OUT) :: SLCFCS(LENSFC,LSOIL) REAL, OPTIONAL, INTENT(OUT) :: SMCFCS(LENSFC,LSOIL) REAL, OPTIONAL, INTENT(OUT) :: STCFCS(LENSFC,LSOIL) + REAL, OPTIONAL, INTENT(OUT) :: STCINC(LENSFC,LSOIL) + REAL, OPTIONAL, INTENT(OUT) :: SLCINC(LENSFC,LSOIL) REAL(KIND=4), OPTIONAL, INTENT(OUT) :: ZSOIL(LSOIL) TYPE(NSST_DATA), OPTIONAL :: NSST ! intent(out) will crash @@ -1083,8 +1146,10 @@ SUBROUTINE READ_DATA(LSOIL,LENSFC,DO_NSST,INC_FILE,IS_NOAHMP, & WRITE(RANKCH, '(I3.3)') (MYRANK+1) - IF (INC_FILE) THEN - FNBGSI = "./xainc." // RANKCH + IF ((INC_FILE) .and. (DO_SNO_INC_JEDI)) THEN + FNBGSI = "./snow_xainc." // RANKCH + ELSEIF ((INC_FILE) .and. (DO_SOI_INC_JEDI)) THEN + FNBGSI = "./soil_xainc." // RANKCH ELSE FNBGSI = "./fnbgsi." 
// RANKCH ENDIF @@ -1095,6 +1160,20 @@ SUBROUTINE READ_DATA(LSOIL,LENSFC,DO_NSST,INC_FILE,IS_NOAHMP, & ERROR=NF90_OPEN(TRIM(FNBGSI),NF90_NOWRITE,NCID) CALL NETCDF_ERR(ERROR, 'OPENING FILE: '//TRIM(FNBGSI) ) + IF ((INC_FILE) .and. (DO_SOI_INC_JEDI)) THEN + + ERROR=NF90_INQ_DIMID(NCID, 'grid_xt', ID_DIM) + CALL NETCDF_ERR(ERROR, 'READING grid_xt' ) + ERROR=NF90_INQUIRE_DIMENSION(NCID,ID_DIM,LEN=IDIM) + CALL NETCDF_ERR(ERROR, 'READING grid_xt' ) + + ERROR=NF90_INQ_DIMID(NCID, 'grid_yt', ID_DIM) + CALL NETCDF_ERR(ERROR, 'READING grid_yt' ) + ERROR=NF90_INQUIRE_DIMENSION(NCID,ID_DIM,LEN=JDIM) + CALL NETCDF_ERR(ERROR, 'READING grid_yt' ) + + ELSE + ERROR=NF90_INQ_DIMID(NCID, 'xaxis_1', ID_DIM) CALL NETCDF_ERR(ERROR, 'READING xaxis_1' ) ERROR=NF90_INQUIRE_DIMENSION(NCID,ID_DIM,LEN=IDIM) @@ -1105,6 +1184,8 @@ SUBROUTINE READ_DATA(LSOIL,LENSFC,DO_NSST,INC_FILE,IS_NOAHMP, & ERROR=NF90_INQUIRE_DIMENSION(NCID,ID_DIM,LEN=JDIM) CALL NETCDF_ERR(ERROR, 'READING yaxis_1' ) + ENDIF + IF ((IDIM*JDIM) /= LENSFC) THEN PRINT*,'FATAL ERROR: DIMENSIONS WRONG.' CALL MPI_ABORT(MPI_COMM_WORLD, 88, IERR) @@ -1512,6 +1593,22 @@ SUBROUTINE READ_DATA(LSOIL,LENSFC,DO_NSST,INC_FILE,IS_NOAHMP, & STCFCS = RESHAPE(DUMMY3D, (/LENSFC,LSOIL/)) ENDIF + IF (PRESENT(SLCINC)) THEN + ERROR=NF90_INQ_VARID(NCID, "soill", ID_VAR) + CALL NETCDF_ERR(ERROR, 'READING soill ID' ) + ERROR=NF90_GET_VAR(NCID, ID_VAR, dummy3d) + CALL NETCDF_ERR(ERROR, 'READING slc increments' ) + SLCINC = RESHAPE(DUMMY3D, (/LENSFC,LSOIL/)) + ENDIF + + IF (PRESENT(STCINC)) THEN + ERROR=NF90_INQ_VARID(NCID, "soilt", ID_VAR) + CALL NETCDF_ERR(ERROR, 'READING soilt ID' ) + ERROR=NF90_GET_VAR(NCID, ID_VAR, dummy3d) + CALL NETCDF_ERR(ERROR, 'READING stc increments' ) + STCINC = RESHAPE(DUMMY3D, (/LENSFC,LSOIL/)) + ENDIF + DEALLOCATE(DUMMY3D) ! cloud fields not in warm restart files. set to zero? 
diff --git a/sorc/grid_tools.fd/filter_topo.fd/CMakeLists.txt b/sorc/grid_tools.fd/filter_topo.fd/CMakeLists.txt index 0f1db12b7..e8789caaf 100644 --- a/sorc/grid_tools.fd/filter_topo.fd/CMakeLists.txt +++ b/sorc/grid_tools.fd/filter_topo.fd/CMakeLists.txt @@ -24,6 +24,10 @@ target_link_libraries( PUBLIC NetCDF::NetCDF_Fortran) +if(OpenMP_Fortran_FOUND) + target_link_libraries(filter_topo_lib PUBLIC OpenMP::OpenMP_Fortran) +endif() + target_link_libraries(${exe_name} PRIVATE filter_topo_lib) install(TARGETS ${exe_name}) diff --git a/sorc/grid_tools.fd/filter_topo.fd/filter_topo.F90 b/sorc/grid_tools.fd/filter_topo.fd/filter_topo.F90 index d4b5d672a..4bc01b448 100644 --- a/sorc/grid_tools.fd/filter_topo.fd/filter_topo.F90 +++ b/sorc/grid_tools.fd/filter_topo.fd/filter_topo.F90 @@ -9,6 +9,7 @@ !! @author Zhi Liang (GFDL) who packaged it into a standalone application. program filter_topo + use omp_lib use utils implicit none @@ -34,7 +35,7 @@ program filter_topo real:: peak_fac ! overshoot factor for the mountain peak real:: max_slope ! max allowable terrain slope: 1 --> 45 deg - integer :: n_del2_weak + integer :: n_del2_weak, tid, nthreads integer :: ntiles = 0 @@ -50,6 +51,17 @@ program filter_topo integer :: is,ie,js,je,isd,ied,jsd,jed integer,parameter :: ng = 3 integer :: nx, ny, npx, npy, nx_nest, ny_nest, npx_nest, npy_nest, is_nest, ie_nest, js_nest, je_nest, isd_nest, ied_nest, jsd_nest, jed_nest + +!$OMP PARALLEL PRIVATE(TID) + tid = omp_get_thread_num() + if (tid == 0) then + nthreads = omp_get_num_threads() + print* + print*,'- BEGIN EXECUTION WITH NUMBER OF THREADS = ',nthreads + print* + endif +!$OMP END PARALLEL + !--- read namelist call read_namelist() @@ -69,6 +81,9 @@ program filter_topo !--- write out the data call write_topo_file(is,ie,js,je,ntiles,oro(is:ie,js:je,:),regional ) + print* + print*,'- NORMAL TERMINATION.' + contains !> ??? 
@@ -760,6 +775,7 @@ subroutine read_grid_file(regional) geolat_t(:,:,:) = -1.e25 do t = 1, ntiles +!$OMP PARALLEL DO DEFAULT(SHARED) PRIVATE(I,J,G1,G2,G3,G4,G5) do j=js,je ; do i=is,ie g1(1) = geolon_c(i,j,t); g1(2) = geolat_c(i,j,t) g2(1) = geolon_c(i+1,j,t); g2(2) = geolat_c(i+1,j,t) @@ -769,8 +785,8 @@ subroutine read_grid_file(regional) geolon_t(i,j,t) = g5(1) geolat_t(i,j,t) = g5(2) enddo ; enddo +!$OMP END PARALLEL DO enddo - if( .not. regional ) then call fill_cubic_grid_halo(geolon_t, geolon_t, ng, 0, 0, 1, 1) @@ -783,6 +799,7 @@ subroutine read_grid_file(regional) allocate(dx(isd:ied,jsd:jed+1,ntiles)) allocate(dy(isd:ied+1,jsd:jed,ntiles)) do t = 1, ntiles +!$OMP PARALLEL DO DEFAULT(SHARED) PRIVATE(I,J,G1,G2) do j = js, je+1 ; do i = is, ie g1(1) = geolon_c(i ,j,t) g1(2) = geolat_c(i ,j,t) @@ -790,8 +807,10 @@ subroutine read_grid_file(regional) g2(2) = geolat_c(i+1,j,t) dx(i,j,t) = great_circle_dist( g2, g1, radius ) enddo ; enddo +!$OMP END PARALLEL DO enddo do t = 1, ntiles +!$OMP PARALLEL DO DEFAULT(SHARED) PRIVATE(I,J,G1,G2) do j = js, je do i = is, ie+1 g1(1) = geolon_c(i,j, t) @@ -801,6 +820,7 @@ subroutine read_grid_file(regional) dy(i,j,t) = great_circle_dist( g2, g1, radius ) enddo enddo +!$OMP END PARALLEL DO enddo if( .not. 
regional ) then @@ -831,6 +851,7 @@ subroutine read_grid_file(regional) allocate(dxa(isd:ied,jsd:jed,ntiles)) allocate(dya(isd:ied,jsd:jed,ntiles)) do t = 1, ntiles +!$OMP PARALLEL DO DEFAULT(SHARED) PRIVATE(I,J,G1,G2,G3,G4) do j=js,je ; do i=is,ie g1(1) = geolon_c(i,j,t); g1(2) = geolat_c(i,j,t) g2(1) = geolon_c(i,j+1,t); g2(2) = geolat_c(i,j+1,t) @@ -847,6 +868,7 @@ subroutine read_grid_file(regional) call mid_pt_sphere(g1, g2, g4) dya(i,j,t) = great_circle_dist( g4, g3, radius ) enddo; enddo +!$OMP END PARALLEL DO enddo if( .not.regional ) then @@ -860,6 +882,8 @@ subroutine read_grid_file(regional) allocate(dxc(isd:ied+1,jsd:jed,ntiles)) allocate(dyc(isd:ied,jsd:jed+1,ntiles)) do t = 1, ntiles + +!$OMP PARALLEL DO DEFAULT(SHARED) PRIVATE(I,J,G1,G2) do j=jsd,jed do i=isd+1,ied g1(1) = geolon_c(i,j,t); g1(2) = geolat_c(i,j,t) @@ -869,7 +893,9 @@ subroutine read_grid_file(regional) dxc(isd,j,t) = dxc(isd+1,j,t) dxc(ied+1,j,t) = dxc(ied,j,t) enddo +!$OMP END PARALLEL DO +!$OMP PARALLEL DO DEFAULT(SHARED) PRIVATE(I,J,G1,G2) do j=jsd+1,jed do i=isd,ied g1(1) = geolon_c(i,j,t); g1(2) = geolat_c(i,j,t) @@ -877,15 +903,20 @@ subroutine read_grid_file(regional) dyc(i,j,t) = great_circle_dist(g1, g2, radius) enddo enddo +!$OMP END PARALLEL DO + +!$OMP PARALLEL DO DEFAULT(SHARED) PRIVATE(I) do i=isd,ied dyc(i,jsd,t) = dyc(i,jsd+1,t) dyc(i,jed+1,t) = dyc(i,jed,t) end do +!$OMP END PARALLEL DO enddo !--- compute area allocate(area(isd:ied,jsd:jed,ntiles)) do t = 1, ntiles +!$OMP PARALLEL DO DEFAULT(SHARED) PRIVATE(I,J,p_lL,p_uL,p_lr,p_uR) do j=js,je do i=is,ie p_lL(1) = geolon_c(i ,j ,t) ; p_lL(2) = geolat_c(i ,j ,t) @@ -897,6 +928,7 @@ subroutine read_grid_file(regional) area(i,j,t) = get_area(p_lL, p_uL, p_lR, p_uR, radius) enddo enddo +!$OMP END PARALLEL DO enddo if( .not.regional ) then @@ -918,6 +950,8 @@ subroutine read_grid_file(regional) ! | | ! 
6---2---7 do t = 1, ntiles + +!$OMP PARALLEL DO DEFAULT(SHARED) PRIVATE(I,J,G1) do j=js,je+1 do i = is,ie+1 g1(1) = geolon_c(i,j,t) @@ -925,6 +959,9 @@ subroutine read_grid_file(regional) call latlon2xyz(g1, grid3(:,i,j)) enddo enddo +!$OMP END PARALLEL DO + +!$OMP PARALLEL DO DEFAULT(SHARED) PRIVATE(I,J,G1,P1,P3) do j=js,je do i=is,ie g1(1) = geolon_t(i,j,t); g1(2) = geolat_t(i,j,t) @@ -939,13 +976,16 @@ subroutine read_grid_file(regional) cos_sg(4,i,j) = cos_angle( p1, grid3(1,i,j+1), p3 ) enddo enddo +!$OMP END PARALLEL DO do ip=1,4 +!$OMP PARALLEL DO DEFAULT(SHARED) PRIVATE(I,J) do j=js,je do i=is,ie sin_sg(ip,i,j,t) = min(1.0, sqrt( max(0., 1.-cos_sg(ip,i,j)**2) ) ) enddo enddo +!$OMP END PARALLEL DO enddo enddo diff --git a/sorc/lsm_routines.fd/noah.fd/set_soilveg_snippet.f90 b/sorc/lsm_routines.fd/noah.fd/set_soilveg_snippet.f90 index 0ba432ba1..ec2200d92 100644 --- a/sorc/lsm_routines.fd/noah.fd/set_soilveg_snippet.f90 +++ b/sorc/lsm_routines.fd/noah.fd/set_soilveg_snippet.f90 @@ -5,13 +5,21 @@ !> Below was extracted from namelist_soilveg.f and set_soilveg.f !! (couldn't get above to compile for doxygen) +!> Add Noah-MP LSM soil and veg params needed for global_cycle +!> Noah-MP related parameters were extracted from noahmp_table.f +!> isot (soil type) = 1: STATSGO must be selected if NoahMP is used +!> ivet (vegetation type) = 1: IBGP is used by UFS offline Land DA for Noah-MP +!> as of 07/13/2023 +!> @author Yuan Xue + module set_soilveg_snippet_mod implicit none private - public set_soilveg + public set_soilveg_noah + public set_soilveg_noahmp contains @@ -23,7 +31,7 @@ module set_soilveg_snippet_mod !! @param[out] bb B exponent for each soil type !! @param[out] satpsi Saturated matric potential for each soil type !! 
@param[out] iret Return integer -subroutine set_soilveg(isot,ivet, maxsmc, bb, satpsi, iret) +subroutine set_soilveg_noah(isot,ivet, maxsmc, bb, satpsi, iret) implicit none integer, intent(in) :: isot,ivet @@ -85,6 +93,51 @@ subroutine set_soilveg(isot,ivet, maxsmc, bb, satpsi, iret) iret = 0 -end subroutine set_soilveg +end subroutine set_soilveg_noah + +!> This subroutine initializes soil and vegetation +!! parameters needed in global_cycle/land_increment.f90 for noah-mp +!! @param[in] isot Soil type +!! @param[in] ivet Vegetation type +!! @param[out] maxsmc Maximum soil moisture for each soil type +!! @param[out] bb B exponent for each soil type +!! @param[out] satpsi Saturated matric potential for each soil type +!! @param[out] iret Return integer +subroutine set_soilveg_noahmp(isot,ivet, maxsmc, bb, satpsi,iret) + + implicit none + + integer, intent(in) :: isot,ivet !ivet is *not* used for now + real, dimension(30), intent(out) :: maxsmc, bb, satpsi + integer, intent(out) :: iret + + if (isot .eq. 1) then + +! 
set soil-dependent params (STATSGO is the only option for UFS, 07/13/2023) + maxsmc= (/0.339, 0.421, 0.434, 0.476, 0.484,& + & 0.439, 0.404, 0.464, 0.465, 0.406, 0.468, 0.468, & + & 0.439, 1.000, 0.200, 0.421, 0.468, 0.200, & + & 0.339, 0.339, 0.000, 0.000, 0.000, 0.000, & + & 0.000, 0.000, 0.000, 0.000, 0.000, 0.000/) + bb= (/2.79, 4.26, 4.74, 5.33, 3.86, 5.25,& + & 6.77, 8.72, 8.17, 10.73, 10.39, 11.55, & + & 5.25, 0.0, 2.79, 4.26, 11.55, 2.79, & + & 2.79, 0.00, 0.00, 0.00, 0.00, 0.00, & + & 0.00, 0.00, 0.00, 0.00, 0.00, 0.00/) + satpsi= (/0.069, 0.036, 0.141, 0.759, 0.955, & + & 0.355, 0.135, 0.617, 0.263, 0.098, 0.324, 0.468, & + & 0.355, 0.00, 0.069, 0.036, 0.468, 0.069, & + & 0.069, 0.00, 0.00, 0.00, 0.00, 0.00, & + & 0.00, 0.00, 0.00, 0.00, 0.00, 0.00/) + + else + print*, 'For Noah-MP, set_soilveg is not supported for soil type ', isot + iret = -1 + return + + endif + + iret = 0 +end subroutine set_soilveg_noahmp end module set_soilveg_snippet_mod diff --git a/sorc/machine-setup.sh b/sorc/machine-setup.sh index 9d7267697..18388c66f 100644 --- a/sorc/machine-setup.sh +++ b/sorc/machine-setup.sh @@ -38,43 +38,17 @@ elif [[ -d /scratch1 ]] ; then fi target=hera module purge -elif [[ -d /lustre && -d /ncrc ]] ; then +elif [[ -d /gpfs && -d /ncrc ]] ; then # We are on GAEA. if ( ! eval module help > /dev/null 2>&1 ) ; then - # We cannot simply load the module command. The GAEA - # /etc/profile modifies a number of module-related variables - # before loading the module command. Without those variables, - # the module command fails. Hence we actually have to source - # /etc/profile here. 
- source /etc/profile - __ms_source_etc_profile=yes - else - __ms_source_etc_profile=no - fi - module purge > /dev/null 2>&1 - module purge -# clean up after purge - unset _LMFILES_ - unset _LMFILES_000 - unset _LMFILES_001 - unset LOADEDMODULES - module load modules - if [[ -d /opt/cray/ari/modulefiles ]] ; then - module use -a /opt/cray/ari/modulefiles - fi - if [[ -d /opt/cray/pe/ari/modulefiles ]] ; then - module use -a /opt/cray/pe/ari/modulefiles - fi - if [[ -d /opt/cray/pe/craype/default/modulefiles ]] ; then - module use -a /opt/cray/pe/craype/default/modulefiles - fi - if [[ -s /etc/opt/cray/pe/admin-pe/site-config ]] ; then - source /etc/opt/cray/pe/admin-pe/site-config - fi - if [[ "$__ms_source_etc_profile" == yes ]] ; then + # We cannot simply load the module command. The GAEA + # /etc/profile modifies a number of module-related variables + # before loading the module command. Without those variables, + # the module command fails. Hence we actually have to source + # /etc/profile here. source /etc/profile - unset __ms_source_etc_profile fi + module reset target=gaea elif [[ "$(hostname)" =~ "Orion" ]]; then target="orion" diff --git a/sorc/orog_mask_tools.fd/orog.fd/mtnlm7_oclsm.F b/sorc/orog_mask_tools.fd/orog.fd/mtnlm7_oclsm.F index 9e65956d5..f05c39aa5 100644 --- a/sorc/orog_mask_tools.fd/orog.fd/mtnlm7_oclsm.F +++ b/sorc/orog_mask_tools.fd/orog.fd/mtnlm7_oclsm.F @@ -77,28 +77,19 @@ character(len=256) :: INPUTOROG = "none" character(len=256) :: merge_file = "none" logical :: mask_only = .false. - integer :: MTNRES,IM,JM,NM,NR,NF0,NF1,EFAC,BLAT,NW + integer :: MTNRES,IM,JM,NM,NR,NF0,NF1,EFAC,NW fsize=65536 - READ(5,*) MTNRES,IM,JM,NM,NR,NF0,NF1,EFAC,BLAT READ(5,*) OUTGRID - READ(5,*) INPUTOROG READ(5,*) mask_only READ(5,*) merge_file -! MTNRES=1 -! IM=48 -! JM=48 -! NM=46 -! NF0=0 -! NF1=0 -! efac=0 -! blat=0 -! NR=0 -! OUTGRID = "C48_grid.tile1.nc" -! 
INPUTOROG = "oro.288x144.nc" - print*, "INPUTOROG=", trim(INPUTOROG) - print*, "IM,JM=", IM, JM + NM=0 + NF0=0 + NF1=0 + EFAC=0 + NR=0 + print*, "INPUTOROG= ", trim(INPUTOROG) print*, "MASK_ONLY", mask_only - print*, "MERGE_FILE", trim(merge_file) + print*, "MERGE_FILE ", trim(merge_file) ! --- MTNRES defines the input (highest) elev resolution ! --- =1 is topo30 30" in units of 1/2 minute. ! so MTNRES for old values must be *2. @@ -106,58 +97,47 @@ ! --- other possibilities are =8 for 4' and =4 for 2' see ! HJ for T1000 test. Must set to 1 for now. MTNRES=1 - print*, MTNRES,IM,JM,NM,NR,NF0,NF1,EFAC,BLAT + print*, MTNRES,NM,NR,NF0,NF1,EFAC NW=(NM+1)*((NR+1)*NM+2) IMN = 360*120/MTNRES JMN = 180*120/MTNRES print *, ' Starting terr12 mtnlm7_slm30.f IMN,JMN:',IMN,JMN -! --- read the grid resolution if the OUTGRID exists. - if( trim(OUTGRID) .NE. "none" ) then - inquire(file=trim(OUTGRID), exist=fexist) - if(.not. fexist) then - print*, "FATAL ERROR: file "//trim(OUTGRID) - print*, " does not exist." - CALL ERREXIT(4) - endif - do ncid = 103, 512 - inquire( ncid,OPENED=opened ) - if( .NOT.opened )exit - end do +! --- read the grid resolution from OUTGRID. + inquire(file=trim(OUTGRID), exist=fexist) + if(.not. fexist) then + print*, "FATAL ERROR: file "//trim(OUTGRID) + print*, " does not exist." 
+ CALL ERREXIT(4) + endif + do ncid = 103, 512 + inquire( ncid,OPENED=opened ) + if( .NOT.opened )exit + end do - print*, "outgrid=", trim(outgrid) - error=NF__OPEN(trim(OUTGRID),NF_NOWRITE,fsize,ncid) - call netcdf_err(error, 'Open file '//trim(OUTGRID) ) - error=nf_inq_dimid(ncid, 'nx', id_dim) - call netcdf_err(error, 'inquire dimension nx from file '// + print*, "READ outgrid=", trim(outgrid) + error=NF__OPEN(trim(OUTGRID),NF_NOWRITE,fsize,ncid) + call netcdf_err(error, 'Open file '//trim(OUTGRID) ) + error=nf_inq_dimid(ncid, 'nx', id_dim) + call netcdf_err(error, 'inquire dimension nx from file '// & trim(OUTGRID) ) - error=nf_inq_dimlen(ncid,id_dim,nx) - call netcdf_err(error, 'inquire dimension nx length '// + error=nf_inq_dimlen(ncid,id_dim,nx) + call netcdf_err(error, 'inquire dimension nx length '// & 'from file '//trim(OUTGRID) ) - error=nf_inq_dimid(ncid, 'ny', id_dim) - call netcdf_err(error, 'inquire dimension ny from file '// + error=nf_inq_dimid(ncid, 'ny', id_dim) + call netcdf_err(error, 'inquire dimension ny from file '// & trim(OUTGRID) ) - error=nf_inq_dimlen(ncid,id_dim,ny) - call netcdf_err(error, 'inquire dimension ny length '// + error=nf_inq_dimlen(ncid,id_dim,ny) + call netcdf_err(error, 'inquire dimension ny length '// & 'from file '//trim(OUTGRID) ) - print*, "nx = ", nx - if(IM .ne. nx/2) then - print*, "IM=",IM, " /= grid file nx/2=",nx/2 - print*, "Set IM = ", nx/2 - IM = nx/2 - endif - if(JM .ne. ny/2) then - print*, "JM=",JM, " /= grid file ny/2=",ny/2 - print*, "Set JM = ", ny/2 - JM = ny/2 - endif - error=nf_close(ncid) - call netcdf_err(error, 'close file '//trim(OUTGRID) ) - - endif + IM = nx/2 + JM = ny/2 + print*, "nx, ny, im, jm = ", nx, ny, im, jm + error=nf_close(ncid) + call netcdf_err(error, 'close file '//trim(OUTGRID) ) - CALL TERSUB(IMN,JMN,IM,JM,NM,NR,NF0,NF1,NW,EFAC,BLAT, + CALL TERSUB(IMN,JMN,IM,JM,NM,NR,NF0,NF1,NW,EFAC, & OUTGRID,INPUTOROG,MASK_ONLY,MERGE_FILE) STOP END @@ -174,8 +154,6 @@ !! 
@param[in] NF1 Second order spectral filter parameters. !! @param[in] NW Number of waves. !! @param[in] EFAC Factor to adjust orography by its variance. -!! @param[in] BLAT When less than zero, reverse latitude/ -!! longitude for output. !! @param[in] OUTGRID The 'grid' file for the model tile. !! @param[in] INPUTOROG Input orography/GWD file on gaussian !! grid. When specified, will be interpolated to model tile. @@ -184,7 +162,7 @@ !! @param[in] MASK_ONLY Flag to generate the Land Mask only !! @param[in] MERGE_FILE Ocean merge file !! @author Jordan Alpert NOAA/EMC - SUBROUTINE TERSUB(IMN,JMN,IM,JM,NM,NR,NF0,NF1,NW,EFAC,BLAT, + SUBROUTINE TERSUB(IMN,JMN,IM,JM,NM,NR,NF0,NF1,NW,EFAC, & OUTGRID,INPUTOROG,MASK_ONLY,MERGE_FILE) implicit none include 'netcdf.inc' @@ -200,7 +178,7 @@ SUBROUTINE TERSUB(IMN,JMN,IM,JM,NM,NR,NF0,NF1,NW,EFAC,BLAT, real, PARAMETER :: PI=3.1415926535897931 integer, PARAMETER :: NMT=14 - integer :: efac,blat,zsave1,zsave2 + integer :: efac,zsave1,zsave2 integer :: mskocn,notocn integer :: i,j,nx,ny,ncid,js,jn,iw,ie,k,it,jt,error,id_dim integer :: id_var,nx_in,ny_in,fsize,wgta,IN,INW,INE,IS,ISW,ISE @@ -246,7 +224,7 @@ SUBROUTINE TERSUB(IMN,JMN,IM,JM,NM,NR,NF0,NF1,NW,EFAC,BLAT, complex :: ffj(im/2+1) logical :: grid_from_file,output_binary,fexist,opened - logical :: SPECTR, REVLAT, FILTER + logical :: SPECTR, FILTER logical :: is_south_pole(IM,JM), is_north_pole(IM,JM) logical :: LB(IM*JM) @@ -275,7 +253,6 @@ SUBROUTINE TERSUB(IMN,JMN,IM,JM,NM,NR,NF0,NF1,NW,EFAC,BLAT, DEGRAD = 180./PI SPECTR = NM .GT. 0 ! if NM <=0 grid is assumed lat/lon FILTER = .TRUE. ! Spectr Filter defaults true and set by NF1 & NF0 - REVLAT = BLAT .LT. 0 ! Reverse latitude/longitude for output MSKOCN = 1 ! Ocean land sea mask =1, =0 if not present NOTOCN = 1 ! =1 Ocean lsm input reverse: Ocean=1, land=0 ! --- The LSM Gaussian file from the ocean model sometimes arrives with @@ -329,8 +306,8 @@ SUBROUTINE TERSUB(IMN,JMN,IM,JM,NM,NR,NF0,NF1,NW,EFAC,BLAT, ! ! 
--- IMN,JMN - print*, ' IM, JM, NM, NR, NF0, NF1, EFAC, BLAT' - print*, IM,JM,NM,NR,NF0,NF1,EFAC,BLAT + print*, ' IM, JM, NM, NR, NF0, NF1, EFAC' + print*, IM,JM,NM,NR,NF0,NF1,EFAC print *,' imn,jmn,glob(imn,jmn)=',imn,jmn,glob(imn,jmn) print *,' UBOUND ZAVG=',UBOUND(ZAVG) print *,' UBOUND glob=',UBOUND(glob) @@ -409,7 +386,7 @@ SUBROUTINE TERSUB(IMN,JMN,IM,JM,NM,NR,NF0,NF1,NW,EFAC,BLAT, ! This code assumes that lat runs from north to south for gg! ! - print *,' SPECTR=',SPECTR,' REVLAT=',REVLAT,' ** with GICE-07 **' + print *,' SPECTR=',SPECTR,' ** with GICE-07 **' IF (SPECTR) THEN CALL SPLAT(4,JM,COSCLT,WGTCLT) DO J=1,JM/2 @@ -936,10 +913,10 @@ SUBROUTINE TERSUB(IMN,JMN,IM,JM,NM,NR,NF0,NF1,NW,EFAC,BLAT, & //trim(INPUTOROG) ) print*, "calling MAKEOA3 to compute OA, OL" - CALL MAKEOA3(ZAVG,zslm,VAR,GLAT,OA,OL,IWORK,ELVMAX,ORO,SLM, + CALL MAKEOA3(ZAVG,VAR,GLAT,OA,OL,IWORK,ELVMAX,ORO,SLM, 1 WORK1,WORK2,WORK3,WORK4,WORK5,WORK6, 2 IM,JM,IMN,JMN,geolon_c,geolat_c, - 3 geolon,geolat,is_south_pole,is_north_pole,nx_in,ny_in, + 3 geolon,geolat,nx_in,ny_in, 4 oa_in,ol_in,slm_in,lon_in,lat_in) deallocate(oa_in,ol_in,slm_in,lon_in,lat_in) @@ -1290,13 +1267,6 @@ SUBROUTINE TERSUB(IMN,JMN,IM,JM,NM,NR,NF0,NF1,NW,EFAC,BLAT, enddo ELSE - IF (REVLAT) THEN - CALL REVERS(IM, JM, numi, SLM, WORK1) - CALL REVERS(IM, JM, numi, ORO, WORK1) - DO IMT=1,NMT - CALL REVERS(IM, JM, numi, HPRIME(1,1,IMT), WORK1) - ENDDO - ENDIF ORS=0. ORF=ORO ENDIF @@ -1507,7 +1477,7 @@ SUBROUTINE TERSUB(IMN,JMN,IM,JM,NM,NR,NF0,NF1,NW,EFAC,BLAT, tend=timef() write(6,*)' Total runtime time= ',tend-tbeg1 RETURN - END + END SUBROUTINE TERSUB !> Create the orography, land-mask, standard deviation of !! orography and the convexity on a model gaussian grid. 
@@ -1545,9 +1515,7 @@ SUBROUTINE MAKEMT(ZAVG,ZSLM,ORO,SLM,VAR,VAR4, INTEGER ZAVG(IMN,JMN),ZSLM(IMN,JMN) DIMENSION ORO(IM,JM),SLM(IM,JM),VAR(IM,JM),VAR4(IM,JM) DIMENSION IST(IM,jm),IEN(IM,jm),JST(JM),JEN(JM),numi(jm) - LOGICAL FLAG, DEBUG -C==== DATA DEBUG/.TRUE./ - DATA DEBUG/.FALSE./ + LOGICAL FLAG C ! ---- OCLSM holds the ocean (im,jm) grid print *,' _____ SUBROUTINE MAKEMT ' @@ -3627,7 +3595,6 @@ end subroutine interpolate_mismatch !! is computed from the high-resolution orography data. !! !! @param[in] zavg High-resolution orography data. -!! @param[in] zslm High-resolution land-mask data. Not used. !! @param[in] var Standard deviation of orography on the model grid. !! @param[out] glat Latitude of each row of input terrain dataset. !! @param[out] oa4 Orographic asymmetry on the model grid. Four @@ -3654,8 +3621,6 @@ end subroutine interpolate_mismatch !! @param[in] lat_c Corner point latitudes of the model grid points. !! @param[in] lon_t Center point longitudes of the model grid points. !! @param[in] lat_t Center point latitudes of the model grid points. -!! @param[in] is_south_pole Not used. -!! @param[in] is_north_pole Not used. !! @param[in] imi 'i' dimension of input gfs orography data. !! @param[in] jmi 'j' dimension of input gfs orography data. !! @param[in] oa_in Asymmetry on the input gfs orography data. @@ -3664,10 +3629,10 @@ end subroutine interpolate_mismatch !! @param[in] lon_in Longitude on the input gfs orography data. !! @param[in] lat_in Latitude on the input gfs orography data. !! @author Jordan Alpert NOAA/EMC - SUBROUTINE MAKEOA3(ZAVG,zslm,VAR,GLAT,OA4,OL,IOA4,ELVMAX, + SUBROUTINE MAKEOA3(ZAVG,VAR,GLAT,OA4,OL,IOA4,ELVMAX, 1 ORO,SLM,oro1,XNSUM,XNSUM1,XNSUM2,XNSUM3,XNSUM4, 2 IM,JM,IMN,JMN,lon_c,lat_c,lon_t,lat_t, - 3 is_south_pole,is_north_pole,IMI,JMI,OA_IN,OL_IN, + 3 IMI,JMI,OA_IN,OL_IN, 4 slm_in,lon_in,lat_in) ! Required when using iplib v4.0 or higher. 
@@ -3681,7 +3646,7 @@ SUBROUTINE MAKEOA3(ZAVG,zslm,VAR,GLAT,OA4,OL,IOA4,ELVMAX, real, PARAMETER :: R2D=180./3.14159265358979 integer IM,JM,IMN,JMN,IMI,JMI real GLAT(JMN) - INTEGER ZAVG(IMN,JMN),ZSLM(IMN,JMN) + INTEGER ZAVG(IMN,JMN) real SLM(IM,JM) real ORO(IM,JM),ORO1(IM,JM),ELVMAX(IM,JM),ZMAX(IM,JM) real OA4(IM,JM,4) @@ -3691,26 +3656,16 @@ SUBROUTINE MAKEOA3(ZAVG,zslm,VAR,GLAT,OA4,OL,IOA4,ELVMAX, real lon_in(IMI,JMI), lat_in(IMI,JMI) real lon_c(IM+1,JM+1), lat_c(IM+1,JM+1) real lon_t(IM,JM), lat_t(IM,JM) - logical is_south_pole(IM,JM), is_north_pole(IM,JM) real XNSUM(IM,JM),XNSUM1(IM,JM),XNSUM2(IM,JM) real XNSUM3(IM,JM),XNSUM4(IM,JM) real VAR(IM,JM),OL(IM,JM,4) - LOGICAL FLAG integer i,j,ilist(IMN),numx,i1,j1,ii1 - integer KWD,II,npts + integer KWD real LONO(4),LATO(4),LONI,LATI - real DELXN,HC,HEIGHT,XNPU,XNPD,T + real DELXN,HC,HEIGHT,T integer NS0,NS1,NS2,NS3,NS4,NS5,NS6 logical inside_a_polygon - real lon,lat,dlon,dlat,dlat_old - real lon1,lat1,lon2,lat2 - real xnsum11,xnsum12,xnsum21,xnsum22,xnsumx - real HC_11, HC_12, HC_21, HC_22 - real xnsum1_11,xnsum1_12,xnsum1_21,xnsum1_22 - real xnsum2_11,xnsum2_12,xnsum2_21,xnsum2_22 - real get_lon_angle, get_lat_angle, get_xnsum - integer ist, ien, jst, jen - real xland,xwatr,xl1,xs1,oroavg + integer jst, jen integer int_opt, ipopt(20), kgds_input(200), kgds_output(200) integer count_land_output integer ij, ijmdl_output, iret, num_mismatch_land, num @@ -4055,45 +4010,6 @@ SUBROUTINE MAKEOA3(ZAVG,zslm,VAR,GLAT,OA4,OL,IOA4,ELVMAX, C RETURN END SUBROUTINE MAKEOA3 - -!> Reverse the east-west and north-south axes -!! in a two-dimensional array. -!! -!! @param [in] im 'i' dimension of the 2-d array. -!! @param [in] jm 'j' dimension of the 2-d array. -!! @param [in] numi Not used. -!! @param [inout] f The two-dimensional array to -!! be processed. -!! @param [out] wrk Two-dimensional work array. -!! @author Jordan Alpert NOAA/EMC - SUBROUTINE REVERS(IM, JM, numi, F, WRK) -! 
- REAL F(IM,JM), WRK(IM,JM) - integer numi(jm) - imb2 = im / 2 - do i=1,im*jm - WRK(i,1) = F(i,1) - enddo - do j=1,jm - jr = jm - j + 1 - do i=1,im - ir = i + imb2 - if (ir .gt. im) ir = ir - im - f(ir,jr) = WRK(i,j) - enddo - enddo -! - tem = 0.0 - do i=1,im - tem= tem + F(I,1) - enddo - tem = tem / im - do i=1,im - F(I,1) = tem - enddo -! - RETURN - END !> Convert from a reduced grid to a full grid. !! @@ -4340,7 +4256,7 @@ subroutine maxmin(ia,len,tile) ccmr integer*2 ia(len) character*7 tile - integer iaamax, iaamin, len, j, m, ja, kount + integer iaamax, iaamin, len, m, ja, kount integer(8) sum2,std,mean,isum integer i_count_notset,kount_9 ! --- missing is -9999 @@ -4723,13 +4639,13 @@ subroutine get_xnsum2(lon1,lat1,lon2,lat2,IMN,JMN, real, intent(out) :: xnsum1,xnsum2,HC logical verbose - real lon1,lat1,lon2,lat2,oro,delxn + real lon1,lat1,lon2,lat2,delxn integer IMN,JMN real glat(JMN) integer zavg(IMN,JMN) integer i, j, ist, ien, jst, jen, i1 real HEIGHT, var - real XW1,XW2,slm,xnsum + real XW1,XW2,xnsum !---figure out ist,ien,jst,jen do j = 1, JMN if( GLAT(J) .GT. lat1 ) then @@ -4817,13 +4733,12 @@ subroutine get_xnsum3(lon1,lat1,lon2,lat2,IMN,JMN, implicit none real, intent(out) :: xnsum1,xnsum2 - real lon1,lat1,lon2,lat2,oro,delxn + real lon1,lat1,lon2,lat2,delxn integer IMN,JMN real glat(JMN) integer zavg(IMN,JMN) integer i, j, ist, ien, jst, jen, i1 real HEIGHT, HC - real XW1,XW2,slm,xnsum !---figure out ist,ien,jst,jen ! if lat1 or lat 2 is 90 degree. 
set jst = JMN jst = JMN @@ -4895,7 +4810,6 @@ subroutine nanc(a,l,c) data inaq3/x'FFC00000'/ data inaq4/x'FFFFFFFF'/ c - real(kind=8)a(l),rtc,t1,t2 character*(*) c c t1=rtc() cgwv print *, ' nanc call ',c diff --git a/sorc/orog_mask_tools.fd/orog_gsl.fd/CMakeLists.txt b/sorc/orog_mask_tools.fd/orog_gsl.fd/CMakeLists.txt index 8704e4d54..e37889f46 100644 --- a/sorc/orog_mask_tools.fd/orog_gsl.fd/CMakeLists.txt +++ b/sorc/orog_mask_tools.fd/orog_gsl.fd/CMakeLists.txt @@ -26,6 +26,10 @@ target_link_libraries( PUBLIC NetCDF::NetCDF_Fortran) +if(OpenMP_Fortran_FOUND) + target_link_libraries(orog_gsl_lib PUBLIC OpenMP::OpenMP_Fortran) +endif() + target_link_libraries(${exe_name} PRIVATE orog_gsl_lib) install(TARGETS ${exe_name}) diff --git a/sorc/orog_mask_tools.fd/orog_gsl.fd/gsl_oro_data.f90 b/sorc/orog_mask_tools.fd/orog_gsl.fd/gsl_oro_data.f90 index e629dd2de..4e36ee9da 100644 --- a/sorc/orog_mask_tools.fd/orog_gsl.fd/gsl_oro_data.f90 +++ b/sorc/orog_mask_tools.fd/orog_gsl.fd/gsl_oro_data.f90 @@ -35,12 +35,13 @@ !! @return 0 for success, error code otherwise. program gsl_oro_data +use omp_lib + use gsl_oro_data_sm_scale, only: calc_gsl_oro_data_sm_scale use gsl_oro_data_lg_scale, only: calc_gsl_oro_data_lg_scale implicit none - character(len=2) :: tile_num ! tile number entered by user character(len=7) :: res_indx ! grid-resolution index, e.g., 96, 192, 384, 768, ! etc. entered by user @@ -49,7 +50,7 @@ program gsl_oro_data logical :: duplicate_oro_data_file ! flag for whether oro_data_ls file is a duplicate ! of oro_data_ss due to minimum grid size being less than 7.5km - +integer :: tid, nthreads ! 
Read in FV3GFS grid info print * @@ -67,6 +68,13 @@ program gsl_oro_data print *, "Halo = ", halo print * +!$OMP PARALLEL PRIVATE(TID) + tid = omp_get_thread_num() + if (tid==0) then + nthreads = omp_get_num_threads() + print*,'Number of threads = ', nthreads + endif +!$OMP END PARALLEL call calc_gsl_oro_data_sm_scale(tile_num,res_indx,halo,duplicate_oro_data_file) diff --git a/sorc/orog_mask_tools.fd/orog_gsl.fd/module_gsl_oro_data_lg_scale.f90 b/sorc/orog_mask_tools.fd/orog_gsl.fd/module_gsl_oro_data_lg_scale.f90 index f2d07d138..a9cee90a2 100644 --- a/sorc/orog_mask_tools.fd/orog_gsl.fd/module_gsl_oro_data_lg_scale.f90 +++ b/sorc/orog_mask_tools.fd/orog_gsl.fd/module_gsl_oro_data_lg_scale.f90 @@ -349,11 +349,15 @@ subroutine calc_gsl_oro_data_lg_scale(tile_num,res_indx,halo) ! ol1,...,ol4 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -cell_count = 1 - +!$OMP PARALLEL DO DEFAULT(SHARED) PRIVATE(I,CELL_COUNT,DLTA_LAT,DLTA_LON) & +!$OMP PRIVATE(I_BLK,J_BLK,LON_BLK,LAT_BLK,S_II,S_JJ,E_II,E_JJ,II_M,JJ_M) & +!$OMP PRIVATE(HGT_M_COARSE_ON_FINE,JJ,JJ_LOC,II,II_LOC,ZS,ZS_ACCUM,ZS_MEAN) & +!$OMP PRIVATE(HGT_M_COARSE,SUM2,SUM4,NFINEPOINTS,VAR,NU,ND,RATIO,NW,NT) do j = 1,dimY_FV3 do i = 1,dimX_FV3 + cell_count = ( (j-1) * dimX_FV3 ) + i + ! Calculate approximate side-lengths of square lat-long "coarse" grid ! cell centered on FV3 cell (units = radians) dlta_lat = sqrt(area_FV3(i,j))/ae @@ -552,7 +556,6 @@ subroutine calc_gsl_oro_data_lg_scale(tile_num,res_indx,halo) OL4(cell_count) = 0._real_kind if ( detrend_topography ) deallocate (HGT_M_coarse_on_fine) deallocate(zs) - cell_count = cell_count + 1 cycle ! move on to next (coarse) grid cell end if @@ -784,17 +787,12 @@ subroutine calc_gsl_oro_data_lg_scale(tile_num,res_indx,halo) OL4(cell_count) = 0._real_kind end if - - if ( detrend_topography ) deallocate (HGT_M_coarse_on_fine) deallocate (zs) - cell_count = cell_count + 1 - end do ! j = 1,dimY_FV3 end do ! 
i = 1,dimX_FV3 - - +!$OMP END PARALLEL DO ! ! Output GWD statistics fields to netCDF file diff --git a/sorc/orog_mask_tools.fd/orog_gsl.fd/module_gsl_oro_data_sm_scale.f90 b/sorc/orog_mask_tools.fd/orog_gsl.fd/module_gsl_oro_data_sm_scale.f90 index d91238375..067101894 100644 --- a/sorc/orog_mask_tools.fd/orog_gsl.fd/module_gsl_oro_data_sm_scale.f90 +++ b/sorc/orog_mask_tools.fd/orog_gsl.fd/module_gsl_oro_data_sm_scale.f90 @@ -119,7 +119,6 @@ subroutine calc_gsl_oro_data_sm_scale(tile_num,res_indx,halo, & logical :: fexist - print *, "Creating oro_data_ss file" print * @@ -365,11 +364,15 @@ subroutine calc_gsl_oro_data_sm_scale(tile_num,res_indx,halo, & ! ol1,...,ol4 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -cell_count = 1 - +!$OMP PARALLEL DO DEFAULT(SHARED) PRIVATE(I,DLTA_LAT,DLTA_LON) & +!$OMP PRIVATE(I_BLK,J_BLK,S_II,S_JJ,E_II,E_JJ,LON_BLK,LAT_BLK,II_M,JJ_M) & +!$OMP PRIVATE(ZS,II,JJ,II_LOC,JJ_LOC,SUM2,NFINEPOINTS,CELL_COUNT,ZS_ACCUM,ZS_MEAN) & +!$OMP PRIVATE(SUM4,VAR,NU,ND,RATIO,NW,NT) do j = 1,dimY_FV3 do i = 1,dimX_FV3 + cell_count = ( (j-1) * dimX_FV3 ) + i + ! Calculate approximate side-lengths of square lat-long "coarse" grid ! cell centered on FV3 cell (units = radians) dlta_lat = sqrt(area_FV3(i,j))/ae @@ -499,7 +502,6 @@ subroutine calc_gsl_oro_data_sm_scale(tile_num,res_indx,halo, & OL3(cell_count) = 0._real_kind OL4(cell_count) = 0._real_kind deallocate(zs) - cell_count = cell_count + 1 cycle ! move on to next (coarse) grid cell end if @@ -527,7 +529,6 @@ subroutine calc_gsl_oro_data_sm_scale(tile_num,res_indx,halo, & end do std_dev(cell_count) = sqrt( sum2/real(nfinepoints,real_kind) ) - ! ! Calculate convexity of sub-grid-scale terrain ! @@ -731,16 +732,11 @@ subroutine calc_gsl_oro_data_sm_scale(tile_num,res_indx,halo, & OL4(cell_count) = 0._real_kind end if - - deallocate (zs) - cell_count = cell_count + 1 - end do ! j = 1,dimY_FV3 end do ! i = 1,dimX_FV3 - - +!$OMP END PARALLEL DO ! ! 
Output GWD statistics fields to netCDF file @@ -1283,6 +1279,4 @@ subroutine netcdf_err(err,string) return end subroutine netcdf_err - - end module gsl_oro_data_sm_scale diff --git a/tests/global_cycle/CMakeLists.txt b/tests/global_cycle/CMakeLists.txt index 2427551f2..7937d229c 100644 --- a/tests/global_cycle/CMakeLists.txt +++ b/tests/global_cycle/CMakeLists.txt @@ -3,6 +3,19 @@ # # George Gayno, Lin Gan, Ed Hartnett, Larissa Reames +set(CYCLE_URL "https://ftp.emc.ncep.noaa.gov/static_files/public/UFS/ufs_utils/unit_tests/global_cycle") + +set(FILE1 "soil_sfcincr_jedi.001") +set(FILE2 "soil_sfcincr_jedi.002") +set(FILE3 "soil_sfcincr_jedi.003") +set(FILE4 "soil_sfcincr_jedi.004") +set(FILE5 "soil_sfcincr_jedi.005") +set(FILE6 "soil_sfcincr_jedi.006") + +foreach(THE_FILE IN LISTS FILE1 FILE2 FILE3 FILE4 FILE5 FILE6) + PULL_DATA(${CYCLE_URL} ${THE_FILE}) +endforeach() + # Include cmake to allow parallel I/O tests. include (LibMPI) @@ -14,12 +27,14 @@ endif() include_directories(${PROJECT_SOURCE_DIR}) -execute_process( COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_CURRENT_SOURCE_DIR}/LSanSuppress.supp ${CMAKE_CURRENT_BINARY_DIR}/LSanSuppress.supp) - add_executable(ftst_land_increments ftst_land_increments.F90) target_link_libraries(ftst_land_increments global_cycle_lib) +add_executable(ftst_read_increments ftst_read_increments.F90) +target_link_libraries(ftst_read_increments global_cycle_lib) add_mpi_test(global_cycle-ftst_land_increments EXECUTABLE ${CMAKE_CURRENT_BINARY_DIR}/ftst_land_increments NUMPROCS 1 TIMEOUT 60) +add_mpi_test(global_cycle-ftst_read_increments + EXECUTABLE ${CMAKE_CURRENT_BINARY_DIR}/ftst_read_increments + NUMPROCS 6 TIMEOUT 60) diff --git a/tests/global_cycle/ftst_land_increments.F90 b/tests/global_cycle/ftst_land_increments.F90 index 8519e178b..e8fd59e01 100644 --- a/tests/global_cycle/ftst_land_increments.F90 +++ b/tests/global_cycle/ftst_land_increments.F90 @@ -3,6 +3,10 @@ program ftst_land_increments ! 
Test "apply_land_da_adjustments" using sample points. ! ! author: George Gayno (george.gayno@noaa.gov) +! author: Yuan Xue: add a total of four testing points to test out +! the newly added frozen soil ice fraction calculations +! under different scenarios after ingesting +! soil temp increments (08/2023) use mpi use land_increments @@ -10,7 +14,7 @@ program ftst_land_increments implicit none integer :: my_rank, ierr, lsm, isot, ivegsrc - integer :: lensfc, lsoil, l + integer :: lensfc, lsoil, l, lsoil_incr real, parameter :: EPSILON=0.001 @@ -31,8 +35,9 @@ program ftst_land_increments isot = 1 ! STATSGO soil type. ivegsrc = 1 ! IGBP vegetation type. lsoil = 4 ! Number of soil layers. + lsoil_incr = 3 ! Number of soil layers (from top) to apply increments to. - lensfc= 3 ! Number of test points. + lensfc= 4 ! Number of test points. allocate(zsoil(lsoil)) allocate(rsoiltype(lensfc)) ! Soil type. @@ -53,7 +58,8 @@ program ftst_land_increments ! Point 1 is above freezing before the adjustment ! and above freezing after the adjustment. Therefore, -! the increments to STC and SLC will be retained. +! the increments to STC will be retained. +! temp: unfrozen ==> unfrozen rsoiltype(1) = 5. mask(1) = 1 @@ -61,53 +67,71 @@ program ftst_land_increments smc_anl(1,:) = .25 slc_anl(1,:) = .25 - stc_anl(1,1:3) = 281.0 ! DA only updates 3 layers + stc_anl(1,1:lsoil_incr) = 281.0 ! Point 2 is below freezing before the adjustment ! and above freezing after the adjustment. Therefore, -! the increment to STC will be removed, and SMC / SLC -! are unchanged. +! all soil ice will be melted +! temp: frozen ==> unfrozen rsoiltype(2) = 5. mask(2) = 1 - stc_bck(2,:) = 270.0 + stc_bck(2,:) = 271.0 smc_anl(2,:) = .25 slc_anl(2,:) = .20 - stc_anl(2,1:3) = 274.0 + stc_anl(2,1:lsoil_incr) = 275.0 -! Point 3 freezes. Therefore, -! the increment to STC will be removed, and SMC / SLC -! are unchanged. +! Point 3 freezes before and after the adjustment. Therefore, +! 
SLC will be recomputed, SMC remained unchanged +! temp: frozen ==> frozen rsoiltype(3) = 5. mask(3) = 1 - stc_bck(3,:) = 274.0 + stc_bck(3,:) = 272.0 smc_anl(3,:) = .25 - slc_anl(3,:) = .25 - stc_anl(3,1:3) = 271.0 + slc_anl(3,:) = .20 + stc_anl(3,1:lsoil_incr) = 271.0 - call apply_land_da_adjustments_soil(lsm, isot, ivegsrc,lensfc, & +! Point 4 is above freezing before and below freezing after the adjustment +! Therfore, SLC will be recomputed, SMC remained unchanged +! temp: unfrozen ==> frozen + + rsoiltype(4) = 5. + mask(4) = 1 + stc_bck(4,:) = 280.0 + + smc_anl(4,:) = .25 + slc_anl(4,:) = .25 + stc_anl(4,1:lsoil_incr) = 271.0 + + call apply_land_da_adjustments_soil(lsoil_incr, lsm, isot, ivegsrc,lensfc, & lsoil, rsoiltype, mask, stc_bck, stc_anl, smc_anl, slc_anl, & stc_updated, slc_updated,zsoil) - do l = 1, 3 - if (abs(smc_anl(1,l) - 0.25) > EPSILON) stop 2 - if (abs(slc_anl(1,l) - 0.25) > EPSILON) stop 4 - if (abs(stc_anl(1,l) - 281.) > EPSILON) stop 3 + do l = 1,lsoil_incr + if (abs(smc_anl(1,l) - 0.25) > EPSILON) stop 2 + if (abs(slc_anl(1,l) - 0.25) > EPSILON) stop 3 + if (abs(stc_anl(1,l) - 281.) > EPSILON) stop 4 + enddo + + do l = 1,lsoil_incr + if (abs(smc_anl(2,l) - 0.25) > EPSILON) stop 5 + if (abs(slc_anl(2,l) - 0.25) > EPSILON) stop 6 + if (abs(stc_anl(2,l) - 275.) > EPSILON) stop 7 enddo - do l = 1, 3 - if (abs(smc_anl(2,l) - 0.25) > EPSILON) stop 6 - if (abs(slc_anl(2,l) - 0.20) > EPSILON) stop 8 - if (abs(stc_anl(2,l) - 270.) > EPSILON) stop 5 + do l = 1,lsoil_incr + if (abs(smc_anl(3,l) - 0.25) > EPSILON) stop 8 + if (abs(slc_anl(3,l) - 0.112) > EPSILON) stop 9 + if (abs(stc_anl(3,l) - 271.) > EPSILON) stop 10 enddo - do l = 1, 3 - if (abs(smc_anl(3,l) - 0.25) > EPSILON) stop 10 - if (abs(slc_anl(3,l) - 0.25) > EPSILON) stop 12 - if (abs(stc_anl(3,l) - 274.) > EPSILON) stop 11 + do l = 1,lsoil_incr + if (abs(smc_anl(4,l) - 0.25) > EPSILON) stop 11 + if (abs(slc_anl(4,l) - 0.112) > EPSILON) stop 12 + if (abs(stc_anl(4,l) - 271.) 
> EPSILON) stop 13 enddo call mpi_finalize(ierr) diff --git a/tests/global_cycle/ftst_read_increments.F90 b/tests/global_cycle/ftst_read_increments.F90 new file mode 100644 index 000000000..151673508 --- /dev/null +++ b/tests/global_cycle/ftst_read_increments.F90 @@ -0,0 +1,168 @@ +! Unit test for global_cycle routine "read_data". +! +! Reads a sample soil increment file from file soil_xainc.$NNN +! NOTE: $NNN corresponds to (mpi rank + 1) +! Total number of processes= 6 (corresponds to 6 tiles) +! Each process is assigned one increment file on one tile to read +! If any portion of the variable does not match expected values, +! the test fails. +! +! Author: Yuan Xue, 07/17/2023 + +module chdir_mod + implicit none + interface + integer function c_chdir(path) bind(C,name="chdir") + use iso_c_binding + character(kind=c_char) :: path(*) + end function + end interface +contains + subroutine chdir(path, err) + use iso_c_binding + character(*) :: path + integer, optional, intent(out) :: err + integer :: loc_err + + loc_err = c_chdir(path//c_null_char) + if (present(err)) err = loc_err + end subroutine +end module chdir_mod + + program read_increments + + use read_write_data, only : read_data + use mpi + use chdir_mod + + implicit none + + integer:: ierr,my_rank + integer:: lsoil, l + integer:: lensfc, ij_input + + integer, parameter:: NUM_VALUES=4 + real, parameter :: EPSILON=0.0001 + + real, allocatable:: SLCINC(:,:),STCINC(:,:) + real:: stc_inc_expected_values_tile1(NUM_VALUES) + real:: stc_inc_expected_values_tile2(NUM_VALUES) + real:: stc_inc_expected_values_tile3(NUM_VALUES) + real:: stc_inc_expected_values_tile4(NUM_VALUES) + real:: stc_inc_expected_values_tile5(NUM_VALUES) + real:: stc_inc_expected_values_tile6(NUM_VALUES) + real:: slc_inc_expected_values_tile1(NUM_VALUES) + real:: slc_inc_expected_values_tile2(NUM_VALUES) + real:: slc_inc_expected_values_tile3(NUM_VALUES) + real:: slc_inc_expected_values_tile4(NUM_VALUES) + real:: 
slc_inc_expected_values_tile5(NUM_VALUES) + real:: slc_inc_expected_values_tile6(NUM_VALUES) + + !expected values were extracted from MATLAB, which directly reads in xainc file + !each tile is examined separately here + data stc_inc_expected_values_tile1 / 3.1428, 2.9983, 2.9786, 2.9634 / + data stc_inc_expected_values_tile2 / 2.9330, 2.9121, 2.9103, 2.9069 / + data stc_inc_expected_values_tile3 / 2.7236, 2.7308, 2.7315, 2.7295 / + data stc_inc_expected_values_tile4 / 3.0229, 3.0229, 3.0229, 3.0229 / + data stc_inc_expected_values_tile5 / 2.8595, 2.8825, 2.8878, 2.8948 / + data stc_inc_expected_values_tile6 / 2.7238, 2.7238, 2.7238, 2.7238 / + data slc_inc_expected_values_tile1 / 0.0007, 0.0018, 0.0018, 0.0018 / + data slc_inc_expected_values_tile2 / 0.0034, 0.0031, 0.0029, 0.0029 / + data slc_inc_expected_values_tile3 / 0.0003, 0.0005, 0.0011, 0.0008 / + data slc_inc_expected_values_tile4 / 0.01, 0.01, 0.01, 0.01 / + data slc_inc_expected_values_tile5 / 0.0019, 0.0019, 0.0020, 0.0024 / + data slc_inc_expected_values_tile6 / 0.01, 0.01, 0.01, 0.01 / + + call mpi_init(ierr) + call MPI_Comm_rank(MPI_COMM_WORLD, my_rank, ierr) + + lsoil=4 + lensfc=36864 + ij_input = 28541 + + allocate(SLCINC(lensfc,lsoil)) + allocate(STCINC(lensfc,lsoil)) + + if (my_rank .eq. 0) print*,"Starting test of global_cycle routine read_data." + if (my_rank .eq. 0) print*,"Call routine read_data" + + call chdir("./data") + call rename("soil_sfcincr_jedi.001", "soil_xainc.001") + call rename("soil_sfcincr_jedi.002", "soil_xainc.002") + call rename("soil_sfcincr_jedi.003", "soil_xainc.003") + call rename("soil_sfcincr_jedi.004", "soil_xainc.004") + call rename("soil_sfcincr_jedi.005", "soil_xainc.005") + call rename("soil_sfcincr_jedi.006", "soil_xainc.006") + + call read_data(lsoil,lensfc,.false.,.false.,.true.,.true.,STCINC=STCINC,SLCINC=SLCINC) + + if (my_rank .eq. 
0) then + do l = 1,4 + if (abs(STCINC(ij_input,l) - stc_inc_expected_values_tile1(l))& + > EPSILON) stop 20 + if (abs(SLCINC(ij_input,l) - slc_inc_expected_values_tile1(l))& + > EPSILON) stop 40 + enddo + print*, "tile#", my_rank+1, "reads OK" + endif + + if (my_rank .eq. 1) then + do l = 1,4 + if (abs(STCINC(ij_input,l) - stc_inc_expected_values_tile2(l))& + > EPSILON) stop 21 + if (abs(SLCINC(ij_input,l) - slc_inc_expected_values_tile2(l))& + > EPSILON) stop 41 + enddo + print*, "tile#", my_rank+1, "reads OK" + endif + + if (my_rank .eq. 2) then + do l = 1,4 + if (abs(STCINC(ij_input,l) - stc_inc_expected_values_tile3(l))& + > EPSILON) stop 22 + if (abs(SLCINC(ij_input,l) - slc_inc_expected_values_tile3(l))& + > EPSILON) stop 42 + enddo + print*, "tile#", my_rank+1, "reads OK" + endif + + if (my_rank .eq. 3) then + do l = 1,4 + if (abs(STCINC(ij_input,l) - stc_inc_expected_values_tile4(l))& + > EPSILON) stop 23 + if (abs(SLCINC(ij_input,l) - slc_inc_expected_values_tile4(l))& + > EPSILON) stop 43 + enddo + print*, "tile#", my_rank+1, "reads OK" + endif + + if (my_rank .eq. 4) then + do l = 1,4 + if (abs(STCINC(ij_input,l) - stc_inc_expected_values_tile5(l))& + > EPSILON) stop 24 + if (abs(SLCINC(ij_input,l) - slc_inc_expected_values_tile5(l))& + > EPSILON) stop 44 + enddo + print*, "tile#", my_rank+1, "reads OK" + endif + + if (my_rank .eq. 5) then + do l = 1,4 + if (abs(STCINC(ij_input,l) - stc_inc_expected_values_tile6(l))& + > EPSILON) stop 25 + if (abs(SLCINC(ij_input,l) - slc_inc_expected_values_tile6(l))& + > EPSILON) stop 45 + enddo + print*, "tile#", my_rank+1, "reads OK" + endif + + call MPI_Barrier(MPI_COMM_WORLD, ierr) + + if (my_rank .eq. 0) print*, "ALL is OK" + if (my_rank .eq. 0) print*, "SUCCESS!" 
+ + deallocate(SLCINC,STCINC) + call mpi_finalize(ierr) + + end program read_increments + diff --git a/ush/fv3gfs_driver_grid.sh b/ush/fv3gfs_driver_grid.sh index 710e9fbfb..a0b48f420 100755 --- a/ush/fv3gfs_driver_grid.sh +++ b/ush/fv3gfs_driver_grid.sh @@ -197,7 +197,7 @@ if [ $gtype = uniform ] || [ $gtype = stretch ] || [ $gtype = nest ]; then echo "............ Execute fv3gfs_make_orog.sh for tile $tile .................." echo set -x - $script_dir/fv3gfs_make_orog.sh $res $tile $grid_dir $orog_dir $script_dir $topo + $script_dir/fv3gfs_make_orog.sh $res $tile $grid_dir $orog_dir $topo err=$? if [ $err != 0 ]; then exit $err @@ -399,7 +399,7 @@ elif [ $gtype = regional_gfdl ] || [ $gtype = regional_esg ]; then echo "............ Execute fv3gfs_make_orog.sh for tile $tile .................." echo set -x - $script_dir/fv3gfs_make_orog.sh $res $tile $grid_dir $orog_dir $script_dir $topo + $script_dir/fv3gfs_make_orog.sh $res $tile $grid_dir $orog_dir $topo err=$? if [ $err != 0 ]; then exit $err diff --git a/ush/fv3gfs_make_orog.sh b/ush/fv3gfs_make_orog.sh index 6fcff6673..56f824041 100755 --- a/ush/fv3gfs_make_orog.sh +++ b/ush/fv3gfs_make_orog.sh @@ -1,101 +1,109 @@ #!/bin/bash +#------------------------------------------------------------------- +# Program Name: fv3gfs_make_orog +# +# Run the orography ('orog') program to create mask, terrain and +# GWD fields on the model tile. +# +# Author: GFDL Programmer +# +# History Log: +# 01/2018: Initial version. +# 04/2024: Some clean up. +# +# Usage: +# Arguments: +# res - "C" Resolution of model grid - 48, 96, 768, etc. +# tile - Tile number. +# griddir - Location of model 'grid' file. +# outdir - Location of the model orography file output by +# the 'orog' program. +# indir - Location of input land mask and terrain data. +# +# Input Files: +# $GRIDFILE - The model 'grid' file +# containing georeference info. +# topography.antarctica.ramp.30s.nc - RAMP terrain data. +# landcover.umd.30s.nc - Global land mask data. 
+# topography.gmted2010.30s.nc - Global USGS GMTED 2010 +# terrain data. +# +# Output Files: +# out.oro.nc - The model orography file (single tile). +# +# Condition codes: +# 0 - Normal termination. +# 1 - Incorrect number of script arguments. +# 2 - Program executable does not exits. +# 3 - Error running program. +#------------------------------------------------------------------- + set -eux nargv=$# -inorogexist=0 - -if [ $nargv -eq 5 ]; then # lat-lon grid - lonb=$1 - latb=$2 - outdir=$3 - script_dir=$4 - is_latlon=1 - orogfile="none" - hist_dir=$5 - workdir=$TEMP_DIR/latlon/orog/latlon_${lonb}x${latb} -elif [ $nargv -eq 6 ]; then # cubed-sphere grid - res=$1 - lonb=$1 - latb=$1 - tile=$2 - griddir=$3 - outdir=$4 - script_dir=$5 - is_latlon=0 - orogfile="none" - hist_dir=$6 - workdir=$TEMP_DIR/C${res}/orog/tile$tile -elif [ $nargv -eq 8 ]; then # input your own orography files +if [ $nargv -eq 5 ]; then res=$1 - lonb=$1 - latb=$1 tile=$2 griddir=$3 outdir=$4 - is_latlon=0 - inputorog=$5 - script_dir=$6 - orogfile=$inputorog:t - inorogexist=1 - hist_dir=$7 - workdir=$TEMP_DIR/C${res}/orog/tile$tile + indir=$5 else - echo "Number of arguments must be 6 for cubic sphere grid" - echo "Usage for cubic sphere grid: $0 resolution tile griddir outdir script_dir hist_dir" + set +x + echo "FATAL ERROR: Number of arguments must be 5." + echo "Usage: $0 resolution tile griddir outdir indir." + set -x exit 1 fi -indir=$hist_dir executable=$exec_dir/orog if [ ! -s $executable ]; then - echo "executable does not exist" - exit 1 + set +x + echo "FATAL ERROR, ${executable} does not exist." + set -x + exit 2 fi +workdir=$TEMP_DIR/C${res}/orog/tile$tile + if [ ! -s $workdir ]; then mkdir -p $workdir ;fi if [ ! 
-s $outdir ]; then mkdir -p $outdir ;fi -#jcap is for Gaussian grid -#jcap=`expr $latb - 2 ` -jcap=0 -NF1=0 -NF2=0 -mtnres=1 -efac=0 -blat=0 -NR=0 - -if [ $is_latlon -eq 1 ]; then - OUTGRID="none" -else - OUTGRID="C${res}_grid.tile${tile}.nc" -fi +GRIDFILE="C${res}_grid.tile${tile}.nc" # Make Orograraphy -echo "OUTGRID = $OUTGRID" +set +x +echo "GRIDFILE = $GRIDFILE" echo "workdir = $workdir" echo "outdir = $outdir" echo "indir = $indir" +set -x cd $workdir -cp ${indir}/topography.antarctica.ramp.30s.nc . -cp ${indir}/landcover.umd.30s.nc . -cp ${indir}/topography.gmted2010.30s.nc . -if [ $inorogexist -eq 1 ]; then - cp $inputorog . -fi - -if [ $is_latlon -eq 0 ]; then - cp ${griddir}/$OUTGRID . -fi -cp $executable . - -echo $mtnres $lonb $latb $jcap $NR $NF1 $NF2 $efac $blat > INPS -echo $OUTGRID >> INPS -echo $orogfile >> INPS +ln -fs ${indir}/topography.antarctica.ramp.30s.nc . +ln -fs ${indir}/landcover.umd.30s.nc . +ln -fs ${indir}/topography.gmted2010.30s.nc . +ln -fs ${griddir}/$GRIDFILE . +ln -fs $executable . + +#------------------------------------------------------------------- +# Set up program namelist. The entries are: +# +# 1 - GRIDFILE - model 'grid' file. +# 2 - Logical to output land mask only. When creating a grid +# for the coupled model ("ocn" resolution is specified) +# this is true. The mask is then tweaked during the +# ocean merge step before the 'orog' program is run again +# (in fv3gfs_ocean_merge.sh) to create the full 'orog' +# file. When false, the 'orog' program outputs the +# full orography file. +# 3 - The input file from the ocean merge step. Defaults +# to 'none' for this script. +#------------------------------------------------------------------- + +echo $GRIDFILE > INPS if [ -z ${ocn+x} ]; then echo ".false." >> INPS else @@ -105,21 +113,19 @@ echo "none" >> INPS cat INPS time $executable < INPS +rc=$? -if [ $? 
-ne 0 ]; then - echo "ERROR in running $executable " - exit 1 +if [ $rc -ne 0 ]; then + set +x + echo "FATAL ERROR running $executable." + set -x + exit 3 else - if [ $is_latlon -eq 1 ]; then - outfile=oro.${lonb}x${latb}.nc - else - outfile=oro.C${res}.tile${tile}.nc - fi - + outfile=oro.C${res}.tile${tile}.nc mv ./out.oro.nc $outdir/$outfile - echo "file $outdir/$outfile is created" - echo "Successfully running $executable " + set +x + echo "Successfully ran ${executable}." + echo "File $outdir/$outfile is created." + set -x exit 0 fi - -exit diff --git a/ush/fv3gfs_ocean_merge.sh b/ush/fv3gfs_ocean_merge.sh index 500ccbd0a..519a2e9da 100755 --- a/ush/fv3gfs_ocean_merge.sh +++ b/ush/fv3gfs_ocean_merge.sh @@ -45,19 +45,22 @@ EOF for tnum in '1' '2' '3' '4' '5' '6' do cd ${TEMP_DIR}/C${res}/orog/tile$tnum - echo $tnum $res $res 0 0 0 0 0 0 > INPS - echo C${res}_grid.tile${tnum}.nc >> INPS - echo none >> INPS + echo C${res}_grid.tile${tnum}.nc > INPS echo ".false." >> INPS echo '"'${TEMP_DIR}/ocean_merged/C${res}.mx${ocn}/C${res}.mx${ocn}.tile${tnum}.nc'"' >> INPS cat INPS time ${exec_dir}/orog < INPS + rc=$? + + if [[ $rc -ne 0 ]] ; then + echo "FATAL ERROR running orog." + exit $rc + fi + ncks -4 -O ${TEMP_DIR}/ocean_merged/C${res}.mx${ocn}/C${res}.mx${ocn}.tile${tnum}.nc ${TEMP_DIR}/ocean_merged/C${res}.mx${ocn}/C${res}.mx${ocn}.tile${tnum}.nc ncks -A -v lake_frac,lake_depth ${TEMP_DIR}/ocean_merged/C${res}.mx${ocn}/C${res}.mx${ocn}.tile${tnum}.nc out.oro.nc - #cp out.oro.nc $out_dir/oro_C${res}.mx${ocn}.tile${tnum}.nc cp out.oro.nc $orog_dir/oro.C${res}.tile${tnum}.nc - #cp C${res}_grid.tile${tnum}.nc $out_dir/C${res}_grid.tile${tnum}.nc done diff --git a/ush/global_cycle.sh b/ush/global_cycle.sh index 5c69fd0df..4ce053751 100755 --- a/ush/global_cycle.sh +++ b/ush/global_cycle.sh @@ -139,8 +139,10 @@ # between the filtered and unfiltered terrain. Default is true. # DONST Process NST records when using NST model. Default is 'no'. 
# DO_SFCCYCLE Call sfcsub routine -# DO_LNDINC Call routine to update soil states with increment files -# DO_SNO_INC Call routine to update snow states with increment files +# DO_LNDINC Call routine to update snow/soil states with increment files +# DO_SOI_INC_GSI Call routine to update soil states with gsi(gaussian) increment files +# DO_SNO_INC_JEDI Call routine to update snow states with jedi increment files +# DO_SOI_INC_JEDI Call routine to update soil states with jedi increment files # zsea1/zsea2 When running with NST model, this is the lower/upper bound # of depth of sea temperature. In whole mm. # MAX_TASKS_CY Normally, program should be run with a number of mpi tasks @@ -263,7 +265,9 @@ use_ufo=${use_ufo:-.true.} DONST=${DONST:-"NO"} DO_SFCCYCLE=${DO_SFCCYCLE:-.true.} DO_LNDINC=${DO_LNDINC:-.false.} -DO_SNO_INC=${DO_SNO_INC:-.false.} +DO_SOI_INC_GSI=${DO_SOI_INC_GSI:-.false.} +DO_SNO_INC_JEDI=${DO_SNO_INC_JEDI:-.false.} +DO_SOI_INC_JEDI=${DO_SOI_INC_JEDI:-.false.} zsea1=${zsea1:-0} zsea2=${zsea2:-0} MAX_TASKS_CY=${MAX_TASKS_CY:-99999} @@ -289,7 +293,6 @@ FNVMXC=${FNVMXC:-${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_green FNSLPC=${FNSLPC:-${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.slope_type.tileX.nc} FNMSKH=${FNMSKH:-${FIXgfs}/am/global_slmask.t1534.3072.1536.grb} NST_FILE=${NST_FILE:-"NULL"} -LND_SOI_FILE=${LND_SOI_FILE:-"NULL"} FNTSFA=${FNTSFA:-${COMIN}/${PREINP}sstgrb${SUFINP}} FNACNA=${FNACNA:-${COMIN}/${PREINP}engicegrb${SUFINP}} FNSNOA=${FNSNOA:-${COMIN}/${PREINP}snogrb${SUFINP}} @@ -384,8 +387,10 @@ EOF cat << EOF > fort.37 &NAMSFCD NST_FILE="$NST_FILE", - LND_SOI_FILE="$LND_SOI_FILE", - DO_SNO_INC=$DO_SNO_INC + DO_SOI_INC_GSI=$DO_SOI_INC_GSI, + DO_SNO_INC_JEDI=$DO_SNO_INC_JEDI, + DO_SOI_INC_JEDI=$DO_SOI_INC_JEDI, + lsoil_incr=3, / EOF diff --git a/ush/global_cycle_driver.sh b/ush/global_cycle_driver.sh index 745f8caf9..3f8b4d2e6 100755 --- a/ush/global_cycle_driver.sh +++ b/ush/global_cycle_driver.sh @@ -52,8 +52,9 @@ fi export 
DO_SFCCYLE=${DO_SFCCYCLE:-".true."} export DO_LNDINC=${DO_LNDINC:-".false."} -export LND_SOI_FILE=${LND_SOI_FILE:-"NULL"} -export DO_SNO_INC=${DO_SNO_INC:-".false."} +export DO_SOI_INC_GSI=${DO_SOI_INC_GSI:-".false."} +export DO_SNO_INC_JEDI=${DO_SNO_INC_JEDI:-".false."} +export DO_SOI_INC_JEDI=${DO_SOI_INC_JEDI:-".false."} export FRAC_GRID=${FRAC_GRID:-".false."} CRES=$(echo $CASE | cut -c 2-) @@ -93,8 +94,16 @@ for n in $(seq 1 $ntiles); do ln -fs $FIXgfs/orog/${CASE}/C${CRES}.mx${OCNRES}_oro_data.tile${n}.nc $DATA/fnorog.00$n fi - if [[ "$DO_SNO_INC" == ".true." ]] ; then - ln -fs $COMIN/$PDY.${cyc}0000.xainc.tile${n}.nc $DATA/xainc.00$n + if [[ "$DO_SNO_INC_JEDI" == ".true." ]] ; then + ln -fs $COMIN/$PDY.${cyc}0000.xainc.tile${n}.nc $DATA/snow_xainc.00$n + fi + + if [[ "$DO_SOI_INC_JEDI" == ".true." ]] ; then + ln -fs $COMIN/soil_sfcincr_jedi.00$n $DATA/soil_xainc.00$n + fi + + if [[ "$DO_SOI_INC_GSI" == ".true." ]] ; then + ln -fs $COMIN/sfcincr_gsi.00$n $DATA/sfcincr_gsi.00$n fi done