From 1f9144f9b81b232a2e842b3b9f49dd8d1010d48f Mon Sep 17 00:00:00 2001 From: Marshall Ward Date: Fri, 4 Oct 2019 18:07:14 -0400 Subject: [PATCH 1/9] TC4 integration into test suite This patch renames the tc4 test to activate it in the test suite. It also modifies the Makefile to build the input field test scripts. It also modifies the Python build scripts to be PEP8-conformant. We temporarily disable tc4 in the restart tests, since they currently fail. This needs to be addressed before we can merge this into the main branch. The patch does not enable the necessary Python modules for running on Travis, that will also be addressed later. --- .testing/Makefile | 19 +++---- .testing/_tc4/build_data.py | 68 ------------------------ .testing/_tc4/build_grid.py | 75 --------------------------- .testing/_tc4/input.nml | 27 ---------- .testing/{_tc4 => tc4}/MOM_input | 3 ++ .testing/{_tc4 => tc4}/MOM_override | 0 .testing/tc4/build_data.py | 80 +++++++++++++++++++++++++++++ .testing/tc4/build_grid.py | 76 +++++++++++++++++++++++++++ .testing/{_tc4 => tc4}/diag_table | 0 .testing/tc4/input.nml | 18 +++++++ 10 files changed, 187 insertions(+), 179 deletions(-) delete mode 100644 .testing/_tc4/build_data.py delete mode 100644 .testing/_tc4/build_grid.py delete mode 100644 .testing/_tc4/input.nml rename .testing/{_tc4 => tc4}/MOM_input (99%) rename .testing/{_tc4 => tc4}/MOM_override (100%) create mode 100644 .testing/tc4/build_data.py create mode 100644 .testing/tc4/build_grid.py rename .testing/{_tc4 => tc4}/diag_table (100%) create mode 100644 .testing/tc4/input.nml diff --git a/.testing/Makefile b/.testing/Makefile index 0cd5454e3d..650ae14324 100644 --- a/.testing/Makefile +++ b/.testing/Makefile @@ -169,7 +169,7 @@ test: $(foreach t,$(TESTS),test.$(t)) test.regressions: $(foreach c,$(CONFIGS),$(c).regression $(c).regression.diag) test.grids: $(foreach c,$(filter-out tc3,$(CONFIGS)),$(c).grid $(c).grid.diag) test.layouts: $(foreach c,$(CONFIGS),$(c).layout $(c).layout.diag) -test.restarts: $(foreach c,$(CONFIGS),$(c).restart) +test.restarts: $(foreach c,$(filter-out tc4,$(CONFIGS)),$(c).restart) test.repros: $(foreach c,$(CONFIGS),$(c).repro $(c).repro.diag) test.openmps: $(foreach c,$(CONFIGS),$(c).openmp $(c).openmp.diag) test.nans: $(foreach c,$(CONFIGS),$(c).nan $(c).nan.diag) @@ -225,6 +225,7 @@ results/%/ocean.stats.$(1): ../build/$(2)/MOM6 if [ $(3) ]; then find ../build/$(2) -name *.gcda -exec rm -f '{}' \; ; fi mkdir -p work/$$*/$(1) cp -rL $$*/* work/$$*/$(1) + cd work/$$*/$(1) && if [ -f Makefile ]; then make; fi mkdir -p work/$$*/$(1)/RESTART echo $(4) > work/$$*/$(1)/MOM_override cd work/$$*/$(1) && $$(call MPIRUN_CMD,$(5)) -n $(6) ../../../$$< 2> debug.out > std.out \ @@ -259,6 +260,7 @@ results/%/ocean.stats.restart: ../build/symmetric/MOM6 rm -rf work/$*/restart mkdir -p work/$*/restart cp -rL $*/* work/$*/restart + cd work/$*/restart && if [ -f Makefile ]; then make; fi mkdir -p work/$*/restart/RESTART # Generate the half-period input namelist # TODO: Assumes runtime set by DAYMAX, will fail if set by input.nml @@ -268,20 +270,19 @@ results/%/ocean.stats.restart: ../build/symmetric/MOM6 && if [ -z "$${timeunit}" ]; then timeunit="8.64e4"; fi \ && printf -v timeunit_int "%.f" "$${timeunit}" \ && halfperiod=$$(printf "%.f" $$(bc <<< "scale=10; 0.5 * $${daymax} * $${timeunit_int}")) \ - && printf "\n&ocean_solo_nml\n seconds = $${halfperiod}\n/\n" >> input.nml \ - && echo $${daymax} $${timeunit} + && printf "\n&ocean_solo_nml\n seconds = $${halfperiod}\n/\n" >> input.nml # Run the 
first half-period - cd work/$*/restart && $(MPIRUN) -n 1 ../../../$< 2> debug.out > std.out \ - || ! sed 's/^/$*.restart1: /' std.out debug.out \ - && sed 's/^/$*.restart1: /' std.out + cd work/$*/restart && $(MPIRUN) -n 1 ../../../$< 2> debug1.out > std1.out \ + || ! sed 's/^/$*.restart1: /' std1.out debug1.out \ + && sed 's/^/$*.restart1: /' std1.out # Setup the next inputs cd work/$*/restart && rm -rf INPUT && mv RESTART INPUT mkdir work/$*/restart/RESTART cd work/$*/restart && sed -i -e "s/input_filename *= *'n'/input_filename = 'r'/g" input.nml # Run the second half-period - cd work/$*/restart && $(MPIRUN) -n 1 ../../../$< 2> debug.out > std.out \ - || ! sed 's/^/$*.restart2: /' std.out debug.out \ - && sed 's/^/$*.restart2: /' std.out + cd work/$*/restart && $(MPIRUN) -n 1 ../../../$< 2> debug2.out > std2.out \ + || ! sed 's/^/$*.restart2: /' std2.out debug2.out \ + && sed 's/^/$*.restart2: /' std2.out # Archive the results and cleanup mkdir -p $(@D) cp work/$*/restart/ocean.stats $@ diff --git a/.testing/_tc4/build_data.py b/.testing/_tc4/build_data.py deleted file mode 100644 index 904db77c7a..0000000000 --- a/.testing/_tc4/build_data.py +++ /dev/null @@ -1,68 +0,0 @@ -import netCDF4 as nc -import numpy as np - -x=nc.Dataset('ocean_hgrid.nc').variables['x'][1::2,1::2] -y=nc.Dataset('ocean_hgrid.nc').variables['y'][1::2,1::2] -zbot=nc.Dataset('topog.nc').variables['depth'][:] -zbot0=zbot.max() - -def t_fc(x,y,z,radius=5.0,tmag=1.0): # a radially symmetric anomaly in the center of the domain. units are meters and degC - ny,nx=x.shape;nz=z.shape[0] - x0=x[int(ny/2),int(nx/2)];y0=y[int(ny/2),int(nx/2)] - tl=np.zeros((nz,ny,nx)) - zb=z[-1] - if len(z)>1: - zd=z/zb - else: - zd=[0.] - for k in np.arange(len(zd)): - r=np.sqrt((x-x0)**2.+(y-y0)**2.) - tl[k,:]=tl[k,:]+(1.0-np.minimum(r/radius,1.0))*tmag*(1.0-zd[k]) - return tl - -ny,nx = x.shape -nz=10;z=(np.arange(nz)*zbot0)/nz - -temp=t_fc(x,y,z) -salt=np.zeros(temp.shape)+35.0 -fl=nc.Dataset('temp_salt_ic.nc','w',format='NETCDF3_CLASSIC') -fl.createDimension('lon',nx) -fl.createDimension('lat',ny) -fl.createDimension('depth',nz) -fl.createDimension('Time',None) -zv=fl.createVariable('depth','f8',('depth')) -lonv=fl.createVariable('lon','f8',('lon')) -latv=fl.createVariable('lat','f8',('lat')) -timev=fl.createVariable('Time','f8',('Time')) -timev.calendar='noleap' -timev.units='days since 0001-01-01 00:00:00.0' -timev.modulo=' ' -tv=fl.createVariable('ptemp','f8',('Time','depth','lat','lon'),fill_value=-1.e20) -sv=fl.createVariable('salt','f8',('Time','depth','lat','lon'),fill_value=-1.e20) -tv[:]=temp[np.newaxis,:] -sv[:]=salt[np.newaxis,:] -zv[:]=z -lonv[:]=x[0,:] -latv[:]=y[:,0] -timev[0]=0. -fl.sync() -fl.close() - - -# Make Sponge forcing file -dampTime=20.0 # days -secDays=8.64e4 -fl=nc.Dataset('sponge.nc','w',format='NETCDF3_CLASSIC') -fl.createDimension('lon',nx) -fl.createDimension('lat',ny) -lonv=fl.createVariable('lon','f8',('lon')) -latv=fl.createVariable('lat','f8',('lat')) -spv=fl.createVariable('Idamp','f8',('lat','lon'),fill_value=-1.e20) -Idamp=np.zeros((ny,nx)) -if dampTime>0.: - Idamp=0.0+1.0/(dampTime*secDays) -spv[:]=Idamp -lonv[:]=x[0,:] -latv[:]=y[:,0] -fl.sync() -fl.close() diff --git a/.testing/_tc4/build_grid.py b/.testing/_tc4/build_grid.py deleted file mode 100644 index 8187e98144..0000000000 --- a/.testing/_tc4/build_grid.py +++ /dev/null @@ -1,75 +0,0 @@ -import netCDF4 as nc -from netCDF4 import stringtochar -import numpy as np - - -nx=14;ny=10 # grid size -depth0=100. 
#uniform depth -ds=0.01 # grid resolution at the equator in degrees -Re=6.378e6 # Radius of earth - -topo_=np.zeros((ny,nx))+depth0 -f_topo=nc.Dataset('topog.nc','w',format='NETCDF3_CLASSIC') -ny,nx=topo_.shape -f_topo.createDimension('ny',ny) -f_topo.createDimension('nx',nx) -f_topo.createDimension('ntiles',1) -f_topo.createVariable('depth','f8',('ny','nx')) -f_topo.createVariable('h2','f8',('ny','nx')) -f_topo.variables['depth'][:]=topo_ -f_topo.sync() -f_topo.close() - -x_=np.arange(0,2*nx+1)*ds # units are degrees E -y_=np.arange(0,2*ny+1)*ds # units are degrees N -x,y=np.meshgrid(x_,y_) - -dx=np.zeros((2*ny+1,2*nx)) -dy=np.zeros((2*ny,2*nx+1)) -rad_deg=np.pi/180. -dx[:]=rad_deg*Re*(x[:,1:]-x[:,0:-1])*np.cos(0.5*rad_deg*(y[:,0:-1]+y[:,1:])) -dy[:]=rad_deg*Re*(y[1:,:]-y[0:-1,:]) - -f_sg=nc.Dataset('ocean_hgrid.nc','w',format='NETCDF3_CLASSIC') -f_sg.createDimension('ny',ny*2) -f_sg.createDimension('nx',nx*2) -f_sg.createDimension('nyp',ny*2+1) -f_sg.createDimension('nxp',nx*2+1) -f_sg.createDimension('string',5) -f_sg.createVariable('y','f8',('nyp','nxp')) -f_sg.createVariable('x','f8',('nyp','nxp')) -dyv=f_sg.createVariable('dy','f8',('ny','nxp')) -dxv=f_sg.createVariable('dx','f8',('nyp','nx')) -areav=f_sg.createVariable('area','f8',('ny','nx')) -dxv.units='m' -dyv.units='m' -areav.units='m2' -f_sg.createVariable('angle_dx','f8',('nyp','nxp')) -f_sg.createVariable('tile','S1',('string')) -f_sg.variables['y'].units='degrees' -f_sg.variables['x'].units='degrees' -f_sg.variables['dy'].units='meters' -f_sg.variables['dx'].units='meters' -f_sg.variables['area'].units='m2' -f_sg.variables['angle_dx'].units='degrees' -f_sg.variables['y'][:]=y -f_sg.variables['x'][:]=x -f_sg.variables['dx'][:]=dx -f_sg.variables['dy'][:]=dy -#Compute the area bounded by lines of constant -#latitude-longitud on a sphere in m2. -dlon=x_[1:]-x_[:-1] -dlon=np.tile(dlon[np.newaxis,:],(2*ny,1)) -y1_=y_[:-1] -y1_=y1_[:,np.newaxis]*rad_deg -y2_=y_[1:] -y2_=y2_[:,np.newaxis]*rad_deg -y1_=np.tile(y1_,(1,2*nx)) -y2_=np.tile(y2_,(1,2*nx)) -area=(rad_deg*Re*Re)*(np.sin(y2_)-np.sin(y1_)) * dlon -f_sg.variables['area'][:]=area -f_sg.variables['angle_dx'][:]=0. -str_=stringtochar(np.array(['tile1'],dtype='S5')) -f_sg.variables['tile'][:] = str_ -f_sg.sync() -f_sg.close() diff --git a/.testing/_tc4/input.nml b/.testing/_tc4/input.nml deleted file mode 100644 index 29918fbdee..0000000000 --- a/.testing/_tc4/input.nml +++ /dev/null @@ -1,27 +0,0 @@ - &MOM_input_nml - output_directory = './', - input_filename = 'n' - restart_input_dir = 'INPUT/', - restart_output_dir = 'RESTART/', - parameter_filename = 'MOM_input', - 'MOM_override' / - - &diag_manager_nml - flush_nc_files = .true. - / - - &fms_nml - domains_stack_size = 710000, - stack_size = 0 / - - &ocean_domains_nml - / - - &ocean_solo_nml - months = 0 - date_init = 1,1,1,0,0,0 - hours = 0 - minutes = 0 - seconds = 0 - calendar = 'julian' / - diff --git a/.testing/_tc4/MOM_input b/.testing/tc4/MOM_input similarity index 99% rename from .testing/_tc4/MOM_input rename to .testing/tc4/MOM_input index da0e887a6a..456783af88 100644 --- a/.testing/_tc4/MOM_input +++ b/.testing/tc4/MOM_input @@ -397,3 +397,6 @@ MAXCPU = 2.88E+04 ! [wall-clock seconds] default = -1.0 ! processors used. ! 
=== module MOM_file_parser === + +DIAG_AS_CHKSUM = True +DEBUG = True diff --git a/.testing/_tc4/MOM_override b/.testing/tc4/MOM_override similarity index 100% rename from .testing/_tc4/MOM_override rename to .testing/tc4/MOM_override diff --git a/.testing/tc4/build_data.py b/.testing/tc4/build_data.py new file mode 100644 index 0000000000..50f45ce9f1 --- /dev/null +++ b/.testing/tc4/build_data.py @@ -0,0 +1,80 @@ +import netCDF4 as nc +import numpy as np + +x = nc.Dataset('ocean_hgrid.nc').variables['x'][1::2, 1::2] +y = nc.Dataset('ocean_hgrid.nc').variables['y'][1::2, 1::2] +zbot = nc.Dataset('topog.nc').variables['depth'][:] +zbot0 = zbot.max() + + +def t_fc(x, y, z, radius=5.0, tmag=1.0): + """a radially symmetric anomaly in the center of the domain. + units are meters and degC. + """ + ny, nx = x.shape + nz = z.shape[0] + + x0 = x[int(ny/2), int(nx/2)] + y0 = y[int(ny/2), int(nx/2)] + + tl = np.zeros((nz, ny, nx)) + zb = z[-1] + if len(z) > 1: + zd = z / zb + else: + zd = [0.] + for k in np.arange(len(zd)): + r = np.sqrt((x - x0)**2 + (y - y0)**2) + tl[k, :] += (1.0 - np.minimum(r / radius, 1.0)) * tmag * (1.0 - zd[k]) + return tl + + +ny, nx = x.shape +nz = 10 +z = (np.arange(nz) * zbot0) / nz + +temp = t_fc(x, y, z) +salt = np.zeros(temp.shape)+35.0 +fl = nc.Dataset('temp_salt_ic.nc', 'w', format='NETCDF3_CLASSIC') +fl.createDimension('lon', nx) +fl.createDimension('lat', ny) +fl.createDimension('depth', nz) +fl.createDimension('Time', None) +zv = fl.createVariable('depth', 'f8', ('depth')) +lonv = fl.createVariable('lon', 'f8', ('lon')) +latv = fl.createVariable('lat', 'f8', ('lat')) +timev = fl.createVariable('Time', 'f8', ('Time')) +timev.calendar = 'noleap' +timev.units = 'days since 0001-01-01 00:00:00.0' +timev.modulo = ' ' +tv = fl.createVariable('ptemp', 'f8', ('Time', 'depth', 'lat', 'lon'), + fill_value=-1.e20) +sv = fl.createVariable('salt', 'f8', ('Time', 'depth', 'lat', 'lon'), + fill_value=-1.e20) +tv[:] = temp[np.newaxis, :] +sv[:] = salt[np.newaxis, :] +zv[:] = z +lonv[:] = x[0, :] +latv[:] = y[:, 0] +timev[0] = 0. +fl.sync() +fl.close() + + +# Make Sponge forcing file +dampTime = 20.0 # days +secDays = 8.64e4 +fl = nc.Dataset('sponge.nc', 'w', format='NETCDF3_CLASSIC') +fl.createDimension('lon', nx) +fl.createDimension('lat', ny) +lonv = fl.createVariable('lon', 'f8', ('lon')) +latv = fl.createVariable('lat', 'f8', ('lat')) +spv = fl.createVariable('Idamp', 'f8', ('lat', 'lon'), fill_value=-1.e20) +Idamp = np.zeros((ny, nx)) +if dampTime > 0.: + Idamp = 0.0 + 1.0 / (dampTime * secDays) +spv[:] = Idamp +lonv[:] = x[0, :] +latv[:] = y[:, 0] +fl.sync() +fl.close() diff --git a/.testing/tc4/build_grid.py b/.testing/tc4/build_grid.py new file mode 100644 index 0000000000..7f1be74efd --- /dev/null +++ b/.testing/tc4/build_grid.py @@ -0,0 +1,76 @@ +import netCDF4 as nc +from netCDF4 import stringtochar +import numpy as np + +nx, ny = 14, 10 # Grid size +depth0 = 100. 
# Uniform depth +ds = 0.01 # grid resolution at the equator in degrees +Re = 6.378e6 # Radius of earth + +topo_ = np.zeros((ny, nx)) + depth0 +f_topo = nc.Dataset('topog.nc', 'w', format='NETCDF3_CLASSIC') +ny, nx = topo_.shape +f_topo.createDimension('ny', ny) +f_topo.createDimension('nx', nx) +f_topo.createDimension('ntiles', 1) +f_topo.createVariable('depth', 'f8', ('ny', 'nx')) +f_topo.createVariable('h2', 'f8', ('ny', 'nx')) +f_topo.variables['depth'][:] = topo_ +f_topo.sync() +f_topo.close() + +x_ = np.arange(0, 2*nx + 1) * ds # units are degrees E +y_ = np.arange(0, 2*ny + 1) * ds # units are degrees N +x, y = np.meshgrid(x_, y_) + +dx = np.zeros((2*ny + 1, 2*nx)) +dy = np.zeros((2*ny, 2*nx + 1)) +rad_deg = np.pi / 180. +dx[:] = (rad_deg * Re * (x[:, 1:] - x[:, 0:-1]) + * np.cos(0.5*rad_deg*(y[:, 0:-1] + y[:, 1:]))) +dy[:] = rad_deg * Re * (y[1:, :] - y[0:-1, :]) + +f_sg = nc.Dataset('ocean_hgrid.nc', 'w', format='NETCDF3_CLASSIC') +f_sg.createDimension('ny', 2*ny) +f_sg.createDimension('nx', 2*nx) +f_sg.createDimension('nyp', 2*ny + 1) +f_sg.createDimension('nxp', 2*nx + 1) +f_sg.createDimension('string', 5) +f_sg.createVariable('y', 'f8', ('nyp', 'nxp')) +f_sg.createVariable('x', 'f8', ('nyp', 'nxp')) +dyv = f_sg.createVariable('dy', 'f8', ('ny', 'nxp')) +dxv = f_sg.createVariable('dx', 'f8', ('nyp', 'nx')) +areav = f_sg.createVariable('area', 'f8', ('ny', 'nx')) +dxv.units = 'm' +dyv.units = 'm' +areav.units = 'm2' +f_sg.createVariable('angle_dx', 'f8', ('nyp', 'nxp')) +f_sg.createVariable('tile', 'S1', ('string')) +f_sg.variables['y'].units = 'degrees' +f_sg.variables['x'].units = 'degrees' +f_sg.variables['dy'].units = 'meters' +f_sg.variables['dx'].units = 'meters' +f_sg.variables['area'].units = 'm2' +f_sg.variables['angle_dx'].units = 'degrees' +f_sg.variables['y'][:] = y +f_sg.variables['x'][:] = x +f_sg.variables['dx'][:] = dx +f_sg.variables['dy'][:] = dy + +# Compute the area bounded by lines of constant +# latitude-longitud on a sphere in m2. +dlon = x_[1:] - x_[:-1] +dlon = np.tile(dlon[np.newaxis, :], (2*ny, 1)) +y1_ = y_[:-1] +y1_ = y1_[:, np.newaxis]*rad_deg +y2_ = y_[1:] +y2_ = y2_[:, np.newaxis]*rad_deg +y1_ = np.tile(y1_, (1, 2*nx)) +y2_ = np.tile(y2_, (1, 2*nx)) +area = rad_deg * Re * Re * (np.sin(y2_) - np.sin(y1_)) * dlon +f_sg.variables['area'][:] = area +f_sg.variables['angle_dx'][:] = 0. +str_ = stringtochar(np.array(['tile1'], dtype='S5')) +f_sg.variables['tile'][:] = str_ +f_sg.sync() +f_sg.close() diff --git a/.testing/_tc4/diag_table b/.testing/tc4/diag_table similarity index 100% rename from .testing/_tc4/diag_table rename to .testing/tc4/diag_table diff --git a/.testing/tc4/input.nml b/.testing/tc4/input.nml new file mode 100644 index 0000000000..0b30a7a5a6 --- /dev/null +++ b/.testing/tc4/input.nml @@ -0,0 +1,18 @@ +&mom_input_nml + output_directory = './' + input_filename = 'n' + restart_input_dir = 'INPUT/' + restart_output_dir = 'RESTART/' + parameter_filename = + 'MOM_input', + 'MOM_override', +/ + +&diag_manager_nml + flush_nc_files = .true. +/ + +&fms_nml + domains_stack_size = 710000 + stack_size = 0 +/ From a5082d55461e8dd1c159b8488777f1dd85a56027 Mon Sep 17 00:00:00 2001 From: Marshall Ward Date: Fri, 4 Oct 2019 20:13:59 -0400 Subject: [PATCH 2/9] Travis python support; tc4 Makefile The custom TC4 Makefile has been added (oops), and the presumed Python Ubuntu packages have been added for Travis. 
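For reference, the tc4 input fields are generated in the work directory by the
"if [ -f Makefile ]; then make; fi" step added to .testing/Makefile in the previous patch;
the equivalent manual invocation should be roughly

    cd .testing/tc4
    python build_grid.py   # writes ocean_hgrid.nc and topog.nc
    python build_data.py   # writes temp_salt_ic.nc and sponge.nc

The output file names above are taken from the build scripts themselves; the python-numpy and
python-netcdf4 packages added to .travis.yml are assumed to provide the numpy and netCDF4
modules that the scripts import.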
--- .testing/tc4/Makefile | 3 +++ .travis.yml | 1 + 2 files changed, 4 insertions(+) create mode 100644 .testing/tc4/Makefile diff --git a/.testing/tc4/Makefile b/.testing/tc4/Makefile new file mode 100644 index 0000000000..cea78bf3bd --- /dev/null +++ b/.testing/tc4/Makefile @@ -0,0 +1,3 @@ +all: + python build_grid.py + python build_data.py diff --git a/.travis.yml b/.travis.yml index 41d9d9b348..ac37117709 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,6 +17,7 @@ addons: packages: - tcsh pkg-config netcdf-bin libnetcdf-dev libnetcdff-dev openmpi-bin libopenmpi-dev gfortran - doxygen graphviz flex bison cmake + - python-numpy python-netcdf4 jobs: include: From defb0c59dd4d54ab0a9b2c069f5550dd737a5181 Mon Sep 17 00:00:00 2001 From: Matthew Harrison Date: Thu, 24 Oct 2019 21:15:09 -0400 Subject: [PATCH 3/9] Verify ENABLE_THERMODYNAMICS is True before posting C_p diagnostic --- src/diagnostics/MOM_diagnostics.F90 | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/src/diagnostics/MOM_diagnostics.F90 b/src/diagnostics/MOM_diagnostics.F90 index 7344a5e677..5a05a9a453 100644 --- a/src/diagnostics/MOM_diagnostics.F90 +++ b/src/diagnostics/MOM_diagnostics.F90 @@ -1859,7 +1859,8 @@ subroutine write_static_fields(G, GV, US, tv, diag) ! Local variables integer :: id - + logical :: use_temperature + id = register_static_field('ocean_model', 'geolat', diag%axesT1, & 'Latitude of tracer (T) points', 'degrees_north') if (id > 0) call post_data(id, G%geoLatT, diag, .true.) @@ -2011,12 +2012,15 @@ subroutine write_static_fields(G, GV, US, tv, diag) cmor_long_name='reference sea water density for boussinesq approximation') if (id > 0) call post_data(id, GV%Rho0, diag, .true.) - id = register_static_field('ocean_model','C_p', diag%axesNull, & - 'heat capacity of sea water', 'J kg-1 K-1', cmor_field_name='cpocean', & - cmor_standard_name='specific_heat_capacity_of_sea_water', & - cmor_long_name='specific_heat_capacity_of_sea_water') - if (id > 0) call post_data(id, tv%C_p, diag, .true.) - + use_temperature = associated(tv%T) + if (use_temperature) then + id = register_static_field('ocean_model','C_p', diag%axesNull, & + 'heat capacity of sea water', 'J kg-1 K-1', cmor_field_name='cpocean', & + cmor_standard_name='specific_heat_capacity_of_sea_water', & + cmor_long_name='specific_heat_capacity_of_sea_water') + if (id > 0) call post_data(id, tv%C_p, diag, .true.) + endif + end subroutine write_static_fields From 3c0d52ac8a93465f0ff3d29fb72587eb75d9a35e Mon Sep 17 00:00:00 2001 From: Matthew Harrison Date: Fri, 25 Oct 2019 10:47:30 -0400 Subject: [PATCH 4/9] Make tc4 faster --- .testing/tc4/MOM_input | 9 +++++++-- .testing/tc4/build_data.py | 2 +- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/.testing/tc4/MOM_input b/.testing/tc4/MOM_input index 456783af88..deb496315a 100644 --- a/.testing/tc4/MOM_input +++ b/.testing/tc4/MOM_input @@ -7,10 +7,15 @@ USE_REGRIDDING = True ! [Boolean] default = False ! If True, use the ALE algorithm (regridding/remapping). If False, use the ! layered isopycnal algorithm. -DT = 300.0 ! [s] +DT = 1200.0 ! [s] ! The (baroclinic) dynamics time step. The time-step that is actually used will ! be an integer fraction of the forcing time-step (DT_FORCING in ocean-only mode ! or the coupling timestep in coupled mode.) +DT_THERM = 3600.0 ! [s] default = 300.0 + ! The thermodynamic and tracer advection time step. Ideally DT_THERM should be + ! an integer multiple of DT and less than the forcing or coupling time-step, + ! 
unless THERMO_SPANS_COUPLING is true, in which case DT_THERM can be an integer + ! multiple of the coupling timestep. By default DT_THERM is set to DT. C_P = 3925.0 ! [J kg-1 K-1] default = 3991.86795711963 ! The heat capacity of sea water, approximated as a constant. This is only used ! if ENABLE_THERMODYNAMICS is true. The default value is from the TEOS-10 @@ -377,7 +382,7 @@ WIND_CONFIG = "zero" ! ! === module MOM_restart === ! === module MOM_main (MOM_driver) === -DAYMAX = 1.0 ! [days] +DAYMAX = 0.25 ! [days] ! The final time of the whole simulation, in units of TIMEUNIT seconds. This ! also sets the potential end time of the present run segment if the end time is ! not set via ocean_solo_nml in input.nml. diff --git a/.testing/tc4/build_data.py b/.testing/tc4/build_data.py index 50f45ce9f1..e060d05cb1 100644 --- a/.testing/tc4/build_data.py +++ b/.testing/tc4/build_data.py @@ -30,7 +30,7 @@ def t_fc(x, y, z, radius=5.0, tmag=1.0): ny, nx = x.shape -nz = 10 +nz = 3 z = (np.arange(nz) * zbot0) / nz temp = t_fc(x, y, z) From 817bdde76ccd24490786e5dd2554077b0b79f7e9 Mon Sep 17 00:00:00 2001 From: Matthew Harrison Date: Fri, 25 Oct 2019 10:54:12 -0400 Subject: [PATCH 5/9] remove trailing whitespace --- src/diagnostics/MOM_diagnostics.F90 | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/diagnostics/MOM_diagnostics.F90 b/src/diagnostics/MOM_diagnostics.F90 index 5a05a9a453..b8696969df 100644 --- a/src/diagnostics/MOM_diagnostics.F90 +++ b/src/diagnostics/MOM_diagnostics.F90 @@ -1860,7 +1860,7 @@ subroutine write_static_fields(G, GV, US, tv, diag) ! Local variables integer :: id logical :: use_temperature - + id = register_static_field('ocean_model', 'geolat', diag%axesT1, & 'Latitude of tracer (T) points', 'degrees_north') if (id > 0) call post_data(id, G%geoLatT, diag, .true.) @@ -2012,7 +2012,7 @@ subroutine write_static_fields(G, GV, US, tv, diag) cmor_long_name='reference sea water density for boussinesq approximation') if (id > 0) call post_data(id, GV%Rho0, diag, .true.) - use_temperature = associated(tv%T) + use_temperature = associated(tv%T) if (use_temperature) then id = register_static_field('ocean_model','C_p', diag%axesNull, & 'heat capacity of sea water', 'J kg-1 K-1', cmor_field_name='cpocean', & @@ -2020,7 +2020,7 @@ subroutine write_static_fields(G, GV, US, tv, diag) cmor_long_name='specific_heat_capacity_of_sea_water') if (id > 0) call post_data(id, tv%C_p, diag, .true.) endif - + end subroutine write_static_fields From 231f1204b035297bd8e1f37c112b5c9fb1c572ab Mon Sep 17 00:00:00 2001 From: Matthew Harrison Date: Tue, 29 Oct 2019 10:16:38 -0400 Subject: [PATCH 6/9] add unit scaling --- .testing/tc4/MOM_input | 5 +++++ src/initialization/MOM_state_initialization.F90 | 4 ++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.testing/tc4/MOM_input b/.testing/tc4/MOM_input index deb496315a..2b08e9bccb 100644 --- a/.testing/tc4/MOM_input +++ b/.testing/tc4/MOM_input @@ -386,6 +386,11 @@ DAYMAX = 0.25 ! [days] ! The final time of the whole simulation, in units of TIMEUNIT seconds. This ! also sets the potential end time of the present run segment if the end time is ! not set via ocean_solo_nml in input.nml. + +ENERGYSAVEDAYS = 0.125 ! [days] default = 1.44E+04 + ! The interval in units of TIMEUNIT between saves of the + ! energies of the run and other globally summed diagnostics. + RESTART_CONTROL = 3 ! default = 1 ! An integer whose bits encode which restart files are written. Add 2 (bit 1) ! 
for a time-stamped file, and odd (bit 0) for a non-time-stamped file. A diff --git a/src/initialization/MOM_state_initialization.F90 b/src/initialization/MOM_state_initialization.F90 index 1f5401ee58..bfebf95c08 100644 --- a/src/initialization/MOM_state_initialization.F90 +++ b/src/initialization/MOM_state_initialization.F90 @@ -1864,8 +1864,8 @@ subroutine initialize_sponges_file(G, GV, US, use_temperature, tv, param_file, C call MOM_read_data(filename, salin_var, tmp(:,:,:), G%Domain) call set_up_sponge_field(tmp, tv%S, G, nz, CSp) elseif (use_temperature) then - call set_up_ALE_sponge_field(filename, potemp_var, Time, G, GV, tv%T, ALE_CSp) - call set_up_ALE_sponge_field(filename, salin_var, Time, G, GV, tv%S, ALE_CSp) + call set_up_ALE_sponge_field(filename, potemp_var, Time, G, GV, US, tv%T, ALE_CSp) + call set_up_ALE_sponge_field(filename, salin_var, Time, G, GV, US, tv%S, ALE_CSp) endif end subroutine initialize_sponges_file From c4606e1d03143a0bdefa0690541393612d7aa0be Mon Sep 17 00:00:00 2001 From: Matthew Harrison Date: Tue, 29 Oct 2019 10:17:42 -0400 Subject: [PATCH 7/9] fix restart fail for tc4 and some cleanup --- .../vertical/MOM_ALE_sponge.F90 | 223 ++++++------------ 1 file changed, 66 insertions(+), 157 deletions(-) diff --git a/src/parameterizations/vertical/MOM_ALE_sponge.F90 b/src/parameterizations/vertical/MOM_ALE_sponge.F90 index 17b601427c..89229c642d 100644 --- a/src/parameterizations/vertical/MOM_ALE_sponge.F90 +++ b/src/parameterizations/vertical/MOM_ALE_sponge.F90 @@ -11,7 +11,6 @@ module MOM_ALE_sponge ! This file is part of MOM6. See LICENSE.md for the license. - use MOM_coms, only : sum_across_PEs use MOM_diag_mediator, only : post_data, query_averaging_enabled, register_diag_field use MOM_diag_mediator, only : diag_ctrl @@ -24,7 +23,6 @@ module MOM_ALE_sponge use MOM_remapping, only : remapping_cs, remapping_core_h, initialize_remapping use MOM_unit_scaling, only : unit_scale_type use MOM_verticalGrid, only : verticalGrid_type -! GMM - Planned extension: Support for time varying sponge targets. implicit none ; private @@ -129,7 +127,7 @@ module MOM_ALE_sponge type(remapping_cs) :: remap_cs !< Remapping parameters and work arrays - logical :: new_sponges !< True if using newer sponge code + logical :: time_varying_sponges !< True if using newer sponge code logical :: spongeDataOngrid !< True if the sponge data are on the model horizontal grid end type ALE_sponge_CS @@ -195,7 +193,7 @@ subroutine initialize_ALE_sponge_fixed(Iresttime, G, param_file, CS, data_h, nz_ "PPM reconstruction will also be used within boundary cells.", & default=.false., do_not_log=.true.) - CS%new_sponges = .false. + CS%time_varying_sponges = .false. CS%nz = G%ke CS%isc = G%isc ; CS%iec = G%iec ; CS%jsc = G%jsc ; CS%jec = G%jec CS%isd = G%isd ; CS%ied = G%ied ; CS%jsd = G%jsd ; CS%jed = G%jed @@ -370,7 +368,7 @@ subroutine get_ALE_sponge_thicknesses(G, data_h, sponge_mask, CS) end subroutine get_ALE_sponge_thicknesses -!> This subroutine determines the number of points which are within sponges in this computational +!> This subroutine determines the number of points which are to be restoref in the computational !! domain. Only points that have positive values of Iresttime and which mask2dT indicates are ocean !! points are included in the sponges. 
subroutine initialize_ALE_sponge_varying(Iresttime, G, param_file, CS) @@ -382,8 +380,6 @@ subroutine initialize_ALE_sponge_varying(Iresttime, G, param_file, CS) type(ALE_sponge_CS), pointer :: CS !< A pointer that is set to point to the control !! structure for this module (in/out). - - ! This include declares and sets the variable "version". #include "version_variable.h" character(len=40) :: mdl = "MOM_sponge" ! This module's name. @@ -394,45 +390,38 @@ subroutine initialize_ALE_sponge_varying(Iresttime, G, param_file, CS) logical :: spongeDataOngrid = .false. integer :: i, j, k, col, total_sponge_cols, total_sponge_cols_u, total_sponge_cols_v character(len=10) :: remapScheme + if (associated(CS)) then call MOM_error(WARNING, "initialize_sponge called with an associated "// & "control structure.") return endif - ! Set default, read and log parameters call log_version(param_file, mdl, version, "") call get_param(param_file, mdl, "SPONGE", use_sponge, & "If true, sponges may be applied anywhere in the domain. "//& "The exact location and properties of those sponges are "//& "specified from MOM_initialization.F90.", default=.false.) - if (.not.use_sponge) return - allocate(CS) - call get_param(param_file, mdl, "SPONGE_UV", CS%sponge_uv, & "Apply sponges in u and v, in addition to tracers.", & default=.false.) - call get_param(param_file, mdl, "REMAPPING_SCHEME", remapScheme, & "This sets the reconstruction scheme used "//& " for vertical remapping for all variables.", & default="PLM", do_not_log=.true.) - call get_param(param_file, mdl, "BOUNDARY_EXTRAPOLATION", bndExtrapolation, & "When defined, a proper high-order reconstruction "//& "scheme is used within boundary cells rather "//& "than PCM. E.g., if PPM is used for remapping, a "//& "PPM reconstruction will also be used within boundary cells.", & default=.false., do_not_log=.true.) - call get_param(param_file, mdl, "SPONGE_DATA_ONGRID", CS%spongeDataOngrid, & "When defined, the incoming sponge data are "//& "assumed to be on the model grid " , & default=.false.) - - CS%new_sponges = .true. + CS%time_varying_sponges = .true. CS%nz = G%ke CS%isc = G%isc ; CS%iec = G%iec ; CS%jsc = G%jsc ; CS%jec = G%jec CS%isd = G%isd ; CS%ied = G%ied ; CS%jsd = G%jsd ; CS%jed = G%jed @@ -444,8 +433,6 @@ subroutine initialize_ALE_sponge_varying(Iresttime, G, param_file, CS) if ((Iresttime(i,j)>0.0) .and. (G%mask2dT(i,j)>0)) & CS%num_col = CS%num_col + 1 enddo ; enddo - - if (CS%num_col > 0) then allocate(CS%Iresttime_col(CS%num_col)) ; CS%Iresttime_col = 0.0 allocate(CS%col_i(CS%num_col)) ; CS%col_i = 0 @@ -460,21 +447,16 @@ subroutine initialize_ALE_sponge_varying(Iresttime, G, param_file, CS) endif enddo ; enddo endif - total_sponge_cols = CS%num_col call sum_across_PEs(total_sponge_cols) ! Call the constructor for remapping control structure call initialize_remapping(CS%remap_cs, remapScheme, boundary_extrapolation=bndExtrapolation) - call log_param(param_file, mdl, "!Total sponge columns at h points", total_sponge_cols, & "The total number of columns where sponges are applied at h points.") - if (CS%sponge_uv) then - allocate(Iresttime_u(G%isdB:G%iedB,G%jsd:G%jed)); Iresttime_u(:,:)=0.0 allocate(Iresttime_v(G%isd:G%ied,G%jsdB:G%jedB)); Iresttime_v(:,:)=0.0 - ! u points CS%num_col_u = 0 ; !CS%fldno_u = 0 do j=CS%jsc,CS%jec; do I=CS%iscB,CS%iecB @@ -482,13 +464,10 @@ subroutine initialize_ALE_sponge_varying(Iresttime, G, param_file, CS) if ((Iresttime_u(I,j)>0.0) .and. 
(G%mask2dCu(I,j)>0)) & CS%num_col_u = CS%num_col_u + 1 enddo ; enddo - if (CS%num_col_u > 0) then - allocate(CS%Iresttime_col_u(CS%num_col_u)) ; CS%Iresttime_col_u = 0.0 allocate(CS%col_i_u(CS%num_col_u)) ; CS%col_i_u = 0 allocate(CS%col_j_u(CS%num_col_u)) ; CS%col_j_u = 0 - ! pass indices, restoring time to the CS structure col = 1 do j=CS%jsc,CS%jec ; do I=CS%iscB,CS%iecB @@ -498,15 +477,12 @@ subroutine initialize_ALE_sponge_varying(Iresttime, G, param_file, CS) col = col +1 endif enddo ; enddo - ! same for total number of arbritary layers and correspondent data - endif total_sponge_cols_u = CS%num_col_u call sum_across_PEs(total_sponge_cols_u) call log_param(param_file, mdl, "!Total sponge columns at u points", total_sponge_cols_u, & "The total number of columns where sponges are applied at u points.") - ! v points CS%num_col_v = 0 ; !CS%fldno_v = 0 do J=CS%jscB,CS%jecB; do i=CS%isc,CS%iec @@ -514,13 +490,10 @@ subroutine initialize_ALE_sponge_varying(Iresttime, G, param_file, CS) if ((Iresttime_v(i,J)>0.0) .and. (G%mask2dCv(i,J)>0)) & CS%num_col_v = CS%num_col_v + 1 enddo ; enddo - if (CS%num_col_v > 0) then - allocate(CS%Iresttime_col_v(CS%num_col_v)) ; CS%Iresttime_col_v = 0.0 allocate(CS%col_i_v(CS%num_col_v)) ; CS%col_i_v = 0 allocate(CS%col_j_v(CS%num_col_v)) ; CS%col_j_v = 0 - ! pass indices, restoring time to the CS structure col = 1 do J=CS%jscB,CS%jecB ; do i=CS%isc,CS%iec @@ -530,7 +503,6 @@ subroutine initialize_ALE_sponge_varying(Iresttime, G, param_file, CS) col = col +1 endif enddo ; enddo - endif total_sponge_cols_v = CS%num_col_v call sum_across_PEs(total_sponge_cols_v) @@ -594,7 +566,7 @@ end subroutine set_up_ALE_sponge_field_fixed !> This subroutine stores the reference profile at h points for the variable !! whose address is given by filename and fieldname. -subroutine set_up_ALE_sponge_field_varying(filename, fieldname, Time, G, GV, f_ptr, CS) +subroutine set_up_ALE_sponge_field_varying(filename, fieldname, Time, G, GV, US, f_ptr, CS) character(len=*), intent(in) :: filename !< The name of the file with the !! time varying field data character(len=*), intent(in) :: fieldname !< The name of the field in the file @@ -602,6 +574,7 @@ subroutine set_up_ALE_sponge_field_varying(filename, fieldname, Time, G, GV, f_p type(time_type), intent(in) :: Time !< The current model time type(ocean_grid_type), intent(in) :: G !< Grid structure (in). type(verticalGrid_type), intent(in) :: GV !< ocean vertical grid structure + type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type real, dimension(SZI_(G),SZJ_(G),SZK_(G)), & target, intent(in) :: f_ptr !< Pointer to the field to be damped (in). type(ALE_sponge_CS), pointer :: CS !< Sponge control structure (in/out). @@ -617,101 +590,42 @@ subroutine set_up_ALE_sponge_field_varying(filename, fieldname, Time, G, GV, f_p integer, dimension(4) :: fld_sz integer :: nz_data !< the number of vertical levels in this input field character(len=256) :: mesg ! String for error messages - ! Local variables for ALE remapping - real, dimension(:), allocatable :: hsrc ! Source thicknesses [Z ~> m]. real, dimension(:), allocatable :: tmpT1d real :: zTopOfCell, zBottomOfCell ! Heights [Z ~> m]. type(remapping_CS) :: remapCS ! Remapping parameters and work arrays if (.not.associated(CS)) return - - ! Call this in case it was not previously done. + ! 
initialize time interpolator module call time_interp_external_init() - isd = G%isd; ied = G%ied; jsd = G%jsd; jed = G%jed CS%fldno = CS%fldno + 1 - if (CS%fldno > MAX_FIELDS_) then write(mesg,'("Increase MAX_FIELDS_ to at least ",I3," in MOM_memory.h or decrease & &the number of fields to be damped in the call to & &initialize_sponge." )') CS%fldno call MOM_error(FATAL,"set_up_ALE_sponge_field: "//mesg) endif - - ! get a unique id for this field which will allow us to return an array - ! containing time-interpolated values from an external file corresponding - ! to the current model date. - + ! get a unique time interp id for this field. If sponge data is ongrid, then setup + ! to only read on the computational domain if (CS%spongeDataOngrid) then CS%Ref_val(CS%fldno)%id = init_external_field(filename, fieldname,domain=G%Domain%mpp_domain) else CS%Ref_val(CS%fldno)%id = init_external_field(filename, fieldname) endif - fld_sz(1:4)=-1 fld_sz = get_external_field_size(CS%Ref_val(CS%fldno)%id) nz_data = fld_sz(3) - CS%Ref_val(CS%fldno)%nz_data = nz_data !< each individual sponge field is assumed to reside on a different grid + CS%Ref_val(CS%fldno)%nz_data = nz_data !< individual sponge fields may reside on a different vertical grid CS%Ref_val(CS%fldno)%num_tlevs = fld_sz(4) - - allocate( sp_val(isd:ied,jsd:jed, nz_data) ) - allocate( mask_z(isd:ied,jsd:jed, nz_data) ) - - ! initializes the current reference profile array + ! initializes the target profile array for this field + ! for all columns which will be masked allocate(CS%Ref_val(CS%fldno)%p(nz_data,CS%num_col)) CS%Ref_val(CS%fldno)%p(:,:) = 0.0 allocate( CS%Ref_val(CS%fldno)%h(nz_data,CS%num_col) ) CS%Ref_val(CS%fldno)%h(:,:) = 0.0 - - ! Interpolate external file data to the model grid - ! I am hard-wiring this call to assume that the input grid is zonally re-entrant - ! In the future, this should be generalized using an interface to return the - ! modulo attribute of the zonal axis (mjh). - -! call horiz_interp_and_extrap_tracer(CS%Ref_val(CS%fldno)%id,Time, 1.0,G,sp_val,mask_z,z_in,z_edges_in, & -! missing_value, .true., .false., .false., m_to_Z=US%m_to_Z) - - ! Do not think halo updates are needed (mjh) -! call pass_var(sp_val,G%Domain) -! call pass_var(mask_z,G%Domain) - - ! Done with horizontal interpolation. - ! Now remap to model coordinates - ! First we reserve a work space for reconstructions of the source data - allocate( hsrc(nz_data) ) - allocate( tmpT1d(nz_data) ) - - do col=1,CS%num_col - ! Build the source grid - zTopOfCell = 0. ; zBottomOfCell = 0. ; nPoints = 0; hsrc(:) = 0.0; tmpT1d(:) = -99.9 - do k=1,nz_data - if (mask_z(CS%col_i(col),CS%col_j(col),k) == 1.0) then - zBottomOfCell = -min( z_edges_in(k+1), G%bathyT(CS%col_i(col),CS%col_j(col)) ) -! tmpT1d(k) = sp_val(CS%col_i(col),CS%col_j(col),k) - elseif (k>1) then - zBottomOfCell = -G%bathyT(CS%col_i(col),CS%col_j(col)) -! tmpT1d(k) = tmpT1d(k-1) -! else ! This next block should only ever be reached over land -! tmpT1d(k) = -99.9 - endif - hsrc(k) = zTopOfCell - zBottomOfCell - if (hsrc(k)>0.) nPoints = nPoints + 1 - zTopOfCell = zBottomOfCell ! Bottom becomes top for next value of k - enddo - ! In case data is deeper than model - hsrc(nz_data) = hsrc(nz_data) + ( zTopOfCell + G%bathyT(CS%col_i(col),CS%col_j(col)) ) - CS%Ref_val(CS%fldno)%h(1:nz_data,col) = 0. - CS%Ref_val(CS%fldno)%p(1:nz_data,col) = -1.e24 - CS%Ref_val(CS%fldno)%h(1:nz_data,col) = GV%Z_to_H*hsrc(1:nz_data) -! 
CS%Ref_val(CS%fldno)%p(1:nz_data,col) = tmpT1d(1:nz_data) - enddo - CS%var(CS%fldno)%p => f_ptr - deallocate( hSrc ) - deallocate( tmpT1d ) - deallocate(sp_val, mask_z) end subroutine set_up_ALE_sponge_field_varying @@ -740,9 +654,7 @@ subroutine set_up_ALE_sponge_vel_field_fixed(u_val, v_val, G, u_ptr, v_ptr, CS) CS%Ref_val_u%p(k,col) = u_val(CS%col_i_u(col),CS%col_j_u(col),k) enddo enddo - CS%var_u%p => u_ptr - allocate(CS%Ref_val_v%p(CS%nz_data,CS%num_col_v)) CS%Ref_val_v%p(:,:) = 0.0 do col=1,CS%num_col_v @@ -750,7 +662,6 @@ subroutine set_up_ALE_sponge_vel_field_fixed(u_val, v_val, G, u_ptr, v_ptr, CS) CS%Ref_val_v%p(k,col) = v_val(CS%col_i_v(col),CS%col_j_v(col),k) enddo enddo - CS%var_v%p => v_ptr end subroutine set_up_ALE_sponge_vel_field_fixed @@ -788,46 +699,36 @@ subroutine set_up_ALE_sponge_vel_field_varying(filename_u, fieldname_u, filename isd = G%isd; ied = G%ied; jsd = G%jsd; jed = G%jed isdB = G%isdB; iedB = G%iedB; jsdB = G%jsdB; jedB = G%jedB - ! get a unique id for this field which will allow us to return an array ! containing time-interpolated values from an external file corresponding ! to the current model date. - CS%Ref_val_u%id = init_external_field(filename_u, fieldname_u) fld_sz(1:4)=-1 fld_sz = get_external_field_size(CS%Ref_val_u%id) CS%Ref_val_u%nz_data = fld_sz(3) CS%Ref_val_u%num_tlevs = fld_sz(4) - CS%Ref_val_v%id = init_external_field(filename_v, fieldname_v) fld_sz(1:4)=-1 fld_sz = get_external_field_size(CS%Ref_val_v%id) CS%Ref_val_v%nz_data = fld_sz(3) CS%Ref_val_v%num_tlevs = fld_sz(4) - allocate( u_val(isdB:iedB,jsd:jed, fld_sz(3)) ) allocate( mask_u(isdB:iedB,jsd:jed, fld_sz(3)) ) allocate( v_val(isd:ied,jsdB:jedB, fld_sz(3)) ) allocate( mask_v(isd:ied,jsdB:jedB, fld_sz(3)) ) - ! Interpolate external file data to the model grid ! I am hard-wiring this call to assume that the input grid is zonally re-entrant ! In the future, this should be generalized using an interface to return the ! modulo attribute of the zonal axis (mjh). - call horiz_interp_and_extrap_tracer(CS%Ref_val_u%id,Time, 1.0,G,u_val,mask_u,z_in,z_edges_in,& missing_value,.true.,.false.,.false., m_to_Z=US%m_to_Z) - !!! TODO: add a velocity interface! (mjh) - ! Interpolate external file data to the model grid ! I am hard-wiring this call to assume that the input grid is zonally re-entrant ! In the future, this should be generalized using an interface to return the ! modulo attribute of the zonal axis (mjh). - call horiz_interp_and_extrap_tracer(CS%Ref_val_v%id,Time, 1.0,G,v_val,mask_v,z_in,z_edges_in, & missing_value,.true.,.false.,.false., m_to_Z=US%m_to_Z) - ! stores the reference profile allocate(CS%Ref_val_u%p(fld_sz(3),CS%num_col_u)) CS%Ref_val_u%p(:,:) = 0.0 @@ -836,9 +737,7 @@ subroutine set_up_ALE_sponge_vel_field_varying(filename_u, fieldname_u, filename CS%Ref_val_u%p(k,col) = u_val(CS%col_i_u(col),CS%col_j_u(col),k) enddo enddo - CS%var_u%p => u_ptr - allocate(CS%Ref_val_v%p(fld_sz(3),CS%num_col_v)) CS%Ref_val_v%p(:,:) = 0.0 do col=1,CS%num_col_v @@ -846,7 +745,6 @@ subroutine set_up_ALE_sponge_vel_field_varying(filename_u, fieldname_u, filename CS%Ref_val_v%p(k,col) = v_val(CS%col_i_v(col),CS%col_j_v(col),k) enddo enddo - CS%var_v%p => v_ptr end subroutine set_up_ALE_sponge_vel_field_varying @@ -874,13 +772,18 @@ subroutine apply_ALE_sponge(h, dt, G, GV, US, CS, Time) real :: hv(SZI_(G), SZJB_(G), SZK_(G)) ! A temporary array for h at v pts real, allocatable, dimension(:,:,:) :: sp_val ! A temporary array for fields real, allocatable, dimension(:,:,:) :: mask_z ! 
A temporary array for field mask at h pts + real, dimension(:), allocatable :: hsrc ! Source thicknesses [Z ~> m]. + ! Local variables for ALE remapping + real, dimension(:), allocatable :: tmpT1d integer :: c, m, nkmb, i, j, k, is, ie, js, je, nz, nz_data + integer :: col, total_sponge_cols real, allocatable, dimension(:), target :: z_in, z_edges_in real :: missing_value real :: h_neglect, h_neglect_edge - + real :: zTopOfCell, zBottomOfCell ! Heights [Z ~> m]. + integer :: nPoints + is = G%isc ; ie = G%iec ; js = G%jsc ; je = G%jec ; nz = G%ke - if (.not.associated(CS)) return if (GV%Boussinesq) then @@ -889,46 +792,57 @@ subroutine apply_ALE_sponge(h, dt, G, GV, US, CS, Time) h_neglect = GV%kg_m2_to_H*1.0e-30 ; h_neglect_edge = GV%kg_m2_to_H*1.0e-10 endif - if (CS%new_sponges) then + if (CS%time_varying_sponges) then if (.not. present(Time)) & call MOM_error(FATAL,"apply_ALE_sponge: No time information provided") - -! Interpolate new grid in time-space do m=1,CS%fldno - - nz_data = CS%Ref_val(m)%nz_data - allocate(sp_val(G%isd:G%ied,G%jsd:G%jed,1:nz_data)) - allocate(mask_z(G%isd:G%ied,G%jsd:G%jed,1:nz_data)) - sp_val(:,:,:)=0.0 - mask_z(:,:,:)=0.0 - - call horiz_interp_and_extrap_tracer(CS%Ref_val(m)%id,Time, 1.0,G,sp_val,mask_z,z_in,z_edges_in, & - missing_value,.true., .false.,.false., m_to_Z=US%m_to_Z,spongeOnGrid=CS%SpongeDataOngrid) - -! call pass_var(sp_val,G%Domain) -! call pass_var(mask_z,G%Domain) - - - do c=1,CS%num_col - i = CS%col_i(c) ; j = CS%col_j(c) - CS%Ref_val(m)%p(1:nz_data,c) = sp_val(i,j,1:nz_data) - do k=2,nz_data -! if (mask_z(i,j,k)==0.) & - if (CS%Ref_val(m)%h(k,c) <= 0.001*GV%m_to_H) & - ! some confusion here about why the masks are not correct returning from horiz_interp - ! reverting to using a minimum thickness criteria - CS%Ref_val(m)%p(k,c) = CS%Ref_val(m)%p(k-1,c) - enddo + nz_data = CS%Ref_val(m)%nz_data + allocate(sp_val(G%isd:G%ied,G%jsd:G%jed,1:nz_data)) + allocate(mask_z(G%isd:G%ied,G%jsd:G%jed,1:nz_data)) + sp_val(:,:,:)=0.0 + mask_z(:,:,:)=0.0 + call horiz_interp_and_extrap_tracer(CS%Ref_val(m)%id,Time, 1.0,G,sp_val,mask_z,z_in,z_edges_in, & + missing_value,.true., .false.,.false.,spongeOnGrid=CS%SpongeDataOngrid, m_to_Z=US%m_to_Z) + allocate( hsrc(nz_data) ) + allocate( tmpT1d(nz_data) ) + do c=1,CS%num_col + i = CS%col_i(c) ; j = CS%col_j(c) + CS%Ref_val(m)%p(1:nz_data,c) = sp_val(i,j,1:nz_data) + ! Build the source grid + zTopOfCell = 0. ; zBottomOfCell = 0. ; nPoints = 0; hsrc(:) = 0.0; tmpT1d(:) = -99.9 + do k=1,nz_data + if (mask_z(CS%col_i(c),CS%col_j(c),k) == 1.0) then + zBottomOfCell = -min( z_edges_in(k+1), G%bathyT(CS%col_i(c),CS%col_j(c)) ) + tmpT1d(k) = sp_val(CS%col_i(c),CS%col_j(c),k) + elseif (k>1) then + zBottomOfCell = -G%bathyT(CS%col_i(c),CS%col_j(c)) + tmpT1d(k) = tmpT1d(k-1) + else ! This next block should only ever be reached over land + tmpT1d(k) = -99.9 + endif + hsrc(k) = zTopOfCell - zBottomOfCell + if (hsrc(k)>0.) nPoints = nPoints + 1 + zTopOfCell = zBottomOfCell ! Bottom becomes top for next value of k + enddo + ! In case data is deeper than model + hsrc(nz_data) = hsrc(nz_data) + ( zTopOfCell + G%bathyT(CS%col_i(c),CS%col_j(c)) ) + CS%Ref_val(CS%fldno)%h(1:nz_data,c) = GV%Z_to_H*hsrc(1:nz_data) + CS%Ref_val(CS%fldno)%p(1:nz_data,c) = tmpT1d(1:nz_data) + do k=2,nz_data + ! if (mask_z(i,j,k)==0.) & + if (CS%Ref_val(m)%h(k,c) <= 0.001*GV%m_to_H) & + ! some confusion here about why the masks are not correct returning from horiz_interp + ! 
reverting to using a minimum thickness criteria + CS%Ref_val(m)%p(k,c) = CS%Ref_val(m)%p(k-1,c) + enddo enddo - - deallocate(sp_val, mask_z) + deallocate(sp_val, mask_z, hsrc, tmpT1d) enddo else nz_data = CS%nz_data endif allocate(tmp_val2(nz_data)) - do m=1,CS%fldno do c=1,CS%num_col ! c is an index for the next 3 lines but a multiplier for the rest of the loop @@ -937,7 +851,7 @@ subroutine apply_ALE_sponge(h, dt, G, GV, US, CS, Time) damp = dt*CS%Iresttime_col(c) I1pdamp = 1.0 / (1.0 + damp) tmp_val2(1:nz_data) = CS%Ref_val(m)%p(1:nz_data,c) - if (CS%new_sponges) then + if (CS%time_varying_sponges) then call remapping_core_h(CS%remap_cs, nz_data, CS%Ref_val(m)%h(1:nz_data,c), tmp_val2, & CS%nz, h(i,j,:), tmp_val1, h_neglect, h_neglect_edge) else @@ -946,7 +860,6 @@ subroutine apply_ALE_sponge(h, dt, G, GV, US, CS, Time) endif !Backward Euler method CS%var(m)%p(i,j,1:CS%nz) = I1pdamp * (CS%var(m)%p(i,j,1:CS%nz) + tmp_val1 * damp) - enddo enddo @@ -958,13 +871,11 @@ subroutine apply_ALE_sponge(h, dt, G, GV, US, CS, Time) !enddo if (CS%sponge_uv) then - ! u points do j=CS%jsc,CS%jec; do I=CS%iscB,CS%iecB; do k=1,nz hu(I,j,k) = 0.5 * (h(i,j,k) + h(i+1,j,k)) enddo ; enddo ; enddo - - if (CS%new_sponges) then + if (CS%time_varying_sponges) then if (.not. present(Time)) & call MOM_error(FATAL,"apply_ALE_sponge: No time information provided") @@ -974,10 +885,8 @@ subroutine apply_ALE_sponge(h, dt, G, GV, US, CS, Time) ! Interpolate from the external horizontal grid and in time call horiz_interp_and_extrap_tracer(CS%Ref_val_u%id,Time, 1.0,G,sp_val,mask_z,z_in,z_edges_in, & missing_value, .true., .false., .false., m_to_Z=US%m_to_Z) - ! call pass_var(sp_val,G%Domain) ! call pass_var(mask_z,G%Domain) - do c=1,CS%num_col ! c is an index for the next 3 lines but a multiplier for the rest of the loop ! Therefore we use c as per C code and increment the index where necessary. @@ -1014,9 +923,9 @@ subroutine apply_ALE_sponge(h, dt, G, GV, US, CS, Time) i = CS%col_i_u(c) ; j = CS%col_j_u(c) damp = dt*CS%Iresttime_col_u(c) I1pdamp = 1.0 / (1.0 + damp) - if (CS%new_sponges) nz_data = CS%Ref_val(m)%nz_data + if (CS%time_varying_sponges) nz_data = CS%Ref_val(m)%nz_data tmp_val2(1:nz_data) = CS%Ref_val_u%p(1:nz_data,c) - if (CS%new_sponges) then + if (CS%time_varying_sponges) then call remapping_core_h(CS%remap_cs, nz_data, CS%Ref_val_u%h(:,c), tmp_val2, & CS%nz, hu(i,j,:), tmp_val1, h_neglect, h_neglect_edge) else @@ -1037,7 +946,7 @@ subroutine apply_ALE_sponge(h, dt, G, GV, US, CS, Time) damp = dt*CS%Iresttime_col_v(c) I1pdamp = 1.0 / (1.0 + damp) tmp_val2(1:nz_data) = CS%Ref_val_v%p(1:nz_data,c) - if (CS%new_sponges) then + if (CS%time_varying_sponges) then call remapping_core_h(CS%remap_cs, CS%nz_data, CS%Ref_val_v%h(:,c), tmp_val2, & CS%nz, hv(i,j,:), tmp_val1, h_neglect, h_neglect_edge) else From 251b0d91cb451c9ccc745b23f97aa0b4cddde644 Mon Sep 17 00:00:00 2001 From: Matthew Harrison Date: Tue, 29 Oct 2019 14:57:32 -0400 Subject: [PATCH 8/9] remove trailiny ws --- src/parameterizations/vertical/MOM_ALE_sponge.F90 | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/parameterizations/vertical/MOM_ALE_sponge.F90 b/src/parameterizations/vertical/MOM_ALE_sponge.F90 index 89229c642d..e29515fd8d 100644 --- a/src/parameterizations/vertical/MOM_ALE_sponge.F90 +++ b/src/parameterizations/vertical/MOM_ALE_sponge.F90 @@ -10,6 +10,7 @@ module MOM_ALE_sponge + ! This file is part of MOM6. See LICENSE.md for the license. 
use MOM_coms, only : sum_across_PEs use MOM_diag_mediator, only : post_data, query_averaging_enabled, register_diag_field @@ -390,7 +391,7 @@ subroutine initialize_ALE_sponge_varying(Iresttime, G, param_file, CS) logical :: spongeDataOngrid = .false. integer :: i, j, k, col, total_sponge_cols, total_sponge_cols_u, total_sponge_cols_v character(len=10) :: remapScheme - + if (associated(CS)) then call MOM_error(WARNING, "initialize_sponge called with an associated "// & "control structure.") @@ -574,7 +575,7 @@ subroutine set_up_ALE_sponge_field_varying(filename, fieldname, Time, G, GV, US, type(time_type), intent(in) :: Time !< The current model time type(ocean_grid_type), intent(in) :: G !< Grid structure (in). type(verticalGrid_type), intent(in) :: GV !< ocean vertical grid structure - type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type + type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type real, dimension(SZI_(G),SZJ_(G),SZK_(G)), & target, intent(in) :: f_ptr !< Pointer to the field to be damped (in). type(ALE_sponge_CS), pointer :: CS !< Sponge control structure (in/out). @@ -596,7 +597,7 @@ subroutine set_up_ALE_sponge_field_varying(filename, fieldname, Time, G, GV, US, type(remapping_CS) :: remapCS ! Remapping parameters and work arrays if (.not.associated(CS)) return - ! initialize time interpolator module + ! initialize time interpolator module call time_interp_external_init() isd = G%isd; ied = G%ied; jsd = G%jsd; jed = G%jed CS%fldno = CS%fldno + 1 @@ -774,7 +775,7 @@ subroutine apply_ALE_sponge(h, dt, G, GV, US, CS, Time) real, allocatable, dimension(:,:,:) :: mask_z ! A temporary array for field mask at h pts real, dimension(:), allocatable :: hsrc ! Source thicknesses [Z ~> m]. ! Local variables for ALE remapping - real, dimension(:), allocatable :: tmpT1d + real, dimension(:), allocatable :: tmpT1d integer :: c, m, nkmb, i, j, k, is, ie, js, je, nz, nz_data integer :: col, total_sponge_cols real, allocatable, dimension(:), target :: z_in, z_edges_in @@ -782,7 +783,7 @@ subroutine apply_ALE_sponge(h, dt, G, GV, US, CS, Time) real :: h_neglect, h_neglect_edge real :: zTopOfCell, zBottomOfCell ! Heights [Z ~> m]. integer :: nPoints - + is = G%isc ; ie = G%iec ; js = G%jsc ; je = G%jec ; nz = G%ke if (.not.associated(CS)) return From 18e589420875d7909916c29cb14f0c4dc121966c Mon Sep 17 00:00:00 2001 From: Marshall Ward Date: Wed, 13 Nov 2019 12:00:40 -0500 Subject: [PATCH 9/9] Enable tc4.restart test --- .testing/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.testing/Makefile b/.testing/Makefile index 16958cebea..924f983dfb 100644 --- a/.testing/Makefile +++ b/.testing/Makefile @@ -182,7 +182,7 @@ test: $(foreach t,$(TESTS),test.$(t)) test.regressions: $(foreach c,$(CONFIGS),$(c).regression $(c).regression.diag) test.grids: $(foreach c,$(filter-out tc3,$(CONFIGS)),$(c).grid $(c).grid.diag) test.layouts: $(foreach c,$(CONFIGS),$(c).layout $(c).layout.diag) -test.restarts: $(foreach c,$(filter-out tc4,$(CONFIGS)),$(c).restart) +test.restarts: $(foreach c,$(CONFIGS),$(c).restart) test.repros: $(foreach c,$(CONFIGS),$(c).repro $(c).repro.diag) test.openmps: $(foreach c,$(CONFIGS),$(c).openmp $(c).openmp.diag) test.nans: $(foreach c,$(CONFIGS),$(c).nan $(c).nan.diag)
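With the tc4 restart failure addressed by the MOM_ALE_sponge changes earlier in this series, the
restart test is run for every configuration again. Based on the $(foreach c,$(CONFIGS),$(c).restart)
expansion above, the tests should be invocable from .testing as something like

    make test.restarts   # runs each configuration as two half-period segments with a restart
    make tc4.restart     # the per-configuration prerequisite implied by the foreach expansion

Both target names are inferred from the Makefile rules shown in this series rather than from a
documented interface.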