Commit

Merge pull request #6 from jukent/reuse
code reuse
jukent authored Aug 7, 2024
2 parents 5f8a91e + 1893ba1 commit a529c1d
Showing 9 changed files with 116 additions and 273 deletions.
12 changes: 12 additions & 0 deletions notebooks/display_source.py
@@ -0,0 +1,12 @@
from IPython.display import display, HTML
from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter
import inspect

def display_source(obj, color=True):
    obj_src = inspect.getsource(obj)
    if color:
        display(HTML(highlight(obj_src, PythonLexer(), HtmlFormatter())))
    else:
        print(obj_src)
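
A minimal usage sketch (illustrative, not part of this commit): from a notebook sitting next to these files, the helper can render another object's source, for example the relocated adjust_pop_grid function.

# Hypothetical notebook cell -- assumes display_source.py and module.py are importable from the notebook's directory.
from display_source import display_source
from module import adjust_pop_grid

display_source(adjust_pop_grid)               # syntax-highlighted HTML output in Jupyter
display_source(adjust_pop_grid, color=False)  # plain-text fallback outside Jupyter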
57 changes: 57 additions & 0 deletions notebooks/module.py
@@ -0,0 +1,57 @@
import numpy as np

def adjust_pop_grid(tlon,tlat,field):
    """
    Adjusts the grid of longitude and latitude values, along with the corresponding field data.

    Parameters
    ----------
    tlon : numpy.ndarray
        2D array of longitude values.
    tlat : numpy.ndarray
        2D array of latitude values.
    field : numpy.ma.MaskedArray
        2D array of field data (e.g., temperature, salinity) corresponding to the tlon and tlat arrays.

    Returns
    -------
    lon : numpy.ndarray
        Adjusted 2D array of longitude values.
    lat : numpy.ndarray
        Adjusted 2D array of latitude values.
    field : numpy.ma.MaskedArray
        Adjusted 2D array of field data.

    Example
    -------
    >>> lon, lat, field = adjust_pop_grid(tlon, tlat, field)
    """
    nj = tlon.shape[0]
    ni = tlon.shape[1]
    xL = int(ni/2 - 1)
    xR = int(xL + ni)

    tlon = np.where(np.greater_equal(tlon,min(tlon[:,0])),tlon-360.,tlon)
    lon = np.concatenate((tlon,tlon+360.),1)
    lon = lon[:,xL:xR]

    if ni == 320:
        lon[367:-3,0] = lon[367:-3,0]+360.
    lon = lon - 360.
    lon = np.hstack((lon,lon[:,0:1]+360.))
    if ni == 320:
        lon[367:,-1] = lon[367:,-1] - 360.

    # Trick cartopy into doing the right thing:
    # it gets confused when the cyclic coords are identical
    lon[:,0] = lon[:,0]-1e-8

    # Periodicity
    lat = np.concatenate((tlat,tlat),1)
    lat = lat[:,xL:xR]
    lat = np.hstack((lat,lat[:,0:1]))

    field = np.ma.concatenate((field,field),1)
    field = field[:,xL:xR]
    field = np.ma.hstack((field,field[:,0:1]))
    return lon,lat,field
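
For orientation, a sketch of how the shared helper might be called from one of the notebooks below; the dataset and variable names (ds, SST) are placeholders and are not taken from this commit.

# Illustrative only -- assumes an xarray Dataset `ds` on the POP grid with
# TLONG/TLAT coordinates and a 2D variable SST.
import numpy as np
import matplotlib.pyplot as plt
import cartopy.crs as ccrs

from module import adjust_pop_grid

tlon = ds.TLONG.values
tlat = ds.TLAT.values
field = np.ma.masked_invalid(ds.SST.values)   # mask land / fill values

lon, lat, fld = adjust_pop_grid(tlon, tlat, field)

ax = plt.axes(projection=ccrs.Robinson(central_longitude=305.0))
pc = ax.pcolormesh(lon, lat, fld, transform=ccrs.PlateCarree())
ax.coastlines()
plt.colorbar(pc, ax=ax, orientation='horizontal')
plt.show()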
43 changes: 3 additions & 40 deletions notebooks/ocn-carbonfluxes.ipynb
@@ -89,7 +89,9 @@
 "import dask\n",
 "import distributed\n",
 "import s3fs\n",
-"import netCDF4"
+"import netCDF4\n",
+"\n",
+"from module import adjust_pop_grid"
 ]
 },
 {
@@ -150,45 +152,6 @@
 "ds_grid"
 ]
 },
-{
-"cell_type": "code",
-"execution_count": null,
-"id": "d1f1efc5-a878-4d00-bbac-d0185863bea3",
-"metadata": {},
-"outputs": [],
-"source": [
-"def adjust_pop_grid(tlon,tlat,field):\n",
-" nj = tlon.shape[0]\n",
-" ni = tlon.shape[1]\n",
-" xL = int(ni/2 - 1)\n",
-" xR = int(xL + ni)\n",
-"\n",
-" tlon = np.where(np.greater_equal(tlon,min(tlon[:,0])),tlon-360.,tlon)\n",
-" lon = np.concatenate((tlon,tlon+360.),1)\n",
-" lon = lon[:,xL:xR]\n",
-"\n",
-" if ni == 320:\n",
-" lon[367:-3,0] = lon[367:-3,0]+360.\n",
-" lon = lon - 360.\n",
-" lon = np.hstack((lon,lon[:,0:1]+360.))\n",
-" if ni == 320:\n",
-" lon[367:,-1] = lon[367:,-1] - 360.\n",
-"\n",
-" # Trick cartopy into doing the right thing:\n",
-" # it gets confused when the cyclic coords are identical\n",
-" lon[:,0] = lon[:,0]-1e-8\n",
-" \n",
-" # Periodicity\n",
-" lat = np.concatenate((tlat,tlat),1)\n",
-" lat = lat[:,xL:xR]\n",
-" lat = np.hstack((lat,lat[:,0:1]))\n",
-"\n",
-" field = np.ma.concatenate((field,field),1)\n",
-" field = field[:,xL:xR]\n",
-" field = np.ma.hstack((field,field[:,0:1]))\n",
-" return lon,lat,field"
-]
-},
 {
 "cell_type": "markdown",
 "id": "2cdc4f48-ec2a-4f63-a309-53803f476f7b",
43 changes: 3 additions & 40 deletions notebooks/ocn-iron.ipynb
@@ -81,7 +81,9 @@
 "from dask.distributed import LocalCluster\n",
 "import pandas as pd\n",
 "import s3fs\n",
-"import netCDF4"
+"import netCDF4\n",
+"\n",
+"from module import adjust_pop_grid"
 ]
 },
 {
@@ -132,45 +134,6 @@
 "depths = ds_grid.z_t * 0.01"
 ]
 },
-{
-"cell_type": "code",
-"execution_count": null,
-"id": "78660a8f-98bd-4f6a-b8f1-448d06787854",
-"metadata": {},
-"outputs": [],
-"source": [
-"def adjust_pop_grid(tlon,tlat,field):\n",
-" nj = tlon.shape[0]\n",
-" ni = tlon.shape[1]\n",
-" xL = int(ni/2 - 1)\n",
-" xR = int(xL + ni)\n",
-"\n",
-" tlon = np.where(np.greater_equal(tlon,min(tlon[:,0])),tlon-360.,tlon)\n",
-" lon = np.concatenate((tlon,tlon+360.),1)\n",
-" lon = lon[:,xL:xR]\n",
-"\n",
-" if ni == 320:\n",
-" lon[367:-3,0] = lon[367:-3,0]+360.\n",
-" lon = lon - 360.\n",
-" lon = np.hstack((lon,lon[:,0:1]+360.))\n",
-" if ni == 320:\n",
-" lon[367:,-1] = lon[367:,-1] - 360.\n",
-"\n",
-" # Trick cartopy into doing the right thing:\n",
-" # it gets confused when the cyclic coords are identical\n",
-" lon[:,0] = lon[:,0]-1e-8\n",
-" \n",
-" # Periodicity\n",
-" lat = np.concatenate((tlat,tlat),1)\n",
-" lat = lat[:,xL:xR]\n",
-" lat = np.hstack((lat,lat[:,0:1]))\n",
-"\n",
-" field = np.ma.concatenate((field,field),1)\n",
-" field = field[:,xL:xR]\n",
-" field = np.ma.hstack((field,field[:,0:1]))\n",
-" return lon,lat,field"
-]
-},
 {
 "cell_type": "markdown",
 "id": "3630bab7-a9ea-4433-9eb8-f8555cab07bd",
43 changes: 3 additions & 40 deletions notebooks/ocn-macronuts.ipynb
@@ -86,7 +86,9 @@
 "import pop_tools\n",
 "from dask.distributed import LocalCluster\n",
 "import s3fs\n",
-"import netCDF4"
+"import netCDF4\n",
+"\n",
+"from module import adjust_pop_grid"
 ]
 },
 {
@@ -157,45 +159,6 @@
 "depths = ds_grid.z_t * 0.01"
 ]
 },
-{
-"cell_type": "code",
-"execution_count": null,
-"id": "5d56f6ba-7bdb-43bd-b8ad-badbf5a112b1",
-"metadata": {},
-"outputs": [],
-"source": [
-"def adjust_pop_grid(tlon,tlat,field):\n",
-" nj = tlon.shape[0]\n",
-" ni = tlon.shape[1]\n",
-" xL = int(ni/2 - 1)\n",
-" xR = int(xL + ni)\n",
-"\n",
-" tlon = np.where(np.greater_equal(tlon,min(tlon[:,0])),tlon-360.,tlon)\n",
-" lon = np.concatenate((tlon,tlon+360.),1)\n",
-" lon = lon[:,xL:xR]\n",
-"\n",
-" if ni == 320:\n",
-" lon[367:-3,0] = lon[367:-3,0]+360.\n",
-" lon = lon - 360.\n",
-" lon = np.hstack((lon,lon[:,0:1]+360.))\n",
-" if ni == 320:\n",
-" lon[367:,-1] = lon[367:,-1] - 360.\n",
-"\n",
-" # Trick cartopy into doing the right thing:\n",
-" # it gets confused when the cyclic coords are identical\n",
-" lon[:,0] = lon[:,0]-1e-8\n",
-" \n",
-" # Periodicity\n",
-" lat = np.concatenate((tlat,tlat),1)\n",
-" lat = lat[:,xL:xR]\n",
-" lat = np.hstack((lat,lat[:,0:1]))\n",
-"\n",
-" field = np.ma.concatenate((field,field),1)\n",
-" field = field[:,xL:xR]\n",
-" field = np.ma.hstack((field,field[:,0:1]))\n",
-" return lon,lat,field"
-]
-},
 {
 "cell_type": "markdown",
 "id": "976ffa95-263e-4d11-bda3-0637f9d28212",
43 changes: 3 additions & 40 deletions notebooks/ocn-phyto-biomass.ipynb
@@ -95,7 +95,9 @@
 "from dask.distributed import LocalCluster\n",
 "import s3fs\n",
 "import netCDF4\n",
-"from datetime import datetime"
+"from datetime import datetime\n",
+"\n",
+"from module import adjust_pop_grid"
 ]
 },
 {
@@ -146,45 +148,6 @@
 "depths = ds_grid.z_t * 0.01"
 ]
 },
-{
-"cell_type": "code",
-"execution_count": null,
-"id": "7cf6b5c7-15fb-44a1-bc31-af0666433995",
-"metadata": {},
-"outputs": [],
-"source": [
-"def adjust_pop_grid(tlon,tlat,field):\n",
-" nj = tlon.shape[0]\n",
-" ni = tlon.shape[1]\n",
-" xL = int(ni/2 - 1)\n",
-" xR = int(xL + ni)\n",
-"\n",
-" tlon = np.where(np.greater_equal(tlon,min(tlon[:,0])),tlon-360.,tlon)\n",
-" lon = np.concatenate((tlon,tlon+360.),1)\n",
-" lon = lon[:,xL:xR]\n",
-"\n",
-" if ni == 320:\n",
-" lon[367:-3,0] = lon[367:-3,0]+360.\n",
-" lon = lon - 360.\n",
-" lon = np.hstack((lon,lon[:,0:1]+360.))\n",
-" if ni == 320:\n",
-" lon[367:,-1] = lon[367:,-1] - 360.\n",
-"\n",
-" # Trick cartopy into doing the right thing:\n",
-" # it gets confused when the cyclic coords are identical\n",
-" lon[:,0] = lon[:,0]-1e-8\n",
-" \n",
-" # Periodicity\n",
-" lat = np.concatenate((tlat,tlat),1)\n",
-" lat = lat[:,xL:xR]\n",
-" lat = np.hstack((lat,lat[:,0:1]))\n",
-"\n",
-" field = np.ma.concatenate((field,field),1)\n",
-" field = field[:,xL:xR]\n",
-" field = np.ma.hstack((field,field[:,0:1]))\n",
-" return lon,lat,field"
-]
-},
 {
 "cell_type": "markdown",
 "id": "e2eda11b-15e6-471f-8d09-e07f62a1710d",
43 changes: 3 additions & 40 deletions notebooks/ocn-phyto-lims.ipynb
@@ -94,7 +94,9 @@
 "import pop_tools\n",
 "from dask.distributed import LocalCluster\n",
 "import s3fs\n",
-"import netCDF4"
+"import netCDF4\n",
+"\n",
+"from module import adjust_pop_grid"
 ]
 },
 {
@@ -145,45 +147,6 @@
 "depths = ds_grid.z_t * 0.01"
 ]
 },
-{
-"cell_type": "code",
-"execution_count": null,
-"id": "0b73860c-5a37-42b7-bd8c-7c841937e0b6",
-"metadata": {},
-"outputs": [],
-"source": [
-"def adjust_pop_grid(tlon,tlat,field):\n",
-" nj = tlon.shape[0]\n",
-" ni = tlon.shape[1]\n",
-" xL = int(ni/2 - 1)\n",
-" xR = int(xL + ni)\n",
-"\n",
-" tlon = np.where(np.greater_equal(tlon,min(tlon[:,0])),tlon-360.,tlon)\n",
-" lon = np.concatenate((tlon,tlon+360.),1)\n",
-" lon = lon[:,xL:xR]\n",
-"\n",
-" if ni == 320:\n",
-" lon[367:-3,0] = lon[367:-3,0]+360.\n",
-" lon = lon - 360.\n",
-" lon = np.hstack((lon,lon[:,0:1]+360.))\n",
-" if ni == 320:\n",
-" lon[367:,-1] = lon[367:,-1] - 360.\n",
-"\n",
-" # Trick cartopy into doing the right thing:\n",
-" # it gets confused when the cyclic coords are identical\n",
-" lon[:,0] = lon[:,0]-1e-8\n",
-" \n",
-" # Periodicity\n",
-" lat = np.concatenate((tlat,tlat),1)\n",
-" lat = lat[:,xL:xR]\n",
-" lat = np.hstack((lat,lat[:,0:1]))\n",
-"\n",
-" field = np.ma.concatenate((field,field),1)\n",
-" field = field[:,xL:xR]\n",
-" field = np.ma.hstack((field,field[:,0:1]))\n",
-" return lon,lat,field"
-]
-},
 {
 "cell_type": "markdown",
 "id": "652bc60d-d17e-4bbb-bfce-2b323281f444",