Skip to content

Commit

Permalink
chore(comments)
Browse files Browse the repository at this point in the history
  • Loading branch information
nicolasK committed Jul 22, 2024
1 parent 170bd1b commit 32296a9
Showing 1 changed file with 15 additions and 4 deletions.
19 changes: 15 additions & 4 deletions earthdaily/earthdatastore/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -874,17 +874,22 @@ def datacube(
DESCRIPTION.
"""

# Properties (per item) are not compatible with groupby_date.
if properties not in (None, False) and groupby_date is not None:
raise NotImplementedError(
"You must set `groupby_date=None` to have properties per item."
)

# convert collections to list
collections = [collections] if isinstance(collections, str) else collections

if isinstance(collections, str):
collections = [collections]

# if intersects a geometry, create a GeoDataFrame
if intersects is not None:
intersects = cube_utils.GeometryManager(intersects).to_geopandas()
self.intersects = intersects

# if mask_with, need to add assets or to get mask item id
if mask_with:
if mask_with not in mask._available_masks:
raise NotImplementedError(
Expand Down Expand Up @@ -919,6 +924,7 @@ def datacube(
elif isinstance(assets, dict):
assets[sensor_mask] = sensor_mask

# query the items
items = self.search(
collections=collections,
bbox=bbox,
Expand Down Expand Up @@ -950,7 +956,8 @@ def datacube(
raise Warning(
"No cross calibration coefficient available for the specified collections."
)
kwargs.setdefault("dtype", "float32")

# Create datacube from items
xr_datacube = datacube(
items,
intersects=intersects,
Expand All @@ -962,6 +969,8 @@ def datacube(
groupby_date=None,
**kwargs,
)

# Create mask datacube and apply it to xr_datacube
if mask_with:
kwargs["dtype"] = "int8"
if "geobox" not in kwargs:
Expand Down Expand Up @@ -1021,11 +1030,13 @@ def datacube(
Mask = mask.Mask(xr_datacube, intersects=intersects, bbox=bbox)
xr_datacube = getattr(Mask, mask_with)(**mask_kwargs)

# keep only one value per pixel per day
if groupby_date:
xr_datacube = xr_datacube.groupby("time.date", restore_coord_dims=True)
xr_datacube = getattr(xr_datacube, groupby_date)().rename(dict(date="time"))
xr_datacube["time"] = xr_datacube.time.astype("M8[ns]")

# To filter by cloud_cover / clear_cover, we need to compute clear pixels at field level
if clear_cover or mask_statistics:
xy = xr_datacube[mask_with].isel(time=0).size

Expand Down

0 comments on commit 32296a9

Please sign in to comment.