Support xcube viewer's pixel info box #1052

Merged · 4 commits · Jul 29, 2024
12 changes: 9 additions & 3 deletions CHANGES.md
@@ -9,10 +9,16 @@
is an arbitrary band-math expression,
see https://github.com/xcube-dev/xcube-viewer/issues/371.

* xcube server now allows for assigning a `GroupTitle` and a list of `Tags`
to a configured dataset. This feature has been added in order to support
grouping and filtering of datasets in UIs, see
* xcube server now allows for configuring new dataset properties
`GroupTitle` and `Tags`. This feature has been added in order to support
grouping and filtering of datasets in UIs,
see https://github.com/xcube-dev/xcube-viewer/issues/385.

* Added server endpoint `GET /statistics/{varName}` with query parameters
`lon`, `lat`, and `time`, which is used to extract data for a single point.
This feature has been added in order to support
https://github.com/xcube-dev/xcube-viewer/issues/404.

* The xcube server STAC API now publishes all fields available via the
`/datasets` endpoint. This includes colormap information for each asset such as
colorBarName, colorBarNorm, colorBarMin, colorBarMax, tileLevelMin, tileLevelMax.
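
As a quick illustration of the new single-point statistics entry above, a minimal client sketch follows. The host, port, exact route (the changelog abbreviates it), and the shape of the JSON response are assumptions; only the `lon`, `lat`, and `time` query parameters and the variable-name path segment come from the changelog entry.

```python
# Minimal client sketch for the new single-point statistics endpoint.
# Host, port, full route, and response envelope are assumptions; only the
# lon/lat/time query parameters come from the changelog entry above.
import requests

SERVER = "http://localhost:8080"          # assumed local xcube server
URL = f"{SERVER}/statistics/conc_tsm"     # placeholder route for {varName}

params = {
    "lon": 1.768,                         # placeholder coordinates
    "lat": 51.465,
    "time": "2017-01-16T10:09:21Z",       # placeholder time label
}
response = requests.get(URL, params=params)
response.raise_for_status()
print(response.json())  # expected to carry the extracted point value
```
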
19 changes: 15 additions & 4 deletions test/server/test_api.py
@@ -5,6 +5,8 @@
import unittest
from typing import Optional

import pytest

from xcube.server.api import Api
from xcube.server.api import ApiContext
from xcube.server.api import ApiHandler
@@ -13,6 +15,7 @@
from xcube.server.api import ApiError
from xcube.server.server import ServerContext
from xcube.util.frozen import FrozenDict
from xcube.util.undefined import UNDEFINED
from .mocks import MockApiRequest
from .mocks import MockApiResponse
from .mocks import MockFramework
@@ -180,15 +183,12 @@ class DatasetHandler(ApiHandler):
def get(self):
return {}

self.assertFalse(
ApiRoute("datasets", "/datasets", DatasetHandler).slash
)
self.assertFalse(ApiRoute("datasets", "/datasets", DatasetHandler).slash)
self.assertTrue(
ApiRoute("datasets", "/datasets", DatasetHandler, slash=True).slash
)



class ApiContextTest(unittest.TestCase):
def test_basic_props(self):
config = {}
@@ -300,6 +300,17 @@ def test_query_arg_with_default(self):
self.assertEqual(True, request.get_query_arg("details", default=False))
self.assertEqual("CRS84", request.get_query_arg("crs", default="CRS84"))

def test_query_arg_with_undefined_default(self):
request = MockApiRequest(query_args=dict(details=["1"]))
self.assertEqual(
True, request.get_query_arg("details", type=bool, default=UNDEFINED)
)
with pytest.raises(
ApiError.BadRequest,
match="HTTP status 400: Missing required query parameter 'crs'",
):
request.get_query_arg("crs", default=UNDEFINED)


class ApiErrorTest(unittest.TestCase):
def test_base_class(self):
37 changes: 30 additions & 7 deletions test/webapi/statistics/test_controllers.py
@@ -27,7 +27,7 @@ def test_compute_statistics_for_point(self):
"demo",
"conc_tsm",
{"type": "Point", "coordinates": [lon, lat]},
{"time": time},
time,
)
self.assertIsInstance(result, dict)
self.assertEqual(
@@ -40,8 +40,20 @@
},
result,
)
self.assertAlmostEqual(44.5496, result.get("mean"), places=4)
self.assertNotIn("histogram", result)

# Compact point mode
result = compute_statistics(
ctx,
"demo",
"conc_tsm",
(lon, lat),
time,
)
self.assertIsInstance(result, dict)
self.assertEqual(
{"value": expected_value},
result,
)

def test_compute_statistics_for_oor_point(self):
lon = -100 # --> out-of-range!
@@ -55,11 +67,22 @@
"demo",
"conc_tsm",
{"type": "Point", "coordinates": [lon, lat]},
{"time": time},
time,
)
self.assertIsInstance(result, dict)
self.assertEqual({"count": 0}, result)

# Compact point mode
result = compute_statistics(
ctx,
"demo",
"conc_tsm",
(lon, lat),
time,
)
self.assertIsInstance(result, dict)
self.assertEqual({}, result)

def test_compute_statistics_for_polygon(self):
lon = 1.768
lat = 51.465
@@ -84,7 +107,7 @@
]
],
},
{"time": time},
time,
)
self.assertIsInstance(result, dict)
self.assertEqual(380, result.get("count"))
@@ -123,7 +146,7 @@ def test_compute_statistics_for_polygon_and_var_assignment(self):
]
],
},
{"time": time},
time,
)
self.assertIsInstance(result, dict)
self.assertEqual(380, result.get("count"))
@@ -162,7 +185,7 @@ def test_compute_statistics_for_oor_polygon(self):
]
],
},
{"time": time},
time,
)
self.assertIsInstance(result, dict)
self.assertEqual({"count": 0}, result)
4 changes: 3 additions & 1 deletion test/webapi/statistics/test_routes.py
@@ -20,7 +20,9 @@ def test_fetch_statistics_missing_time(self):
method="POST",
body='{"type": "Point", "coordinates": [1.768, 51.465]}',
)
self.assertBadRequestResponse(response, "Missing query parameter 'time'")
self.assertBadRequestResponse(
response, "Missing required query parameter 'time'"
)

def test_fetch_statistics_invalid_geometry(self):
response = self.fetch(
21 changes: 15 additions & 6 deletions xcube/server/api.py
@@ -25,6 +25,7 @@
from ..util.assertions import assert_true
from ..util.frozen import FrozenDict
from ..util.jsonschema import JsonObjectSchema
from ..util.undefined import UNDEFINED

_SERVER_CONTEXT_ATTR_NAME = "__xcube_server_context"
_HTTP_METHODS = {"head", "get", "post", "put", "delete", "options"}
@@ -234,9 +235,9 @@ def route(self, path: str, slash: bool = False, **handler_kwargs):
"""

def decorator_func(handler_cls: type[ApiHandler]):
self._routes.append(ApiRoute(
self.name, path, handler_cls, handler_kwargs, slash
))
self._routes.append(
ApiRoute(self.name, path, handler_cls, handler_kwargs, slash)
)
return handler_cls

return decorator_func
@@ -589,6 +590,10 @@ def get_query_arg(
argument. If *type* is not given, but *default* is, then *type*
will be inferred from *default*.

New in 1.7: If *default* equals ``xcube.util.undefined.UNDEFINED``
and the query argument is not given, ``ApiError.BadRequest``
will be raised.

Args:
name: The name of the argument
type: The requested data type. Must be a callable type, e.g.
@@ -598,11 +603,15 @@
Returns:
The value of the query argument.
"""
if type is None and default is not None:
if type is None and default is not None and default != UNDEFINED:
type = builtin_type(default)
type = type if callable(type) else None
values = self.get_query_args(name, type=type)
return values[0] if values else default
if not values:
if default == UNDEFINED:
raise ApiError.BadRequest(f"Missing required query parameter {name!r}")
return default
return values[0]

# noinspection PyShadowingBuiltins
@abstractmethod
@@ -746,7 +755,7 @@ def __init__(
path: str,
handler_cls: type[ApiHandler],
handler_kwargs: Optional[dict[str, Any]] = None,
slash: bool = False
slash: bool = False,
):
assert_instance(api_name, str, name="api_name")
assert_instance(path, str, name="path")
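
The docstring addition above introduces the `UNDEFINED` default for `get_query_arg`. The sketch below shows how a handler might rely on it; the handler class and its endpoint are hypothetical, and only `get_query_arg`, `UNDEFINED`, and the raised error message are taken from this pull request.

```python
# Hypothetical handler sketching the new UNDEFINED default of
# ApiRequest.get_query_arg(); only get_query_arg(), UNDEFINED, and the
# raised error message come from this pull request.
from xcube.server.api import ApiHandler
from xcube.util.undefined import UNDEFINED


class PointInfoHandler(ApiHandler):
    def get(self):
        # Required: raises ApiError.BadRequest
        # ("Missing required query parameter 'time'") when ?time= is absent.
        time_label = self.request.get_query_arg("time", default=UNDEFINED)
        # Optional: the type (bool) is inferred from the default value.
        details = self.request.get_query_arg("details", default=False)
        return {"time": time_label, "details": details}
```
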
58 changes: 32 additions & 26 deletions xcube/webapi/statistics/controllers.py
@@ -1,5 +1,5 @@
from collections.abc import Mapping
from typing import Any
from typing import Any, Union

import numpy as np
import xarray as xr
@@ -16,26 +16,22 @@


NAN_RESULT = {"count": 0}
NAN_RESULT_COMPACT = {}
DEFAULT_BIN_COUNT = 100


def compute_statistics(
ctx: StatisticsContext,
ds_id: str,
var_name: str,
geo_json: dict[str, Any],
params: Mapping[str, str],
geometry: Union[dict[str, Any], tuple[float, float]],
time_label: str,
trace_perf: bool = False,
):
params = dict(params)
try:
time_label = params.pop("time")
except KeyError:
raise ApiError.BadRequest("Missing query parameter 'time'")
trace_perf = params.pop("debug", "1" if ctx.datasets_ctx.trace_perf else "0") == "1"
measure_time = measure_time_cm(logger=LOG, disabled=not trace_perf)
with measure_time("Computing statistics"):
return _compute_statistics(
ctx, ds_id, var_name, time_label, geo_json, DEFAULT_BIN_COUNT
ctx, ds_id, var_name, time_label, geometry, DEFAULT_BIN_COUNT
)


@@ -44,7 +40,7 @@ def _compute_statistics(
ds_id: str,
var_name_or_assign: str,
time_label: str,
geo_json: dict[str, Any],
geometry: Union[dict[str, Any], tuple[float, float]],
bin_count: int,
):
ml_dataset = ctx.datasets_ctx.get_ml_dataset(ds_id)
@@ -56,40 +52,50 @@
except (TypeError, ValueError) as e:
raise ApiError.BadRequest("Invalid 'time'") from e

try:
geometry = shapely.geometry.shape(geo_json)
except (TypeError, ValueError, AttributeError) as e:
raise ApiError.BadRequest("Invalid GeoJSON geometry encountered") from e
if isinstance(geometry, tuple):
compact_mode = True
geometry = shapely.geometry.Point(geometry)
else:
compact_mode = False
try:
geometry = shapely.geometry.shape(geometry)
except (TypeError, ValueError, AttributeError) as e:
raise ApiError.BadRequest("Invalid GeoJSON geometry encountered") from e

nan_result = NAN_RESULT_COMPACT if compact_mode else NAN_RESULT

dataset = dataset.sel(time=time, method="nearest")

x_name, y_name = grid_mapping.xy_dim_names
if isinstance(geometry, shapely.geometry.Point):
bounds = get_dataset_geometry(dataset)
if not bounds.contains(geometry):
return NAN_RESULT
return nan_result
indexers = {x_name: geometry.x, y_name: geometry.y}
variable = _get_dataset_variable(var_name_or_assign, dataset)
value = variable.sel(**indexers, method="Nearest").values
if np.isnan(value):
return NAN_RESULT
return {
"count": 1,
"minimum": float(value),
"maximum": float(value),
"mean": float(value),
"deviation": 0.0,
}
return nan_result
if compact_mode:
return {"value": float(value)}
else:
return {
"count": 1,
"minimum": float(value),
"maximum": float(value),
"mean": float(value),
"deviation": 0.0,
}

dataset = mask_dataset_by_geometry(dataset, geometry)
if dataset is None:
return NAN_RESULT
return nan_result

variable = _get_dataset_variable(var_name_or_assign, dataset)

count = int(np.count_nonzero(~np.isnan(variable)))
if count == 0:
return NAN_RESULT
return nan_result

# note, casting to float forces intended computation
minimum = float(variable.min())
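
To summarize the controller change, the wrapper sketch below contrasts the two geometry forms that `compute_statistics` now accepts. The wrapper functions themselves are hypothetical; the call signatures and result shapes mirror the tests in this pull request, and `ctx` stands for a configured `StatisticsContext`.

```python
# Sketch contrasting the two geometry forms compute_statistics() now accepts.
# The wrapper functions are hypothetical; signatures and result shapes mirror
# the tests in this pull request.
from typing import Any

from xcube.webapi.statistics.controllers import compute_statistics


def get_point_statistics(ctx: Any, ds_id: str, var_name: str,
                         lon: float, lat: float, time_label: str) -> dict:
    """Full statistics for a GeoJSON point:
    {"count", "minimum", "maximum", "mean", "deviation"}, or {"count": 0}."""
    return compute_statistics(
        ctx, ds_id, var_name,
        {"type": "Point", "coordinates": [lon, lat]},
        time_label,
    )


def get_pixel_info(ctx: Any, ds_id: str, var_name: str,
                   lon: float, lat: float, time_label: str) -> dict:
    """Compact result backing the viewer's pixel info box:
    {"value": <float>}, or {} if the point is out of range or NaN."""
    return compute_statistics(ctx, ds_id, var_name, (lon, lat), time_label)
```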