diff --git a/apis/python/src/tiledbsoma/__init__.py b/apis/python/src/tiledbsoma/__init__.py index cc344ce5fe..cedef3531d 100644 --- a/apis/python/src/tiledbsoma/__init__.py +++ b/apis/python/src/tiledbsoma/__init__.py @@ -177,6 +177,7 @@ from ._measurement import Measurement from ._multiscale_image import MultiscaleImage from ._point_cloud_dataframe import PointCloudDataFrame +from ._scene import Scene from ._sparse_nd_array import SparseNDArray, SparseNDArrayRead from .options import SOMATileDBContext, TileDBCreateOptions, TileDBWriteOptions from .pytiledbsoma import ( @@ -215,6 +216,7 @@ "open", "PointCloudDataFrame", "ResultOrder", + "Scene", "show_package_versions", "SOMA_JOINID", "SOMAError", diff --git a/apis/python/src/tiledbsoma/_factory.py b/apis/python/src/tiledbsoma/_factory.py index 7bdb4044f3..bf5ec7d2f5 100644 --- a/apis/python/src/tiledbsoma/_factory.py +++ b/apis/python/src/tiledbsoma/_factory.py @@ -1,5 +1,5 @@ -# Copyright (c) 2021-2023 The Chan Zuckerberg Initiative Foundation -# Copyright (c) 2021-2023 TileDB, Inc. +# Copyright (c) 2021-2024 The Chan Zuckerberg Initiative Foundation +# Copyright (c) 2021-2024 TileDB, Inc. # # Licensed under the MIT License. @@ -30,6 +30,7 @@ _measurement, _multiscale_image, _point_cloud_dataframe, + _scene, _soma_object, _sparse_nd_array, _tdb_handles, @@ -219,6 +220,7 @@ def _type_name_to_cls(type_name: str) -> Type[AnySOMAObject]: _measurement.Measurement, _multiscale_image.MultiscaleImage, _sparse_nd_array.SparseNDArray, + _scene.Scene, _point_cloud_dataframe.PointCloudDataFrame, ) } diff --git a/apis/python/src/tiledbsoma/_scene.py b/apis/python/src/tiledbsoma/_scene.py index e4b5b254be..33d70ded47 100644 --- a/apis/python/src/tiledbsoma/_scene.py +++ b/apis/python/src/tiledbsoma/_scene.py @@ -6,22 +6,25 @@ Implementation of a SOMA Scene """ -from typing import Any, Optional, Sequence, Tuple, Union +from typing import Any, Optional, Sequence, Union -import pyarrow as pa import somacore -from somacore import ( - CoordinateSpace, - CoordinateTransform, - options, -) +from somacore import Axis, CoordinateSpace, CoordinateTransform, IdentityTransform -from ._collection import CollectionBase -from ._constants import SOMA_GEOMETRY, SOMA_JOINID +from . import _funcs, _tdb_handles +from ._collection import Collection, CollectionBase +from ._constants import SOMA_COORDINATE_SPACE_METADATA_KEY +from ._exception import SOMAError from ._geometry_dataframe import GeometryDataFrame from ._multiscale_image import MultiscaleImage from ._point_cloud_dataframe import PointCloudDataFrame from ._soma_object import AnySOMAObject +from ._spatial_util import ( + coordinate_space_from_json, + coordinate_space_to_json, + transform_from_json, + transform_to_json, +) class Scene( # type: ignore[misc] # __eq__ false positive @@ -37,7 +40,8 @@ class Scene( # type: ignore[misc] # __eq__ false positive Experimental. 
""" - __slots__ = () + __slots__ = ("_coord_space",) + _wrapper_type = _tdb_handles.SceneWrapper _subclass_constrained_soma_types = { "img": ("SOMACollection",), @@ -45,6 +49,18 @@ class Scene( # type: ignore[misc] # __eq__ false positive "varl": ("SOMACollection",), } + def __init__( + self, + handle: _tdb_handles.SOMAGroupWrapper[Any], + **kwargs: Any, + ): + super().__init__(handle, **kwargs) + coord_space = self.metadata.get(SOMA_COORDINATE_SPACE_METADATA_KEY) + if coord_space is None: + self._coord_space: Optional[CoordinateSpace] = None + else: + self._coord_space = coordinate_space_from_json(coord_space) + @property def coordinate_space(self) -> Optional[CoordinateSpace]: """Coordinate system for this scene. @@ -52,12 +68,20 @@ def coordinate_space(self) -> Optional[CoordinateSpace]: Lifecycle: Experimental. """ - raise NotImplementedError() + return self._coord_space @coordinate_space.setter def coordinate_space(self, value: CoordinateSpace) -> None: - raise NotImplementedError() - + if not isinstance(value, CoordinateSpace): + raise TypeError(f"Invalid type {type(value).__name__}.") + self.metadata[SOMA_COORDINATE_SPACE_METADATA_KEY] = coordinate_space_to_json( + value + ) + self._coord_space = value + + @_funcs.forwards_kwargs_to( + GeometryDataFrame.create, exclude=("context", "tiledb_timestamp") + ) def add_geometry_dataframe( self, key: str, @@ -65,12 +89,7 @@ def add_geometry_dataframe( transform: Optional[CoordinateTransform], *, uri: str, - schema: pa.Schema, - index_column_names: Sequence[str] = (SOMA_JOINID, SOMA_GEOMETRY), - axis_names: Sequence[str] = ("x", "y"), - domain: Optional[Sequence[Optional[Tuple[Any, Any]]]] = None, - platform_config: Optional[options.PlatformConfig] = None, - context: Optional[Any] = None, + **kwargs: Any, ) -> GeometryDataFrame: """Adds a ``GeometryDataFrame`` to the scene and sets a coordinate transform between the scene and the dataframe. @@ -97,6 +116,9 @@ def add_geometry_dataframe( """ raise NotImplementedError() + @_funcs.forwards_kwargs_to( + MultiscaleImage.create, exclude=("context", "tiledb_timestamp") + ) def add_multiscale_image( self, key: str, @@ -104,13 +126,10 @@ def add_multiscale_image( transform: Optional[CoordinateTransform], *, uri: str, - type: pa.DataType, - reference_level_shape: Sequence[int], - axis_names: Sequence[str] = ("c", "y", "x"), - axis_types: Sequence[str] = ("channel", "height", "width"), + **kwargs: Any, ) -> MultiscaleImage: """Adds a ``MultiscaleImage`` to the scene and sets a coordinate transform - between the scene and the multiscale image. + between the scene and the dataframe. Parameters are as in :meth:`spatial.MultiscaleImage.create`. See :meth:`add_new_collection` for details about child URIs. @@ -128,6 +147,9 @@ def add_multiscale_image( """ raise NotImplementedError() + @_funcs.forwards_kwargs_to( + PointCloudDataFrame.create, exclude=("context", "tiledb_timestamp") + ) def add_new_point_cloud_dataframe( self, key: str, @@ -135,20 +157,16 @@ def add_new_point_cloud_dataframe( transform: Optional[CoordinateTransform], *, uri: Optional[str] = None, - schema: pa.Schema, - index_column_names: Sequence[str] = (SOMA_JOINID,), - axis_names: Sequence[str] = ("x", "y"), - domain: Optional[Sequence[Optional[Tuple[Any, Any]]]] = None, - platform_config: Optional[options.PlatformConfig] = None, + **kwargs: Any, ) -> PointCloudDataFrame: - """Adds a point cloud to the scene and sets a coordinate transform - between the scene and the dataframe. 
+        """Adds a point cloud dataframe to the scene and sets a coordinate
+        transform between the scene and the dataframe.
 
         Parameters are as in :meth:`spatial.PointCloudDataFrame.create`.
         See :meth:`add_new_collection` for details about child URIs.
 
         Args:
             key: The name of the point cloud dataframe.
             transform: The coordinate transformation from the scene to the dataframe.
             subcollection: The name, or sequence of names, of the subcollection the
                 dataframe is stored in. Defaults to ``'obsl'``.
@@ -176,7 +194,7 @@ def set_transform_to_geometry_dataframe(
         to set a transformation for geometry dataframe named "transcripts" in the
         "var/RNA" collection::
-            scene.set_transfrom_to_geometry_dataframe(
+            scene.set_transform_to_geometry_dataframe(
                 'transcripts',
                 transform,
                 subcollection=['var', 'RNA'],
             )
@@ -224,7 +242,56 @@ def set_transform_to_multiscale_image(
         Lifecycle:
             experimental
         """
-        raise NotImplementedError()
+        if not isinstance(subcollection, str):
+            raise NotImplementedError()
+
+        # Check the transform output axes match the scene coordinate space.
+        if self.coordinate_space is None:
+            raise SOMAError(
+                "The scene coordinate space must be set before registering an image."
+            )
+        if transform.output_axes != self.coordinate_space.axis_names:
+            raise ValueError(
+                f"The names of the transform output axes, {transform.output_axes}, do "
+                f"not match the names of the axes in the scene coordinate space, "
+                f"{self.coordinate_space.axis_names}."
+            )
+
+        # Create the coordinate space if it does not exist. Otherwise, check it is
+        # compatible with the provided transform.
+        if coordinate_space is None:
+            if isinstance(transform, IdentityTransform):
+                coordinate_space = self.coordinate_space
+            else:
+                # mypy false positive https://github.com/python/mypy/issues/5313
+                coordinate_space = CoordinateSpace(
+                    tuple(Axis(name=axis_name) for axis_name in transform.input_axes)  # type: ignore[misc]
+                )
+        else:
+            if transform.input_axes != coordinate_space.axis_names:
+                raise ValueError(
+                    f"The names of the transform input axes, {transform.input_axes}, do "
+                    f"not match the names of the axes in the provided coordinate space, "
+                    f"{coordinate_space.axis_names}."
+                )
+
+        # Check asset exists in the specified location.
+        try:
+            coll: Collection = self[subcollection]  # type: ignore
+        except KeyError as ke:
+            raise KeyError(f"No collection '{subcollection}' in this scene.") from ke
+        try:
+            image: MultiscaleImage = coll[key]
+        except KeyError as ke:
+            raise KeyError(
+                f"No multiscale image named '{key}' in '{subcollection}'."
+            ) from ke
+        if not isinstance(image, MultiscaleImage):
+            raise TypeError(f"'{key}' in '{subcollection}' is not a MultiscaleImage.")
+
+        image.coordinate_space = coordinate_space
+        coll.metadata[f"soma_scene_registry_{key}"] = transform_to_json(transform)
+        return image
 
     def set_transform_to_point_cloud_dataframe(
         self,
@@ -237,30 +304,71 @@ def set_transform_to_point_cloud_dataframe(
         """Adds the coordinate transform for the scene coordinate space to a point
         cloud dataframe stored in the scene.
 
-        If the subcollection the point cloud is inside of is more than one
+        If the subcollection the point cloud dataframe is inside of is more than one
         layer deep, the input should be provided as a sequence of names.
 
         For example, to set a transform for a point named `transcripts` in the `var/RNA`
         collection::
-            scene.set_transformation_to_point_cloud_dataframe(
+            scene.set_transform_to_point_cloud_dataframe(
                 'transcripts',
                 transform,
                 subcollection=['var', 'RNA'],
             )
 
         Args:
-            key: The name of the point cloud.
-            transform: The coordinate transformation from the scene to the point cloud.
+            key: The name of the point cloud dataframe.
+            transform: The coordinate transformation from the scene to the dataframe.
             subcollection: The name, or sequence of names, of the subcollection the
-                point cloud is stored in. Defaults to ``'obsl'``.
-            coordinate_space: Optional coordinate space for the point cloud. This will
-                replace the existing coordinate space of the point cloud. Defaults to
+                dataframe is stored in. Defaults to ``'obsl'``.
+            coordinate_space: Optional coordinate space for the dataframe. This will
+                replace the existing coordinate space of the dataframe. Defaults to
                 ``None``.
 
         Returns:
-            The point cloud, opened for writing.
+            The point cloud dataframe, opened for writing.
 
         Lifecycle:
             experimental
         """
-        raise NotImplementedError()
+        if not isinstance(subcollection, str):
+            raise NotImplementedError()
+        if self.coordinate_space is None:
+            raise SOMAError(
+                "The scene coordinate space must be set before registering a point "
+                "cloud dataframe."
+            )
+        # Create the coordinate space if it does not exist. Otherwise, check it is
+        # compatible with the provided transform.
+        if coordinate_space is None:
+            if isinstance(transform, IdentityTransform):
+                coordinate_space = self.coordinate_space
+            else:
+                # mypy false positive https://github.com/python/mypy/issues/5313
+                coordinate_space = CoordinateSpace(
+                    tuple(Axis(name=axis_name) for axis_name in transform.input_axes)  # type: ignore[misc]
+                )
+        else:
+            if transform.input_axes != coordinate_space.axis_names:
+                raise ValueError(
+                    f"The names of the transform input axes, {transform.input_axes}, do "
+                    f"not match the names of the axes in the provided coordinate space, "
+                    f"{coordinate_space.axis_names}."
+                )
+
+        # Check asset exists in the specified location.
+        try:
+            coll: Collection = self[subcollection]  # type: ignore
+        except KeyError as ke:
+            raise KeyError(f"No collection '{subcollection}' in this scene.") from ke
+        try:
+            point_cloud: PointCloudDataFrame = coll[key]
+        except KeyError as ke:
+            raise KeyError(f"No PointCloudDataFrame named '{key}' in '{subcollection}'.") from ke
+        if not isinstance(point_cloud, PointCloudDataFrame):
+            raise TypeError(
+                f"'{key}' in '{subcollection}' is not a PointCloudDataFrame."
+            )
+
+        point_cloud.coordinate_space = coordinate_space
+        coll.metadata[f"soma_scene_registry_{key}"] = transform_to_json(transform)
+        return point_cloud
 
     def get_transform_from_geometry_dataframe(
         self, key: str, *, subcollection: Union[str, Sequence[str]] = "obsl"
@@ -339,7 +447,20 @@ def get_transform_to_geometry_dataframe(
         Lifecycle:
             experimental
         """
-        raise NotImplementedError()
+        if not isinstance(subcollection, str):
+            raise NotImplementedError()
+        try:
+            coll: Collection = self[subcollection]  # type: ignore
+        except KeyError as ke:
+            raise KeyError(f"No collection '{subcollection}' in this scene.") from ke
+        try:
+            transform_json = coll.metadata[f"soma_scene_registry_{key}"]
+        except KeyError as ke:
+            raise KeyError(
+                f"No coordinate space registry for '{key}' in collection "
+                f"'{subcollection}'."
+ ) from ke + return transform_from_json(transform_json) def get_transform_to_multiscale_image( self, @@ -364,22 +485,62 @@ def get_transform_to_multiscale_image( Lifecycle: experimental """ - raise NotImplementedError() + if not isinstance(subcollection, str): + raise NotImplementedError() + try: + coll: Collection = self[subcollection] # type: ignore + except KeyError as ke: + raise KeyError(f"No collection '{subcollection}' in this scene.") from ke + try: + transform_json = coll.metadata[f"soma_scene_registry_{key}"] + except KeyError: + raise KeyError( + f"No coordinate space registry for '{key}' in collection " + f"'{subcollection}'" + ) + base_transform = transform_from_json(transform_json) + if level is None: + return base_transform + try: + image: MultiscaleImage = coll[key] + except KeyError as ke: + raise KeyError( + f"No MultiscaleImage named '{key}' in '{subcollection}'." + ) from ke + if isinstance(level, str): + raise NotImplementedError( + "Support for querying image level by name is not yet implemented." + ) + level_transform = image.get_transform_to_level(level) + return level_transform @ base_transform def get_transform_to_point_cloud_dataframe( self, key: str, *, subcollection: str = "obsl" ) -> CoordinateTransform: """Returns the coordinate transformation from the scene to a requested - point cloud. + point cloud dataframe. Args: - key: The name of the point cloud. + key: The name of the point cloud dataframe. subcollection: The name, or sequence of names, of the subcollection the - point cloud dataframe is stored in. Defaults to ``'obsl'``. + dataframe is stored in. Defaults to ``'obsl'``. Returns: - Coordinate transform from the scene to the requested point cloud. + Coordinate transform from the scene to the point cloud dataframe. Lifecycle: experimental """ - raise NotImplementedError() + if not isinstance(subcollection, str): + raise NotImplementedError() + try: + coll: Collection = self[subcollection] # type: ignore + except KeyError as ke: + raise KeyError(f"No collection '{subcollection}' in this scene.") from ke + try: + transform_json = coll.metadata[f"soma_scene_registry_{key}"] + except KeyError as ke: + raise KeyError( + f"No coordinate space registry for '{key}' in collection " + f"'{subcollection}'." + ) from ke + return transform_from_json(transform_json) diff --git a/apis/python/src/tiledbsoma/_soma_object.py b/apis/python/src/tiledbsoma/_soma_object.py index d6c306743e..71fae027ee 100644 --- a/apis/python/src/tiledbsoma/_soma_object.py +++ b/apis/python/src/tiledbsoma/_soma_object.py @@ -46,6 +46,7 @@ class SOMAObject(somacore.SOMAObject, Generic[_WrapperType_co]): Type[_tdb_handles.CollectionWrapper], Type[_tdb_handles.ExperimentWrapper], Type[_tdb_handles.MeasurementWrapper], + Type[_tdb_handles.SceneWrapper], Type[_tdb_handles.MultiscaleImageWrapper], ] """Class variable of the Wrapper class used to open this object type.""" diff --git a/apis/python/src/tiledbsoma/_tdb_handles.py b/apis/python/src/tiledbsoma/_tdb_handles.py index f910502801..d18a686351 100644 --- a/apis/python/src/tiledbsoma/_tdb_handles.py +++ b/apis/python/src/tiledbsoma/_tdb_handles.py @@ -1,5 +1,5 @@ -# Copyright (c) 2021-2023 The Chan Zuckerberg Initiative Foundation -# Copyright (c) 2021-2023 TileDB, Inc. +# Copyright (c) 2021-2024 The Chan Zuckerberg Initiative Foundation +# Copyright (c) 2021-2024 TileDB, Inc. # # Licensed under the MIT License. 
@@ -48,6 +48,7 @@ clib.SOMACollection, clib.SOMAMeasurement, clib.SOMAExperiment, + clib.SOMAScene, clib.SOMAMultiscaleImage, ] _RawHdl_co = TypeVar("_RawHdl_co", bound=RawHandle, covariant=True) @@ -85,6 +86,7 @@ def open( "somacollection": CollectionWrapper, "somaexperiment": ExperimentWrapper, "somameasurement": MeasurementWrapper, + "somascene": SceneWrapper, "somamultiscaleimage": MultiscaleImageWrapper, } @@ -93,7 +95,7 @@ def open( soma_object, context ) except KeyError: - if soma_object.type.lower() in {"somascene", "somageometrydataframe"}: + if soma_object.type.lower() == "somageometrydataframe": raise NotImplementedError( f"Support for {soma_object.type!r} is not yet implemented." ) @@ -326,6 +328,12 @@ class MultiscaleImageWrapper(SOMAGroupWrapper[clib.SOMAMultiscaleImage]): _GROUP_WRAPPED_TYPE = clib.SOMAMultiscaleImage +class SceneWrapper(SOMAGroupWrapper[clib.SOMAScene]): + """Wrapper around a Pybind11 SceneWrapper handle.""" + + _GROUP_WRAPPED_TYPE = clib.SOMAScene + + _ArrType = TypeVar("_ArrType", bound=clib.SOMAArray) @@ -519,7 +527,7 @@ def resize_soma_joinid(self, newshape: int) -> None: class PointCloudDataFrameWrapper(SOMAArrayWrapper[clib.SOMAPointCloudDataFrame]): - """Wrapper around a Pybind11 SOMADataFrame handle.""" + """Wrapper around a Pybind11 SOMAPointCloudDataFrame handle.""" _ARRAY_WRAPPED_TYPE = clib.SOMAPointCloudDataFrame diff --git a/apis/python/src/tiledbsoma/soma_collection.cc b/apis/python/src/tiledbsoma/soma_collection.cc index 38e6522e32..7b9c5c80da 100644 --- a/apis/python/src/tiledbsoma/soma_collection.cc +++ b/apis/python/src/tiledbsoma/soma_collection.cc @@ -75,6 +75,9 @@ void load_soma_collection(py::module& m) { py::class_( m, "SOMAMeasurement"); + py::class_( + m, "SOMAScene"); + py::class_( m, "SOMAMultiscaleImage"); } diff --git a/apis/python/src/tiledbsoma/soma_object.cc b/apis/python/src/tiledbsoma/soma_object.cc index 09b6a54048..1156f7148d 100644 --- a/apis/python/src/tiledbsoma/soma_object.cc +++ b/apis/python/src/tiledbsoma/soma_object.cc @@ -91,6 +91,8 @@ void load_soma_object(py::module& m) { else if (soma_obj_type == "somameasurement") return py::cast( dynamic_cast(*soma_obj)); + else if (soma_obj_type == "somascene") + return py::cast(dynamic_cast(*soma_obj)); else if (soma_obj_type == "somamultiscaleimage") return py::cast( dynamic_cast(*soma_obj)); diff --git a/apis/python/tests/test_scene.py b/apis/python/tests/test_scene.py new file mode 100644 index 0000000000..6875c4575d --- /dev/null +++ b/apis/python/tests/test_scene.py @@ -0,0 +1,352 @@ +import json +from urllib.parse import urljoin + +import numpy as np +import pyarrow as pa +import pytest +import typeguard + +import tiledbsoma as soma + + +def create_and_populate_df(uri: str) -> soma.DataFrame: + obs_arrow_schema = pa.schema( + [ + ("foo", pa.int32()), + ("bar", pa.float64()), + ("baz", pa.large_string()), + ] + ) + + with soma.DataFrame.create(uri, schema=obs_arrow_schema) as obs: + pydict = {} + pydict["soma_joinid"] = [0, 1, 2, 3, 4] + pydict["foo"] = [10, 20, 30, 40, 50] + pydict["bar"] = [4.1, 5.2, 6.3, 7.4, 8.5] + pydict["baz"] = ["apple", "ball", "cat", "dog", "egg"] + rb = pa.Table.from_pydict(pydict) + obs.write(rb) + + return soma.DataFrame.open(uri) + + +def test_scene_basic(tmp_path): + baseuri = tmp_path.as_uri() + + with soma.Scene.create(baseuri) as scene: + assert scene.uri == baseuri + + with pytest.raises(TypeError): + scene["obsl"] = soma.Experiment.create(urljoin(baseuri, "obs")) + obsl_uri = urljoin(baseuri, "obsl") + scene["obsl"] = 
soma.Collection.create(obsl_uri) + scene["obsl"]["df"] = create_and_populate_df(urljoin(obsl_uri, "df")) + + with pytest.raises(TypeError): + scene["varl"] = soma.Measurement.create(urljoin(baseuri, "var")) + varl_uri = urljoin(baseuri, "varl") + scene["varl"] = soma.Collection.create(varl_uri) + scene["varl"]["sparse"] = soma.SparseNDArray.create( + urljoin(varl_uri, "sparse"), type=pa.int64(), shape=(10,) + ) + scene["varl"]["dense"] = soma.DenseNDArray.create( + urljoin(varl_uri, "dense"), type=pa.int64(), shape=(10,) + ) + + img_uri = urljoin(baseuri, "img") + scene["img"] = soma.Collection.create(img_uri) + scene["img"]["col"] = soma.Collection.create(urljoin(img_uri, "col")) + + assert not soma.Collection.exists(baseuri) + assert soma.Scene.exists(baseuri) + assert soma.Collection.exists(obsl_uri) + assert soma.Collection.exists(varl_uri) + assert soma.Collection.exists(img_uri) + assert soma.Measurement.exists(urljoin(baseuri, "var")) + assert soma.SparseNDArray.exists(urljoin(varl_uri, "sparse")) + assert soma.DenseNDArray.exists(urljoin(varl_uri, "dense")) + assert soma.Collection.exists(urljoin(img_uri, "col")) + + with soma.Scene.open(baseuri) as scene: + assert scene is not None + assert scene.obsl is not None + assert scene.obsl["df"] is not None + assert scene.varl is not None + assert scene.varl["sparse"] is not None + assert scene.varl["dense"] is not None + assert scene.img is not None + assert scene.img["col"] is not None + + assert len(scene) == 3 + assert scene.soma_type == "SOMAScene" + + assert scene.obsl == scene["obsl"] + assert len(scene.obsl) == 1 + assert scene.obsl["df"] == scene["obsl"]["df"] + + assert scene.varl == scene["varl"] + assert len(scene.varl) == 2 + assert scene.varl["sparse"] == scene["varl"]["sparse"] + assert scene.varl["dense"] == scene["varl"]["dense"] + + assert scene.img == scene["img"] + assert len(scene.img) == 1 + assert scene.img["col"] == scene["img"]["col"] + + with pytest.raises(soma.DoesNotExistError): + soma.Scene.open("bad uri") + + +def test_measurement_with_var_scene(tmp_path): + baseuri = tmp_path.as_uri() + obs_scene_uri = urljoin(baseuri, "obs_scene") + + with soma.Measurement.create(baseuri) as mea: + with pytest.raises(TypeError): + mea["obs_scene"] = soma.SparseNDArray.create(obs_scene_uri) + mea["obs_scene"] = create_and_populate_df(obs_scene_uri) + + assert soma.Measurement.exists(baseuri) + assert soma.DataFrame.exists(obs_scene_uri) + + +def test_scene_coord_space(tmp_path): + uri = tmp_path.as_uri() + + coord_space = soma.CoordinateSpace( + [ + soma.Axis(name="x"), + soma.Axis(name="y"), + ] + ) + coord_space_json = """ + [ + {"name": "x", "unit": null}, + {"name": "y", "unit": null} + ] + """ + + with soma.Scene.create(uri) as scene: + assert scene.coordinate_space is None + assert "soma_coordinate_space" not in scene.metadata + + # Setter only takes in CoordinateSpace + with pytest.raises(typeguard.TypeCheckError): + scene.coordinate_space = None + with pytest.raises(typeguard.TypeCheckError): + scene.coordinate_space = [soma.Axis(name="x"), soma.Axis(name="y")] + + # Reserved metadata key should not be settable? 
+ # with pytest.raises(soma.SOMAError): + # scene.metadata["soma_coordinate_space"] = coord_space_json + + scene.coordinate_space = coord_space + assert scene.coordinate_space == coord_space + assert json.loads(scene.metadata["soma_coordinate_space"]) == json.loads( + coord_space_json + ) + + with soma.Scene.open(uri) as scene: + assert scene.coordinate_space == coord_space + + +@pytest.mark.parametrize( + "coord_transform, transform_kwargs", + [ + (soma.AffineTransform, {"matrix": [[1, 0, 0], [0, 1, 0], [0, 0, 1]]}), + (soma.ScaleTransform, {"scale_factors": [1, 1]}), + (soma.UniformScaleTransform, {"scale": 1}), + (soma.IdentityTransform, {}), + ], +) +def test_scene_point_cloud(tmp_path, coord_transform, transform_kwargs): + baseuri = urljoin(f"{tmp_path.as_uri()}/", "test_scene_point_cloud") + + with soma.Scene.create(baseuri) as scene: + obsl_uri = urljoin(baseuri, "obsl") + scene["obsl"] = soma.Collection.create(obsl_uri) + + ptc_uri = urljoin(obsl_uri, "ptc") + asch = pa.schema([("x", pa.float64()), ("y", pa.float64())]) + coord_space = soma.CoordinateSpace([soma.Axis(name="x"), soma.Axis(name="y")]) + + # TODO replace with Scene.add_new_point_cloud_dataframe when implemented + scene["obsl"]["ptc"] = soma.PointCloudDataFrame.create(ptc_uri, schema=asch) + + transform = coord_transform( + input_axes=("x", "y"), output_axes=("x", "y"), **transform_kwargs + ) + + # The scene coordinate space must be set before registering + with pytest.raises(soma.SOMAError): + scene.set_transform_to_point_cloud_dataframe("ptc", transform) + + scene.coordinate_space = coord_space + + # No SOMAObject named 'bad' in Scene + with pytest.raises(KeyError): + scene.set_transform_to_point_cloud_dataframe("bad", transform) + + # Not a PointCloudDataFrame + scene["obsl"]["col"] = soma.Collection.create(urljoin(obsl_uri, "col")) + with pytest.raises(typeguard.TypeCheckError): + scene.set_transform_to_point_cloud_dataframe("col", transform) + + # Transform not set + with pytest.raises(KeyError): + scene.get_transform_to_point_cloud_dataframe("ptc") + + scene.set_transform_to_point_cloud_dataframe("ptc", transform) + + ptc_transform = scene.get_transform_to_point_cloud_dataframe("ptc") + if isinstance(coord_transform, soma.AffineTransform): + assert np.array_equal( + ptc_transform.augmented_matrix, + transform.augmented_matrix, + ) + elif isinstance(coord_transform, soma.ScaleTransform): + assert np.array_equal( + ptc_transform.scale_factors, + transform.scale_factors, + ) + elif isinstance( + coord_transform, (soma.UniformScaleTransform, soma.IdentityTransform) + ): + assert ptc_transform.scale == transform.scale + + +@pytest.mark.parametrize( + "coord_transform, transform_kwargs", + [ + (soma.AffineTransform, {"matrix": [[1, 0, 0], [0, 1, 0], [0, 0, 1]]}), + (soma.ScaleTransform, {"scale_factors": [1, 1]}), + (soma.UniformScaleTransform, {"scale": 1}), + (soma.IdentityTransform, {}), + ], +) +def test_scene_multiscale_image(tmp_path, coord_transform, transform_kwargs): + baseuri = urljoin(f"{tmp_path.as_uri()}/", "test_scene_multiscale_image") + + with soma.Scene.create(baseuri) as scene: + obsl_uri = urljoin(baseuri, "obsl") + scene["obsl"] = soma.Collection.create(obsl_uri) + + img_uri = urljoin(baseuri, "img") + scene["img"] = soma.Collection.create(img_uri) + + msi_uri = urljoin(img_uri, "msi") + coord_space = soma.CoordinateSpace([soma.Axis(name="x"), soma.Axis(name="y")]) + + # TODO replace with Scene.add_multiscale_image when implemented + scene["img"]["msi"] = soma.MultiscaleImage.create( + msi_uri, 
type=pa.int64(), reference_level_shape=[1, 2, 3] + ) + + transform = coord_transform( + input_axes=("x", "y"), + output_axes=("x", "y"), + **transform_kwargs, + ) + + # The scene coordinate space must be set before registering + with pytest.raises(soma.SOMAError): + scene.set_transform_to_multiscale_image("msi", transform) + + scene.coordinate_space = coord_space + + # No MultiscaleImage named 'bad' in Scene + with pytest.raises(KeyError): + scene.set_transform_to_multiscale_image("bad", transform) + + # Transform not set + with pytest.raises(KeyError): + scene.get_transform_to_multiscale_image("msi") + + # Not a MultiscaleImage + scene["img"]["col"] = soma.Collection.create(urljoin(img_uri, "col")) + with pytest.raises(typeguard.TypeCheckError): + scene.set_transform_to_multiscale_image("col", transform) + + scene.set_transform_to_multiscale_image("msi", transform) + + msi_transform = scene.get_transform_to_multiscale_image("msi") + if isinstance(coord_transform, soma.AffineTransform): + assert np.array_equal( + msi_transform.augmented_matrix, + transform.augmented_matrix, + ) + elif isinstance(coord_transform, soma.ScaleTransform): + assert np.array_equal( + msi_transform.scale_factors, + transform.scale_factors, + ) + elif isinstance( + coord_transform, (soma.UniformScaleTransform, soma.IdentityTransform) + ): + assert msi_transform.scale == transform.scale + + +@pytest.mark.skip("GeometryDataFrame not supported yet") +@pytest.mark.parametrize( + "coord_transform, transform_kwargs", + [ + (soma.AffineTransform, {"matrix": [[1, 0, 0], [0, 1, 0], [0, 0, 1]]}), + (soma.ScaleTransform, {"scale_factors": [1, 1]}), + (soma.UniformScaleTransform, {"scale": 1}), + (soma.IdentityTransform, {}), + ], +) +def test_scene_geometry_dataframe(tmp_path, coord_transform, transform_kwargs): + baseuri = urljoin(f"{tmp_path.as_uri()}/", "test_scene_geometry_dataframe") + + with soma.Scene.create(baseuri) as scene: + obsl_uri = urljoin(baseuri, "obsl") + scene["obsl"] = soma.Collection.create(obsl_uri) + + gdf_uri = urljoin(obsl_uri, "gdf") + asch = pa.schema([("x", pa.float64()), ("y", pa.float64())]) + coord_space = soma.CoordinateSpace([soma.Axis(name="x"), soma.Axis(name="y")]) + + # TODO replace with Scene.add_new_geometry_dataframe when implemented + scene["obsl"]["gdf"] = soma.GeometryDataFrame.create(gdf_uri, schema=asch) + + transform = coord_transform( + input_axes=("x", "y"), output_axes=("x", "y"), **transform_kwargs + ) + + # The scene coordinate space must be set before registering + with pytest.raises(soma.SOMAError): + scene.set_transform_to_geometry_dataframe("gdf", transform) + + scene.coordinate_space = coord_space + + # No SOMAObject named 'bad' in Scene + with pytest.raises(KeyError): + scene.set_transform_to_geometry_dataframe("bad", transform) + + # Not a GeometryDataFrame + scene["obsl"]["col"] = soma.Collection.create(urljoin(obsl_uri, "col")) + with pytest.raises(typeguard.TypeCheckError): + scene.set_transform_to_geometry_dataframe("col", transform) + + # Transform not set + with pytest.raises(KeyError): + scene.get_transform_to_geometry_dataframe("gdf") + + scene.set_transform_to_geometry_dataframe("gdf", transform) + + gdf_transform = scene.get_transform_to_geometry_dataframe("gdf") + if isinstance(coord_transform, soma.AffineTransform): + assert np.array_equal( + gdf_transform.augmented_matrix, + transform.augmented_matrix, + ) + elif isinstance(coord_transform, soma.ScaleTransform): + assert np.array_equal( + gdf_transform.scale_factors, + transform.scale_factors, + ) + elif 
isinstance( + coord_transform, (soma.UniformScaleTransform, soma.IdentityTransform) + ): + assert gdf_transform.scale == transform.scale
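
For reviewers, a minimal end-to-end sketch of the API this change adds, mirroring what `apis/python/tests/test_scene.py` exercises. This is not part of the diff; the URI, key names, and axis names are illustrative only, and the point cloud is created directly because `Scene.add_new_point_cloud_dataframe` is still `NotImplementedError` here.

```python
import pyarrow as pa

import tiledbsoma as soma

uri = "file:///tmp/scene-example"  # illustrative URI

with soma.Scene.create(uri) as scene:
    # A Scene constrains the reserved keys "img", "obsl", and "varl" to collections.
    scene["obsl"] = soma.Collection.create(f"{uri}/obsl")

    # The scene-level coordinate space must be set before any transform is registered.
    scene.coordinate_space = soma.CoordinateSpace(
        [soma.Axis(name="x"), soma.Axis(name="y")]
    )

    # Create the point cloud dataframe directly, as the new tests do.
    schema = pa.schema([("x", pa.float64()), ("y", pa.float64())])
    scene["obsl"]["ptc"] = soma.PointCloudDataFrame.create(
        f"{uri}/obsl/ptc", schema=schema
    )

    # Register the scene -> dataframe transform; it is stored as JSON under the
    # "soma_scene_registry_ptc" metadata key of the "obsl" subcollection.
    transform = soma.IdentityTransform(input_axes=("x", "y"), output_axes=("x", "y"))
    scene.set_transform_to_point_cloud_dataframe("ptc", transform)

with soma.Scene.open(uri) as scene:
    print(scene.coordinate_space)
    print(scene.get_transform_to_point_cloud_dataframe("ptc"))
```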