Option to create non-shared pm.Data #5295

Merged
merged 4 commits on Jan 4, 2022
5 changes: 4 additions & 1 deletion RELEASE-NOTES.md
@@ -113,7 +113,10 @@ This includes API changes we did not warn about since at least `3.11.0` (2021-01
- Added partial dependence plots and individual conditional expectation plots [5091](https://github.com/pymc-devs/pymc3/pull/5091).
- Modify how particle weights are computed. This improves accuracy of the modeled function (see [5177](https://github.com/pymc-devs/pymc3/pull/5177)).
- Improve sampling, increase default number of particles [5229](https://github.com/pymc-devs/pymc3/pull/5229).
- `pm.Data` now passes additional kwargs to `aesara.shared`. [#5098](https://github.com/pymc-devs/pymc/pull/5098)
- New features for `pm.Data` containers:
- With `pm.Data(..., mutable=True/False)`, or by using `pm.MutableData` vs. `pm.ConstantData`, one can now create `TensorConstant` data variables. They can be more performant and compatible in situations where a variable doesn't need to be changed via `pm.set_data()`. See [#5295](https://github.com/pymc-devs/pymc/pull/5295).
- New named dimensions can be introduced to the model via `pm.Data(..., dims=...)`. For mutable data variables (see above) the lengths of these dimensions are symbolic, so they can be re-sized via `pm.set_data()`.
- `pm.Data` now passes additional kwargs to `aesara.shared`/`at.as_tensor`. [#5098](https://github.com/pymc-devs/pymc/pull/5098).
- ...


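A minimal usage sketch of the two container flavors described in the release notes above; the variable names and values are illustrative and not part of this PR:

```python
import pymc as pm

with pm.Model() as model:
    # Immutable container: registered with the model as a TensorConstant.
    x_const = pm.ConstantData("x_const", [1.0, 2.0, 3.0])

    # Mutable container: registered as a SharedVariable, so its value
    # (and, via resizable dims, its shape) can later be swapped out
    # with pm.set_data().
    x_mut = pm.MutableData("x_mut", [1.0, 2.0, 3.0])

    # Equivalent spelling through the keyword argument:
    x_kwarg = pm.Data("x_kwarg", [1.0, 2.0, 3.0], mutable=False)
```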
2 changes: 1 addition & 1 deletion pymc/backends/arviz.py
@@ -454,7 +454,7 @@ def constant_data_to_xarray(self):
"""Convert constant data to xarray."""
# For constant data, we are concerned only with deterministics and
# data. The constant data vars must be either pm.Data
# (TensorSharedVariable) or pm.Deterministic
# (TensorConstant/SharedVariable) or pm.Deterministic
constant_data_vars = {} # type: Dict[str, Var]

def is_data(name, var) -> bool:
268 changes: 163 additions & 105 deletions pymc/data.py
@@ -17,18 +17,21 @@
import os
import pkgutil
import urllib.request
import warnings

from copy import copy
from typing import Any, Dict, List, Sequence
from typing import Any, Dict, List, Optional, Sequence, Union

import aesara
import aesara.tensor as at
import numpy as np
import pandas as pd

from aesara.compile.sharedvalue import SharedVariable
from aesara.graph.basic import Apply
from aesara.tensor.type import TensorType
from aesara.tensor.var import TensorVariable
from aesara.tensor.var import TensorConstant, TensorVariable
from packaging import version

import pymc as pm

@@ -40,6 +43,8 @@
"Minibatch",
"align_minibatches",
"Data",
"ConstantData",
"MutableData",
]
BASE_URL = "https://github.com/raw/pymc-devs/pymc-examples/main/examples/data/{filename}"

@@ -463,16 +468,115 @@ def align_minibatches(batches=None):
rng.seed()


class Data:
"""Data container class that wraps :func:`aesara.shared` and lets
the model be aware of its inputs and outputs.
def determine_coords(model, value, dims: Optional[Sequence[str]] = None) -> Dict[str, Sequence]:
"""Determines coordinate values from data or the model (via ``dims``)."""
coords = {}

# If value is a df or a series, we interpret the index as coords:
if isinstance(value, (pd.Series, pd.DataFrame)):
dim_name = None
if dims is not None:
dim_name = dims[0]
if dim_name is None and value.index.name is not None:
dim_name = value.index.name
if dim_name is not None:
coords[dim_name] = value.index

# If value is a df, we also interpret the columns as coords:
if isinstance(value, pd.DataFrame):
dim_name = None
if dims is not None:
dim_name = dims[1]
if dim_name is None and value.columns.name is not None:
dim_name = value.columns.name
if dim_name is not None:
coords[dim_name] = value.columns

if isinstance(value, np.ndarray) and dims is not None:
if len(dims) != value.ndim:
raise pm.exceptions.ShapeError(
"Invalid data shape. The rank of the dataset must match the " "length of `dims`.",
actual=value.shape,
expected=value.ndim,
)
for size, dim in zip(value.shape, dims):
coord = model.coords.get(dim, None)
if coord is None:
coords[dim] = pd.RangeIndex(size, name=dim)

return coords


def ConstantData(
name: str,
value,
*,
dims: Optional[Sequence[str]] = None,
export_index_as_coords=False,
**kwargs,
) -> TensorConstant:
"""Alias for ``pm.Data(..., mutable=False)``.

Registers the ``value`` as a ``TensorConstant`` with the model.
"""
return Data(
name,
value,
dims=dims,
export_index_as_coords=export_index_as_coords,
mutable=False,
**kwargs,
)


def MutableData(
name: str,
value,
*,
dims: Optional[Sequence[str]] = None,
export_index_as_coords=False,
**kwargs,
) -> SharedVariable:
"""Alias for ``pm.Data(..., mutable=True)``.

Registers the ``value`` as a ``SharedVariable`` with the model.
"""
return Data(
name,
value,
dims=dims,
export_index_as_coords=export_index_as_coords,
mutable=True,
**kwargs,
)


def Data(
name: str,
value,
*,
dims: Optional[Sequence[str]] = None,
export_index_as_coords=False,
mutable: Optional[bool] = None,
**kwargs,
) -> Union[SharedVariable, TensorConstant]:
"""Data container that registers a data variable with the model.

Depending on the ``mutable`` setting (default: True), the variable
is registered as a ``SharedVariable``, enabling it to be altered
in value and shape, but NOT in dimensionality using ``pm.set_data()``.

Parameters
----------
name: str
The name for this variable
value: {List, np.ndarray, pd.Series, pd.DataFrame}
A value to associate with this variable
mutable : bool, optional
Switches between creating a ``SharedVariable`` (``mutable=True``, default)
vs. creating a ``TensorConstant`` (``mutable=False``).
Consider using ``pm.ConstantData`` or ``pm.MutableData`` as less verbose
alternatives to ``pm.Data(..., mutable=...)``.
dims: {str, tuple of str}, optional, default=None
Dimension names of the random variables (as opposed to the shapes of these
random variables). Use this when `value` is a pandas Series or DataFrame. The
@@ -495,7 +599,7 @@ class Data:
>>> observed_data = [mu + np.random.randn(20) for mu in true_mu]

>>> with pm.Model() as model:
... data = pm.Data('data', observed_data[0])
... data = pm.MutableData('data', observed_data[0])
... mu = pm.Normal('mu', 0, 10)
... pm.Normal('y', mu=mu, sigma=1, observed=data)

@@ -513,104 +617,58 @@
For more information, take a look at this example notebook
https://docs.pymc.io/notebooks/data_container.html
"""
if isinstance(value, list):
value = np.array(value)

def __new__(
self,
name,
value,
*,
dims=None,
export_index_as_coords=False,
**kwargs,
):
if isinstance(value, list):
value = np.array(value)

# Add data container to the named variables of the model.
try:
model = pm.Model.get_context()
except TypeError:
raise TypeError(
"No model on context stack, which is needed to instantiate a data container. "
"Add variable inside a 'with model:' block."
)
name = model.name_for(name)

# `pandas_to_array` takes care of parameter `value` and
# transforms it to something digestible for pymc
shared_object = aesara.shared(pandas_to_array(value), name, **kwargs)

if isinstance(dims, str):
dims = (dims,)
if not (dims is None or len(dims) == shared_object.ndim):
raise pm.exceptions.ShapeError(
"Length of `dims` must match the dimensions of the dataset.",
actual=len(dims),
expected=shared_object.ndim,
# Add data container to the named variables of the model.
try:
model = pm.Model.get_context()
except TypeError:
raise TypeError(
"No model on context stack, which is needed to instantiate a data container. "
"Add variable inside a 'with model:' block."
)
name = model.name_for(name)

# `pandas_to_array` takes care of parameter `value` and
# transforms it to something digestible for Aesara.
arr = pandas_to_array(value)

if mutable is None:
current = version.Version(pm.__version__)
mutable = current.major == 4 and current.minor < 1
if mutable:
warnings.warn(
"The `mutable` kwarg was not specified. Currently it defaults to `pm.Data(mutable=True)`,"
" which is equivalent to using `pm.MutableData()`."
" In v4.1.0 the default will change to `pm.Data(mutable=False)`, equivalent to `pm.ConstantData`."
" Set `pm.Data(..., mutable=False/True)`, or use `pm.ConstantData`/`pm.MutableData`.",
FutureWarning,
)

coords = self.set_coords(model, value, dims)

if export_index_as_coords:
model.add_coords(coords)
elif dims:
# Register new dimension lengths
for d, dname in enumerate(dims):
if not dname in model.dim_lengths:
model.add_coord(dname, values=None, length=shared_object.shape[d])

# To draw the node for this variable in the graphviz Digraph we need
# its shape.
# XXX: This needs to be refactored
# shared_object.dshape = tuple(shared_object.shape.eval())
# if dims is not None:
# shape_dims = model.shape_from_dims(dims)
# if shared_object.dshape != shape_dims:
# raise pm.exceptions.ShapeError(
# "Data shape does not match with specified `dims`.",
# actual=shared_object.dshape,
# expected=shape_dims,
# )

model.add_random_variable(shared_object, dims=dims)

return shared_object

@staticmethod
def set_coords(model, value, dims=None) -> Dict[str, Sequence]:
coords = {}

# If value is a df or a series, we interpret the index as coords:
if isinstance(value, (pd.Series, pd.DataFrame)):
dim_name = None
if dims is not None:
dim_name = dims[0]
if dim_name is None and value.index.name is not None:
dim_name = value.index.name
if dim_name is not None:
coords[dim_name] = value.index

# If value is a df, we also interpret the columns as coords:
if isinstance(value, pd.DataFrame):
dim_name = None
if dims is not None:
dim_name = dims[1]
if dim_name is None and value.columns.name is not None:
dim_name = value.columns.name
if dim_name is not None:
coords[dim_name] = value.columns

if isinstance(value, np.ndarray) and dims is not None:
if len(dims) != value.ndim:
raise pm.exceptions.ShapeError(
"Invalid data shape. The rank of the dataset must match the "
"length of `dims`.",
actual=value.shape,
expected=value.ndim,
)
for size, dim in zip(value.shape, dims):
coord = model.coords.get(dim, None)
if coord is None:
coords[dim] = pd.RangeIndex(size, name=dim)

return coords
if mutable:
x = aesara.shared(arr, name, **kwargs)
else:
x = at.as_tensor_variable(arr, name, **kwargs)

if isinstance(dims, str):
dims = (dims,)
if not (dims is None or len(dims) == x.ndim):
raise pm.exceptions.ShapeError(
"Length of `dims` must match the dimensions of the dataset.",
actual=len(dims),
expected=x.ndim,
)

coords = determine_coords(model, value, dims)

if export_index_as_coords:
model.add_coords(coords)
elif dims:
# Register new dimension lengths
for d, dname in enumerate(dims):
if not dname in model.dim_lengths:
model.add_coord(dname, values=None, length=x.shape[d])

model.add_random_variable(x, dims=dims)

return x
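To illustrate the coordinate handling implemented by `determine_coords` above: passing a pandas DataFrame together with `export_index_as_coords=True` registers the index and columns as model coordinates. The DataFrame, its index/column names, and the final print are illustrative assumptions, not part of the diff:

```python
import numpy as np
import pandas as pd
import pymc as pm

# Illustrative DataFrame whose index and columns carry names; with dims
# not given, those names are used as the dimension names.
df = pd.DataFrame(
    np.random.randn(4, 2),
    index=pd.Index(["a", "b", "c", "d"], name="obs_id"),
    columns=pd.Index(["x1", "x2"], name="feature"),
)

with pm.Model() as model:
    # determine_coords() reads df.index ("obs_id") and df.columns ("feature");
    # export_index_as_coords=True adds them to the model via model.add_coords().
    data = pm.ConstantData("data", df, export_index_as_coords=True)

print(model.coords)  # expected to contain "obs_id" and "feature" entries
```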
11 changes: 6 additions & 5 deletions pymc/model.py
@@ -1114,7 +1114,7 @@ def set_data(
):
"""Changes the values of a data variable in the model.

In contrast to pm.Data().set_value, this method can also
In contrast to pm.MutableData().set_value, this method can also
update the corresponding coordinates.

Parameters
@@ -1131,7 +1131,8 @@ def set_data(
shared_object = self[name]
if not isinstance(shared_object, SharedVariable):
raise TypeError(
f"The variable `{name}` must be a `SharedVariable` (e.g. `pymc.Data`) to allow updating. "
f"The variable `{name}` must be a `SharedVariable`"
" (created through `pm.MutableData()` or `pm.Data(mutable=True)`) to allow updating. "
f"The current type is: {type(shared_object)}"
)

@@ -1156,7 +1157,7 @@ def set_data(
length_changed = new_length != old_length

# Reject resizing if we already know that it would create shape problems.
# NOTE: If there are multiple pm.Data containers sharing this dim, but the user only
# NOTE: If there are multiple pm.MutableData containers sharing this dim, but the user only
# changes the values for one of them, they will run into shape problems nonetheless.
length_belongs_to = length_tensor.owner.inputs[0].owner.inputs[0]
if not isinstance(length_belongs_to, SharedVariable) and length_changed:
@@ -1735,8 +1736,8 @@ def set_data(new_data, model=None):

>>> import pymc as pm
>>> with pm.Model() as model:
... x = pm.Data('x', [1., 2., 3.])
... y = pm.Data('y', [1., 2., 3.])
... x = pm.MutableData('x', [1., 2., 3.])
... y = pm.MutableData('y', [1., 2., 3.])
... beta = pm.Normal('beta', 0, 1)
... obs = pm.Normal('obs', x * beta, 1, observed=y)
... idata = pm.sample(1000, tune=1000)
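Finally, a hedged sketch of the resizing behavior that the release notes and `Model.set_data` describe for mutable data with named dimensions; the dimension name, data values, and the posterior-predictive step are invented for illustration:

```python
import numpy as np
import pymc as pm

with pm.Model() as model:
    # Mutable containers sharing a named dimension; for mutable data the
    # length of "obs_id" stays symbolic, so it can change later.
    x = pm.MutableData("x", [1.0, 2.0, 3.0], dims="obs_id")
    y = pm.MutableData("y", [1.0, 2.0, 3.0], dims="obs_id")
    beta = pm.Normal("beta", 0, 1)
    obs = pm.Normal("obs", x * beta, 1, observed=y)
    idata = pm.sample(1000, tune=1000)

with model:
    # Swap in data of a different length. Per the NOTE in Model.set_data(),
    # every container sharing "obs_id" should be updated together, otherwise
    # shape problems follow.
    pm.set_data({"x": np.arange(5.0), "y": np.linspace(0.0, 1.0, 5)})
    pm.sample_posterior_predictive(idata)
```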