diff --git a/nengo_loihi/builder/builder.py b/nengo_loihi/builder/builder.py index a0570b9ab..bfb8751f2 100644 --- a/nengo_loihi/builder/builder.py +++ b/nengo_loihi/builder/builder.py @@ -1,7 +1,8 @@ from collections import defaultdict, OrderedDict import logging -from nengo import Network +from nengo import Network, Node, Ensemble, Connection, Probe +from nengo.builder import Model as NengoModel from nengo.builder.builder import Builder as NengoBuilder from nengo.builder.network import build_network from nengo.cache import NoDecoderCache @@ -89,10 +90,28 @@ def __init__(self, dt=0.001, label=None, builder=None): self.build_callback = None self.decoder_cache = NoDecoderCache() + # Host models filled in by the build process + def create_host_model(label, dt): + # We don't use a decoder cache because it requires a context + # manager that differs depending on which sub-model is being built + return NengoModel( + dt=float(dt), + label="%s, dt=%f" % (label, dt), + decoder_cache=NoDecoderCache()) + + # TODO: these models may not look/behave exactly the same as + # standard nengo models, because they don't have a toplevel network + # built into them or configs set + self.host_pre = create_host_model(label="%s, host_pre" % label, dt=dt) + self.host = create_host_model(label="%s, host" % label, dt=dt) + # Objects created by the model for simulation on Loihi self.inputs = OrderedDict() self.blocks = OrderedDict() + # Will be filled in by the simulator __init__ + self.splitter_directive = None + # Will be filled in by the network builder self.toplevel = None self.config = None @@ -128,8 +147,11 @@ def __init__(self, dt=0.001, label=None, builder=None): # magnitude/weight resolution) self.pes_wgt_exp = 4 - # Will be provided by Simulator + # Used to track interactions between host models self.chip2host_params = {} + self.chip2host_receivers = OrderedDict() + self.host2chip_senders = OrderedDict() + self.needs_sender = {} def __getstate__(self): raise NotImplementedError("Can't pickle nengo_loihi.builder.Model") @@ -150,8 +172,38 @@ def add_block(self, block): assert block not in self.blocks self.blocks[block] = len(self.blocks) + def delegate(self, obj): + if not isinstance(obj, (Node, Ensemble, Probe)): + # Note: this is safe because any objects built from within a normal + # nengo model (other than self) will not be re-delegated + return self + elif self.splitter_directive.on_chip(obj): + return self + elif self.splitter_directive.is_precomputable(obj): + return self.host_pre + else: + return self.host + def build(self, obj, *args, **kwargs): - built = self.builder.build(self, obj, *args, **kwargs) + # Don't build the passthrough nodes or connections + passthrough_directive = self.splitter_directive.passthrough_directive + if (isinstance(obj, Node) + and obj in passthrough_directive.removed_passthroughs): + return None + if (isinstance(obj, Connection) + and obj in passthrough_directive.removed_connections): + return None + + # Note: any callbacks for host_pre or host will not be invoked here + model = self.delegate(obj) + if model is not self: + # done for compatibility with nengo<=2.8.0 + # otherwise we could just copy over the initial + # seeding to all other models + model.seeds[obj] = self.seeds[obj] + model.seeded[obj] = self.seeded[obj] + + built = model.builder.build(model, obj, *args, **kwargs) if self.build_callback is not None: self.build_callback(obj) return built diff --git a/nengo_loihi/builder/connection.py b/nengo_loihi/builder/connection.py index c7ba04c6f..02db302d9 100644 --- 
a/nengo_loihi/builder/connection.py +++ b/nengo_loihi/builder/connection.py @@ -1,5 +1,8 @@ +import copy +import logging + import nengo -from nengo import Ensemble, Connection, Node +from nengo import Ensemble, Connection, Node, Probe as NengoProbe from nengo.builder.connection import ( build_no_solver as _build_no_solver, BuiltConnection, @@ -7,6 +10,7 @@ get_targets, multiply, ) +from nengo.connection import LearningRule from nengo.ensemble import Neurons from nengo.exceptions import BuildError, ValidationError from nengo.solvers import NoSolver, Solver @@ -15,10 +19,267 @@ from nengo_loihi import conv from nengo_loihi.block import Axon, LoihiBlock, Probe, Synapse from nengo_loihi.builder.builder import Builder -from nengo_loihi.builder.inputs import ChipReceiveNeurons, LoihiInput +from nengo_loihi.builder.inputs import ( + LoihiInput, + ChipReceiveNode, + ChipReceiveNeurons, + HostSendNode, + HostReceiveNode, + PESModulatoryTarget, +) from nengo_loihi.compat import ( nengo_transforms, sample_transform, conn_solver) from nengo_loihi.neurons import loihi_rates +from nengo_loihi.passthrough import base_obj + +logger = logging.getLogger(__name__) + + +def _inherit_seed(dest_model, dest_obj, src_model, src_obj): + dest_model.seeded[dest_obj] = src_model.seeded[src_obj] + dest_model.seeds[dest_obj] = src_model.seeds[src_obj] + + +@Builder.register(Connection) +def build_connection(model, conn): + is_pre_chip = model.splitter_directive.on_chip(base_obj(conn.pre)) + + if isinstance(conn.post_obj, LearningRule): + assert not is_pre_chip + return build_host_to_learning_rule(model, conn) + + is_post_chip = model.splitter_directive.on_chip(base_obj(conn.post)) + + if is_pre_chip and is_post_chip: + build_chip_connection(model, conn) + + elif not is_pre_chip and is_post_chip: + if isinstance(conn.pre_obj, Neurons): + build_host_neurons_to_chip(model, conn) + else: + build_host_to_chip(model, conn) + + elif is_pre_chip and not is_post_chip: + build_chip_to_host(model, conn) + + else: + assert is_pre_chip == is_post_chip + nengo_model = model.delegate(base_obj(conn.pre)) + assert nengo_model is model.delegate(base_obj(conn.post)) + _inherit_seed(nengo_model, conn, model, conn) + nengo_model.build(conn) + + +def build_host_neurons_to_chip(model, conn): + """Send spikes over and do the rest of the connection on-chip""" + + assert not isinstance(conn.post, LearningRule) + dim = conn.size_in + nengo_model = model.delegate(base_obj(conn.pre)) + + logger.debug("Creating ChipReceiveNeurons for %s", conn) + receive = ChipReceiveNeurons( + dim, + neuron_type=conn.pre_obj.ensemble.neuron_type, + label=None if conn.label is None else "%s_neurons" % conn.label, + add_to_container=False, + ) + _inherit_seed(model, receive, model, conn) + model.builder.build(model, receive) + + receive2post = Connection( + receive, + conn.post, + transform=conn.transform, + synapse=conn.synapse, + label=None if conn.label is None else "%s_chip" % conn.label, + add_to_container=False, + ) + _inherit_seed(model, receive2post, model, conn) + build_chip_connection(model, receive2post) + + logger.debug("Creating HostSendNode for %s", conn) + send = HostSendNode( + dim, + label=None if conn.label is None else "%s_send" % conn.label, + add_to_container=False, + ) + nengo_model.build(send) + + pre2send = Connection( + conn.pre, + send, + synapse=None, + label=None if conn.label is None else "%s_host" % conn.label, + add_to_container=False, + ) + model.host2chip_senders[send] = receive + _inherit_seed(nengo_model, pre2send, model, conn) + 
nengo_model.build(pre2send) + + +def build_host_to_chip(model, conn): + rng = np.random.RandomState(model.seeds[conn]) + dim = conn.size_out + nengo_model = model.delegate(base_obj(conn.pre)) + + logger.debug("Creating ChipReceiveNode for %s", conn) + receive = ChipReceiveNode( + dim * 2, + size_out=dim, + label=None if conn.label is None else "%s_node" % conn.label, + add_to_container=False, + ) + model.builder.build(model, receive) + + receive2post = Connection( + receive, + conn.post, + synapse=model.decode_tau, + label=None if conn.label is None else "%s_chip" % conn.label, + add_to_container=False, + ) + _inherit_seed(model, receive2post, model, conn) + build_chip_connection(model, receive2post) + + logger.debug("Creating DecodeNeuron ensemble for %s", conn) + ens = model.node_neurons.get_ensemble(dim) + ens.label = None if conn.label is None else "%s_ens" % conn.label + _inherit_seed(nengo_model, ens, model, conn) + nengo_model.build(ens) + + if nengo_transforms is not None and isinstance( + conn.transform, nengo_transforms.Convolution): + raise BuildError( + "Conv2D transforms not supported for off-chip to " + "on-chip connections where `pre` is not a Neurons object.") + + # Scale the input spikes based on the radius of the target ensemble + weights = sample_transform(conn, rng=rng) + + if isinstance(conn.post_obj, Ensemble): + weights = weights / conn.post_obj.radius + + if nengo_transforms is None: + transform = weights + else: + # copy the Transform information, setting `init` to the sampled weights + transform = copy.copy(conn.transform) + type(transform).init.data[transform] = weights + + pre2ens = Connection( + conn.pre, + ens, + function=conn.function, + solver=conn.solver, + eval_points=conn.eval_points, + scale_eval_points=conn.scale_eval_points, + synapse=conn.synapse, + transform=transform, + label=None if conn.label is None else "%s_enc" % conn.label, + add_to_container=False, + ) + _inherit_seed(nengo_model, pre2ens, model, conn) + nengo_model.build(pre2ens) + + logger.debug("Creating HostSendNode for %s", conn) + send = HostSendNode( + dim * 2, + label=None if conn.label is None else "%s_send" % conn.label, + add_to_container=False, + ) + nengo_model.build(send) + + ensneurons2send = Connection( + ens.neurons, + send, + synapse=None, + label=None if conn.label is None else "%s_host" % conn.label, + add_to_container=False, + ) + _inherit_seed(nengo_model, ensneurons2send, model, conn) + model.host2chip_senders[send] = receive + nengo_model.build(ensneurons2send) + + +def build_chip_to_host(model, conn): + rng = np.random.RandomState(model.seeds[conn]) + dim = conn.size_out + nengo_model = model.delegate(base_obj(conn.post)) + + logger.debug("Creating HostReceiveNode for %s", conn) + receive = HostReceiveNode( + dim, + label=None if conn.label is None else "%s_receive" % conn.label, + add_to_container=False, + ) + nengo_model.build(receive) + + receive2post = Connection( + receive, + conn.post, + synapse=conn.synapse, + label=None if conn.label is None else "%s_host" % conn.label, + add_to_container=False, + ) + _inherit_seed(nengo_model, receive2post, model, conn) + nengo_model.build(receive2post) + + logger.debug("Creating Probe for %s", conn) + transform = sample_transform(conn, rng=rng) + + probe = NengoProbe(conn.pre, + synapse=None, + solver=conn.solver, + add_to_container=False) + model.chip2host_params[probe] = dict( + learning_rule_type=conn.learning_rule_type, + function=conn.function, + eval_points=conn.eval_points, + scale_eval_points=conn.scale_eval_points, 
+ transform=transform, + label=None if conn.label is None else "%s_probe" % conn.label, + ) + model.chip2host_receivers[probe] = receive + _inherit_seed(model, probe, model, conn) + model.builder.build(model, probe) + + if conn.learning_rule_type is not None: + if not isinstance(conn.pre_obj, Ensemble): + raise NotImplementedError( + "Learning rule presynaptic object must be an Ensemble " + "(got %r)" % type(conn.pre_obj).__name__) + model.needs_sender[conn.learning_rule] = PESModulatoryTarget(probe) + + +def build_host_to_learning_rule(model, conn): + dim = conn.size_out + nengo_model = model.delegate(base_obj(conn.pre)) + + logger.debug("Creating HostSendNode for %s", conn) + send = HostSendNode( + dim, + label=None if conn.label is None else "%s_send" % conn.label, + add_to_container=False, + ) + nengo_model.build(send) + + pre2send = Connection( + conn.pre, + send, + function=conn.function, + solver=conn.solver, + eval_points=conn.eval_points, + scale_eval_points=conn.scale_eval_points, + synapse=conn.synapse, + transform=conn.transform, + label=conn.label, + add_to_container=False, + ) + pes_target = model.needs_sender[conn.post_obj] + model.host2chip_senders[send] = pes_target + _inherit_seed(nengo_model, pre2send, model, conn) + nengo_model.build(pre2send) def build_decoders(model, conn, rng, sampled_transform): @@ -114,8 +375,7 @@ def build_no_solver(model, solver, conn, rng, sampled_transform): return _build_no_solver(model, solver, conn, rng) -@Builder.register(Connection) # noqa: C901 -def build_connection(model, conn): +def build_chip_connection(model, conn): # noqa: C901 if nengo_transforms is not None: if isinstance(conn.transform, nengo_transforms.Convolution): # TODO: integrate these into the same function diff --git a/nengo_loihi/builder/probe.py b/nengo_loihi/builder/probe.py index fda71d5ba..d081c9e91 100644 --- a/nengo_loihi/builder/probe.py +++ b/nengo_loihi/builder/probe.py @@ -1,5 +1,6 @@ import nengo from nengo import Ensemble, Connection, Node +from nengo.base import ObjView from nengo.connection import LearningRule from nengo.ensemble import Neurons from nengo.exceptions import BuildError @@ -45,6 +46,10 @@ def conn_probe(model, nengo_probe): raise NotImplementedError() target = nengo.Node(size_in=output_dim, add_to_container=False) + # TODO: This is a hack so that the builder can properly delegate the + # connection build to the right method + model.splitter_directive._seen_objects.add(target) + model.splitter_directive._chip_objects.add(target) conn = Connection( nengo_probe.target, @@ -70,10 +75,15 @@ def conn_probe(model, nengo_probe): model.seeded[conn] = model.seeded[nengo_probe] model.seeds[conn] = model.seeds[nengo_probe] + if isinstance(nengo_probe.target, ObjView): + target_obj = nengo_probe.target.obj + else: + target_obj = nengo_probe.target + d = conn.size_out - if isinstance(nengo_probe.target, Ensemble): + if isinstance(target_obj, Ensemble): # probed values are scaled by the target ensemble's radius - scale = nengo_probe.target.radius + scale = target_obj.radius w = np.diag(scale * np.ones(d)) weights = np.vstack([w, -w]) else: diff --git a/nengo_loihi/builder/tests/test_connection.py b/nengo_loihi/builder/tests/test_connection.py new file mode 100644 index 000000000..f4e5ccd78 --- /dev/null +++ b/nengo_loihi/builder/tests/test_connection.py @@ -0,0 +1,67 @@ +from distutils.version import LooseVersion + +import nengo +from nengo.exceptions import BuildError +import numpy as np +import pytest + + +@pytest.mark.skipif(LooseVersion(nengo.__version__) 
<= LooseVersion('2.8.0'), + reason="requires more recent Nengo version") +def test_split_conv2d_transform_error(Simulator): + with nengo.Network() as net: + node_offchip = nengo.Node([1]) + ens_onchip = nengo.Ensemble(10, 1) + conv2d = nengo.Convolution( + n_filters=1, input_shape=(1, 1, 1), kernel_size=(1, 1)) + nengo.Connection(node_offchip, ens_onchip, transform=conv2d) + + with pytest.raises(BuildError, match="Conv2D"): + with Simulator(net): + pass + + +@pytest.mark.parametrize("pre_dims", [1, 3]) +@pytest.mark.parametrize("post_dims", [1, 3]) +@pytest.mark.parametrize("learn", [True, False]) +@pytest.mark.parametrize("use_solver", [True, False]) +def test_manual_decoders( + seed, Simulator, pre_dims, post_dims, learn, use_solver): + + with nengo.Network(seed=seed) as model: + pre = nengo.Ensemble(50, dimensions=pre_dims, + gain=np.ones(50), + bias=np.ones(50) * 5) + post = nengo.Node(size_in=post_dims) + + learning_rule_type = nengo.PES() if learn else None + weights = np.zeros((post_dims, 50)) + if use_solver: + conn = nengo.Connection(pre, post, + function=lambda x: np.zeros(post_dims), + learning_rule_type=learning_rule_type, + solver=nengo.solvers.NoSolver(weights.T)) + else: + conn = nengo.Connection(pre.neurons, post, + learning_rule_type=learning_rule_type, + transform=weights) + + if learn: + error = nengo.Node(np.zeros(post_dims)) + nengo.Connection(error, conn.learning_rule) + + pre_probe = nengo.Probe(pre.neurons, synapse=None) + post_probe = nengo.Probe(post, synapse=None) + + if not use_solver and learn: + with pytest.raises(NotImplementedError): + with Simulator(model) as sim: + pass + else: + with Simulator(model) as sim: + sim.run(0.1) + + # Ensure pre population has a lot of activity + assert np.mean(sim.data[pre_probe]) > 100 + # But that post has no activity due to the zero weights + assert np.all(sim.data[post_probe] == 0) diff --git a/nengo_loihi/builder/tests/test_inputs.py b/nengo_loihi/builder/tests/test_inputs.py index 07f1c651f..00802dd75 100644 --- a/nengo_loihi/builder/tests/test_inputs.py +++ b/nengo_loihi/builder/tests/test_inputs.py @@ -1,8 +1,9 @@ import nengo from nengo.exceptions import SimulationError +import numpy as np import pytest -from nengo_loihi.builder.inputs import ChipReceiveNode +from nengo_loihi.builder.inputs import ChipReceiveNode, PESModulatoryTarget def test_chipreceivenode_run_error(): @@ -12,3 +13,35 @@ def test_chipreceivenode_run_error(): with pytest.raises(SimulationError, match="should not be run"): with nengo.Simulator(net) as sim: sim.step() + + +def test_pesmodulatorytarget_interface(): + target = "target" + p = PESModulatoryTarget(target) + + t0 = 4 + e0 = [1.8, 2.4, 3.3] + t1 = t0 + 3 + e1 = [7.2, 2.2, 4.1] + e01 = np.array(e0) + np.array(e1) + + p.receive(t0, e0) + assert isinstance(p.errors[t0], np.ndarray) + assert np.allclose(p.errors[t0], e0) + + p.receive(t0, e1) + assert np.allclose(p.errors[t0], e01) + + with pytest.raises(AssertionError): + p.receive(t0 - 1, e0) # time needs to be >= last time + + p.receive(t1, e1) + assert np.allclose(p.errors[t1], e1) + + errors = list(p.collect_errors()) + assert len(errors) == 2 + assert errors[0][:2] == (target, t0) and np.allclose(errors[0][2], e01) + assert errors[1][:2] == (target, t1) and np.allclose(errors[1][2], e1) + + p.clear() + assert len(list(p.collect_errors())) == 0 diff --git a/nengo_loihi/passthrough.py b/nengo_loihi/passthrough.py index c2ee42be2..bb96944bf 100644 --- a/nengo_loihi/passthrough.py +++ b/nengo_loihi/passthrough.py @@ -1,22 +1,28 @@ -from 
collections import OrderedDict +from collections import OrderedDict, namedtuple import warnings from nengo import Connection, Lowpass, Node +from nengo.base import ObjView from nengo.connection import LearningRule from nengo.ensemble import Neurons from nengo.exceptions import BuildError, NengoException import numpy as np - from nengo_loihi.compat import nengo_transforms, transform_array +PassthroughDirective = namedtuple( + "PassthroughDirective", + ["removed_passthroughs", "removed_connections", "added_connections"]) + def is_passthrough(obj): return isinstance(obj, Node) and obj.output is None def base_obj(obj): - """Returns the Ensemble or Node underlying an object""" + """Returns the object underlying some view or neurons.""" + if isinstance(obj, ObjView): + obj = obj.obj if isinstance(obj, Neurons): return obj.ensemble return obj @@ -210,7 +216,7 @@ def generate_conns(self): ) -def find_clusters(net, offchip): +def find_clusters(net, ignore): """Create the Clusters for a given nengo Network.""" # find which objects have Probes, as we need to make sure to keep them @@ -218,17 +224,17 @@ def find_clusters(net, offchip): clusters = OrderedDict() # mapping from object to its Cluster for c in net.all_connections: - base_pre = base_obj(c.pre_obj) - base_post = base_obj(c.post_obj) + base_pre = base_obj(c.pre) + base_post = base_obj(c.post) - pass_pre = is_passthrough(c.pre_obj) and c.pre_obj not in offchip + pass_pre = is_passthrough(c.pre_obj) and c.pre_obj not in ignore if pass_pre and c.pre_obj not in clusters: # add new objects to their own initial Cluster clusters[c.pre_obj] = Cluster(c.pre_obj) if c.pre_obj in probed_objs: clusters[c.pre_obj].probed_objs.add(c.pre_obj) - pass_post = is_passthrough(c.post_obj) and c.post_obj not in offchip + pass_post = is_passthrough(c.post_obj) and c.post_obj not in ignore if pass_post and c.post_obj not in clusters: # add new objects to their own initial Cluster clusters[c.post_obj] = Cluster(c.post_obj) @@ -254,7 +260,7 @@ def find_clusters(net, offchip): return clusters -def convert_passthroughs(network, offchip): +def convert_passthroughs(network, ignore): """Create a set of Connections that could replace the passthrough Nodes. This does not actually modify the Network, but instead returns the @@ -262,11 +268,12 @@ def convert_passthroughs(network, offchip): and the Connections that should be added to replace the Nodes and Connections. - The parameter offchip provides a list of objects that should be considered - to be offchip. The system will only remove passthrough Nodes that go - between two onchip objects. + The parameter ignore provides a list of objects (i.e., ensembles and nodes) + that should not be considered by the passthrough removal process. + The system will only remove passthrough Nodes where neither pre nor post + are ignored. 
""" - clusters = find_clusters(network, offchip=offchip) + clusters = find_clusters(network, ignore=ignore) removed_passthroughs = set() removed_connections = set() @@ -278,11 +285,11 @@ def convert_passthroughs(network, offchip): onchip_input = False onchip_output = False for c in cluster.conns_in: - if base_obj(c.pre_obj) not in offchip: + if base_obj(c.pre) not in ignore: onchip_input = True break for c in cluster.conns_out: - if base_obj(c.post_obj) not in offchip: + if base_obj(c.post) not in ignore: onchip_output = True break has_input = len(cluster.conns_in) > 0 @@ -300,4 +307,6 @@ def convert_passthroughs(network, offchip): | cluster.conns_mid | cluster.conns_out) added_connections.update(new_conns) - return removed_passthroughs, removed_connections, added_connections + + return PassthroughDirective( + removed_passthroughs, removed_connections, added_connections) diff --git a/nengo_loihi/simulator.py b/nengo_loihi/simulator.py index c1e0a7dae..471f38d1d 100644 --- a/nengo_loihi/simulator.py +++ b/nengo_loihi/simulator.py @@ -4,7 +4,6 @@ import warnings import nengo -from nengo.cache import get_default_decoder_cache from nengo.exceptions import ( ReadonlyError, SimulatorClosed, @@ -20,7 +19,7 @@ from nengo_loihi.discretize import discretize_model from nengo_loihi.emulator import EmulatorInterface from nengo_loihi.hardware import HardwareInterface, HAS_NXSDK -from nengo_loihi.splitter import split +from nengo_loihi.splitter import SplitterDirective import nengo_loihi.config as config logger = logging.getLogger(__name__) @@ -306,7 +305,7 @@ def __init__( # noqa: C901 assert self.model.dt == dt self.precompute = precompute - self.networks = None + self.network = network self.sims = OrderedDict() self._run_steps = None @@ -317,43 +316,45 @@ def __init__( # noqa: C901 config.add_params(network) # ensure seeds are identical to nengo + # this has no effect for nengo<=2.8.0 seed_network(network, seeds=self.model.seeds, seeded=self.model.seeded) - # split the host into one, two or three networks - self.networks = split( + # determine how to split the host into one, two or three models + self.model.splitter_directive = SplitterDirective( network, precompute=precompute, - node_neurons=self.model.node_neurons, - node_tau=self.model.decode_tau, - remove_passthrough=remove_passthrough, - ) - network = self.networks.chip - - self.model.chip2host_params = self.networks.chip2host_params - - self.chip = self.networks.chip - self.host = self.networks.host - self.host_pre = self.networks.host_pre - - if len(self.host_pre.all_objects) > 0: - host_pre_model = self._get_host_model( - self.host_pre, dt=dt, seeds=self.model.seeds, - seeded=self.model.seeded) - self.sims["host_pre"] = nengo.Simulator(self.host_pre, - dt=self.dt, - model=host_pre_model, - progress_bar=False, - optimize=False) - - if len(self.host.all_objects) > 0: - host_model = self._get_host_model( - self.host, dt=dt, seeds=self.model.seeds, - seeded=self.model.seeded) + remove_passthrough=remove_passthrough) + + # Build the network into the model + self.model.build(network) + + # Build the extra passthrough connections into the model + passthrough_directive = ( + self.model.splitter_directive.passthrough_directive) + for conn in passthrough_directive.added_connections: + # https://github.com/nengo/nengo-loihi/issues/210 + self.model.seeds[conn] = None + self.model.seeded[conn] = False + self.model.build(conn) + + if len(self.model.host_pre.params): + assert precompute + self.sims["host_pre"] = nengo.Simulator( + network=None, + 
dt=self.dt, + model=self.model.host_pre, + progress_bar=False, + optimize=False) + elif precompute: + warnings.warn("No precomputable objects. Setting " + "precompute=True has no effect.") + + if len(self.model.host.params): self.sims["host"] = nengo.Simulator( - self.host, + network=None, dt=self.dt, - model=host_model, + model=self.model.host, progress_bar=False, optimize=False) elif not precompute: @@ -364,9 +365,6 @@ def __init__( # noqa: C901 # to specify `precompute` unless they absolutely have to. self.precompute = True - # Build the network into the model - self.model.build(network) - self._probe_outputs = self.model.params self.data = ProbeDict(self._probe_outputs) for sim in self.sims.values(): @@ -403,16 +401,6 @@ def __init__( # noqa: C901 self.closed = False self.reset(seed=seed) - @staticmethod - def _get_host_model(network, dt, seeds, seeded): - model = nengo.builder.Model( - dt=float(dt), - label="%s, dt=%f" % (network, dt), - decoder_cache=get_default_decoder_cache()) - model.seeds.update(seeds) - model.seeded.update(seeded) - return model - def __del__(self): """Raise a ResourceWarning if we are deallocated while open.""" if not self.closed: @@ -468,7 +456,7 @@ def _probe(self): self._probe_step_time() for probe in self.model.probes: - if probe in self.networks.chip2host_params: + if probe in self.model.chip2host_params: continue assert probe.sample_every is None, ( "probe.sample_every not implemented") @@ -551,7 +539,7 @@ def step(self): def _collect_receiver_info(self): spikes = [] errors = OrderedDict() - for sender, receiver in self.networks.host2chip_senders.items(): + for sender, receiver in self.model.host2chip_senders.items(): receiver.clear() for t, x in sender.queue: receiver.receive(t, x) @@ -584,7 +572,7 @@ def _host2chip(self, sim): def _chip2host(self, sim): probes_receivers = OrderedDict( # map probes to receivers (self.model.objs[probe]['out'], receiver) - for probe, receiver in self.networks.chip2host_receivers.items()) + for probe, receiver in self.model.chip2host_receivers.items()) sim.chip2host(probes_receivers) def _make_run_steps(self): diff --git a/nengo_loihi/splitter.py b/nengo_loihi/splitter.py index 4d3725ffe..b029309cb 100644 --- a/nengo_loihi/splitter.py +++ b/nengo_loihi/splitter.py @@ -1,524 +1,141 @@ -from collections import defaultdict, OrderedDict -import copy -import logging -import warnings +from collections import defaultdict -from nengo import Connection, Direct, Ensemble, Network, Node, Probe -from nengo.base import ObjView -from nengo.connection import LearningRule -from nengo.ensemble import Neurons +from nengo import Direct, Ensemble, Node, Probe from nengo.exceptions import BuildError -import numpy as np - -from nengo_loihi.builder.inputs import ( - ChipReceiveNode, - ChipReceiveNeurons, - HostSendNode, - HostReceiveNode, - PESModulatoryTarget, -) -from nengo_loihi.compat import nengo_transforms, sample_transform -from nengo_loihi.passthrough import convert_passthroughs - -logger = logging.getLogger(__name__) - - -def base_obj(obj): - if isinstance(obj, ObjView): - obj = obj.obj - if isinstance(obj, Neurons): - return obj.ensemble - elif isinstance(obj, LearningRule): - return obj.connection - return obj - - -class SplitNetworks: - def __init__(self, original, node_neurons=None, node_tau=0.005): - self.original = original - self.node_neurons = node_neurons - self.node_tau = node_tau - - self.host = Network(seed=original.seed) - self.chip = Network(seed=original.seed) - self.host_pre = Network(seed=original.seed) - - self.targets 
= ("host", "chip", "host_pre") - - # Interactions between rules - self.needs_sender = {} - - # Used later in the build process - self.chip2host_params = {} - self.chip2host_receivers = OrderedDict() - self.host2chip_senders = OrderedDict() - - self.adds = OrderedDict() - self.moves = OrderedDict() - self.removes = [] - - def __contains__(self, obj): - obj = base_obj(obj) - return (obj in self.moves - or obj in self.adds - or obj in self.removes) - - def add(self, obj, target): - assert target in self.targets, "invalid target" - obj = base_obj(obj) - assert obj not in self, "obj already moved" - self.adds[obj] = target - - def finalize(self): - def _add(obj, net): - for cls in type(obj).__mro__: - if cls in net.objects: - net.objects[cls].append(obj) - break - else: - assert False, "cannot handle type %r" % (type(obj).__name__,) - - # Ensure that all objects have been dealt with - for obj in self.original.all_objects: - if not isinstance(obj, Network): - assert obj in self, ( - "%s not moved or explicitly removed" % (obj,)) - - # Process moves and adds - for obj, target in self.moves.items(): - _add(obj, getattr(self, target)) - for obj, target in self.adds.items(): - _add(obj, getattr(self, target)) - - def location(self, obj, default=None): - obj = base_obj(obj) - return self.moves.get(obj, self.adds.get(obj, default)) +from nengo.connection import LearningRule - def move(self, obj, target, force=False): - obj = base_obj(obj) - if not force: - assert obj not in self, "already moved" - assert target in self.targets, "invalid target" - logger.debug("Moving %s to %s", obj, target) - if obj in self.adds: - self.adds[obj] = target +from nengo_loihi.passthrough import ( + convert_passthroughs, PassthroughDirective, base_obj, is_passthrough) + + +class SplitterDirective: + """Creates a set of directives to guide the builder.""" + + def __init__(self, network, precompute=False, remove_passthrough=True): + self.network = network + + # subset of network: only nodes and ensembles; + # probes are handled dynamically + self._seen_objects = set() + + # subset of seen, marking which are run on the hardware; + # those running on the host are "seen - chip" + self._chip_objects = set() + + # Step 1. Place nodes on host + self._seen_objects.update(network.all_nodes) + + # Step 2. Place all possible ensembles on chip + # Note: assumes add_params already called by the simulator + for ens in network.all_ensembles: + if (network.config[ens].on_chip in (None, True) + and not isinstance(ens.neuron_type, Direct)): + self._chip_objects.add(ens) + self._seen_objects.add(ens) + + # Step 3. Move learning ensembles (post and error) to host + for conn in network.all_connections: + pre = base_obj(conn.pre) + post = base_obj(conn.post) + if (conn.learning_rule_type is not None + and isinstance(post, Ensemble) + and post in self._chip_objects): + if network.config[post].on_chip: + raise BuildError("Post ensemble (%r) of learned " + "connection (%r) must not be configured " + "as on_chip." % (post, conn)) + self._chip_objects.remove(post) + elif (isinstance(post, LearningRule) + and isinstance(pre, Ensemble) + and pre in self._chip_objects): + if network.config[pre].on_chip: + raise BuildError("Pre ensemble (%r) of error " + "connection (%r) must not be configured " + "as on_chip." % (pre, conn)) + self._chip_objects.remove(pre) + + # Step 4. 
Mark passthrough nodes for removal + if remove_passthrough: + passthroughs = set( + obj for obj in network.all_nodes if is_passthrough(obj)) + ignore = self._seen_objects - self._chip_objects - passthroughs + self.passthrough_directive = convert_passthroughs(network, ignore) else: - self.moves[obj] = target - - def remove(self, obj): - obj = base_obj(obj) - logger.debug("Removing %s", obj) - self.removes.append(obj) - if obj in self.adds: - del self.adds[obj] - elif obj in self.moves: - del self.moves[obj] - + self.passthrough_directive = PassthroughDirective( + set(), set(), set()) -def split(net, precompute, node_neurons, node_tau, remove_passthrough=False): - logger.info("Splitting model into host and chip parts") - networks = SplitNetworks(net, node_neurons=node_neurons, - node_tau=node_tau) - - # --- Step 1: place ensembles and nodes - place_nodes(networks) - place_ensembles(networks) - - # --- Step 1b: remove passthrough nodes - if remove_passthrough: - conns = merge_passthrough_nodes(networks) - else: - conns = networks.original.all_connections - - # --- Step 2: place simple connections - place_internetwork_connections(networks, conns) - - # --- Step 3: split complex connections - split_host_to_chip_connections(networks, conns) - split_chip_to_host_connections(networks, conns) - split_host_to_learning_rules(networks, conns) - - # --- Step 4: place precomputable parts of host - if precompute: - split_pre_from_host(networks) - - # --- Step 5: place probes - place_probes(networks) - - # Commit to the moves marked in the previous steps - networks.finalize() - if precompute: - if len(networks.host_pre.all_objects) == 0: - warnings.warn("No precomputable objects. Setting precompute=True " - "has no effect.") - else: - assert len(networks.host_pre.all_objects) == 0, ( - "Object erroneously added to host_pre") - - return networks - - -def place_nodes(networks): - # Only ChipReceiveNodes can be run on chip - for node in networks.original.all_nodes: - if isinstance(node, ChipReceiveNode): - # Typically ChipReceiveNodes are created by the splitter, but - # it's conceivable that advanced users might make them manually - networks.move(node, "chip") + # Step 5. 
Split precomputable parts of host + # This is a subset of host, marking which are precomputable + if precompute: + self._host_precomputable_objects = self._preclosure() else: - networks.move(node, "host") - - -def place_ensembles(networks): - config = networks.original.config - - for ens in networks.original.all_ensembles: - # User-specified config takes precedence - if config[ens].on_chip is not None: - networks.move(ens, "chip" if config[ens].on_chip else "host") - # Direct mode ensembles must be off-chip - elif isinstance(ens.neuron_type, Direct): - networks.move(ens, "host") - - for conn in networks.original.all_connections: - # `post` of learning rules must be off chip - if (conn.learning_rule_type is not None - and isinstance(base_obj(conn.post_obj), Ensemble) - and conn.post_obj not in networks): - networks.move(conn.post_obj, "host") - # `error` of learning rules must be off chip - elif (isinstance(conn.post_obj, LearningRule) - and isinstance(base_obj(conn.pre_obj), Ensemble) - and conn.pre_obj not in networks): - networks.move(conn.pre_obj, "host") - - # All other ensembles are placed on chip - for ens in networks.original.all_ensembles: - if ens not in networks: - networks.move(ens, "chip") - - -def merge_passthrough_nodes(networks): - offchip = set() - for obj, target in networks.moves.items(): - if isinstance(obj, Node) and obj.output is None: - # this is a passthrough Node so don't force it to be offchip - continue - elif target == 'host': - offchip.add(obj) - - remove_nodes, remove_conns, add_conns = convert_passthroughs( - networks.original, offchip) - for n in remove_nodes: - networks.remove(n) - for c in remove_conns: - networks.remove(c) - - conns = networks.original.all_connections - for c in remove_conns: - conns.remove(c) - conns.extend(add_conns) - - return conns - + self._host_precomputable_objects = set() -def place_internetwork_connections(networks, conns): - """Connections from two objects placed in the same location go there. + def _preclosure(self): # noqa: C901 + # performs a transitive closure on all host objects that + # send data to the chip + precomputable = set() - That is, connections from two objects on the host are done on the host, - and connections from two objects on the chip are done on the chip. 
- """ - for conn in conns: - pre_loc = networks.location(conn.pre_obj) - post_loc = networks.location(conn.post_obj) - if pre_loc == post_loc: - if pre_loc == "chip": - assert conn.learning_rule_type is None - networks.move(conn, pre_loc) + # forwards and backwards adjacency lists + pre_to_conn = defaultdict(list) + post_to_conn = defaultdict(list) + # data-structure for breadth-first search + queue = [] + head = 0 -def split_host_to_chip_connections(networks, conns): - for conn in conns: - if conn in networks: - # Already processed - continue - - pre_loc = networks.location(conn.pre_obj) - post_loc = networks.location(conn.post_obj) - if pre_loc == "host" and post_loc == "chip": - if isinstance(conn.pre_obj, Neurons): - split_host_neurons_to_chip(networks, conn) - else: - split_host_to_chip(networks, conn) - assert conn in networks - - -def split_host_neurons_to_chip(networks, conn): - """Send spikes over and do the rest of the connection on-chip""" - - assert not isinstance(conn.post, LearningRule) - dim = conn.size_in - - logger.debug("Creating ChipReceiveNeurons for %s", conn) - receive = ChipReceiveNeurons( - dim, - neuron_type=conn.pre_obj.ensemble.neuron_type, - label=None if conn.label is None else "%s_neurons" % conn.label, - add_to_container=False, - ) - networks.add(receive, "chip") - receive2post = Connection( - receive, - conn.post, - transform=conn.transform, - synapse=conn.synapse, - label=None if conn.label is None else "%s_chip" % conn.label, - add_to_container=False, - ) - networks.add(receive2post, "chip") - - logger.debug("Creating HostSendNode for %s", conn) - send = HostSendNode( - dim, - label=None if conn.label is None else "%s_send" % conn.label, - add_to_container=False, - ) - networks.add(send, "host") - pre2send = Connection( - conn.pre, - send, - synapse=None, - label=None if conn.label is None else "%s_host" % conn.label, - add_to_container=False, - ) - networks.add(pre2send, "host") - - networks.host2chip_senders[send] = receive - networks.remove(conn) - - -def split_host_to_chip(networks, conn): - dim = conn.size_out - logger.debug("Creating ChipReceiveNode for %s", conn) - receive = ChipReceiveNode( - dim * 2, - size_out=dim, - label=None if conn.label is None else "%s_node" % conn.label, - add_to_container=False, - ) - networks.add(receive, "chip") - receive2post = Connection( - receive, - conn.post, - synapse=networks.node_tau, - label=None if conn.label is None else "%s_chip" % conn.label, - add_to_container=False, - ) - networks.add(receive2post, "chip") - - logger.debug("Creating DecodeNeuron ensemble for %s", conn) - if networks.node_neurons is None: - raise BuildError( - "DecodeNeurons must be specified for host->chip connection.") - ens = networks.node_neurons.get_ensemble(dim) - ens.label = None if conn.label is None else "%s_ens" % conn.label - networks.add(ens, "host") - - if nengo_transforms is not None and isinstance( - conn.transform, nengo_transforms.Convolution): - raise BuildError( - "Conv2D transforms not supported for off-chip to " - "on-chip connections where `pre` is not a Neurons object.") - - # Scale the input spikes based on the radius of the target ensemble - seed = networks.original.seed if conn.seed is None else conn.seed - weights = sample_transform(conn, rng=np.random.RandomState(seed=seed)) - - if isinstance(conn.post_obj, Ensemble): - weights = weights / conn.post_obj.radius - - if nengo_transforms is None: - transform = weights - else: - # copy the Transform information, setting `init` to the sampled weights - transform = 
copy.copy(conn.transform) - type(transform).init.data[transform] = weights - - pre2ens = Connection( - conn.pre, - ens, - function=conn.function, - solver=conn.solver, - eval_points=conn.eval_points, - scale_eval_points=conn.scale_eval_points, - synapse=conn.synapse, - transform=transform, - label=None if conn.label is None else "%s_enc" % conn.label, - add_to_container=False, - ) - networks.add(pre2ens, "host") - - logger.debug("Creating HostSendNode for %s", conn) - send = HostSendNode( - dim * 2, - label=None if conn.label is None else "%s_send" % conn.label, - add_to_container=False, - ) - networks.add(send, "host") - ensneurons2send = Connection( - ens.neurons, - send, - synapse=None, - label=None if conn.label is None else "%s_host" % conn.label, - add_to_container=False, - ) - networks.add(ensneurons2send, "host") - networks.remove(conn) - - networks.host2chip_senders[send] = receive - - -def split_chip_to_host_connections(networks, conns): - for conn in conns: - if conn in networks: - # Already processed - continue - - pre_loc = networks.location(conn.pre_obj) - post_loc = networks.location(conn.post_obj) - # All other connections should be processed by this point - if pre_loc == "chip" and post_loc == "host": - split_chip_to_host(networks, conn) - assert conn in networks - - -def split_chip_to_host(networks, conn): - dim = conn.size_out - - logger.debug("Creating HostReceiveNode for %s", conn) - receive = HostReceiveNode( - dim, - label=None if conn.label is None else "%s_receive" % conn.label, - add_to_container=False, - ) - networks.add(receive, "host") - receive2post = Connection( - receive, - conn.post, - synapse=conn.synapse, - label=None if conn.label is None else "%s_host" % conn.label, - add_to_container=False, - ) - networks.add(receive2post, "host") - - logger.debug("Creating Probe for %s", conn) - seed = networks.original.seed if conn.seed is None else conn.seed - transform = sample_transform(conn, rng=np.random.RandomState(seed=seed)) - - probe = Probe(conn.pre, - synapse=None, - solver=conn.solver, - add_to_container=False) - networks.chip2host_params[probe] = dict( - learning_rule_type=conn.learning_rule_type, - function=conn.function, - eval_points=conn.eval_points, - scale_eval_points=conn.scale_eval_points, - transform=transform, - label=None if conn.label is None else "%s_probe" % conn.label, - ) - networks.add(probe, "chip") - networks.chip2host_receivers[probe] = receive - - if conn.learning_rule_type is not None: - if not isinstance(conn.pre_obj, Ensemble): - raise NotImplementedError( - "Learning rule presynaptic object must be an Ensemble " - "(got %r)" % type(conn.pre_obj).__name__) - networks.needs_sender[conn.learning_rule] = PESModulatoryTarget(probe) - networks.remove(conn) - - -def split_host_to_learning_rules(networks, conns): - for conn in conns: - if conn in networks: - # Already processed - continue - - pre_loc = networks.location(conn.pre_obj) - if (pre_loc == "host" - and isinstance(conn.post_obj, LearningRule)): - split_host_to_learning_rule(networks, conn) - assert conn in networks - - -def split_host_to_learning_rule(networks, conn): - dim = conn.size_out - logger.debug("Creating HostSendNode for %s", conn) - send = HostSendNode( - dim, - label=None if conn.label is None else "%s_send" % conn.label, - add_to_container=False, - ) - networks.add(send, "host") - - pre2send = Connection( - conn.pre, - send, - function=conn.function, - solver=conn.solver, - eval_points=conn.eval_points, - scale_eval_points=conn.scale_eval_points, - 
synapse=conn.synapse, - transform=conn.transform, - label=conn.label, - add_to_container=False, - ) - networks.add(pre2send, "host") - pes_target = networks.needs_sender[conn.post_obj] - networks.host2chip_senders[send] = pes_target - networks.remove(conn) - - -def place_probes(networks): - for probe in networks.original.all_probes: - target = base_obj(probe.target) - networks.move(probe, networks.location(target)) - - -def split_pre_from_host(networks): # noqa: C901 - logger.info("Splitting pre model from host") - - inputs = defaultdict(list) - outputs = defaultdict(list) - queue = [] - - for d in [networks.moves, networks.adds]: - for obj in d: - if isinstance(obj, Connection): - inputs[base_obj(obj.post_obj)].append(obj) - outputs[base_obj(obj.pre_obj)].append(obj) - elif isinstance(obj, HostSendNode): - networks.move(obj, "host_pre", force=True) + def mark_precomputable(obj): + assert isinstance(obj, (Node, Ensemble)) + if obj not in precomputable: + precomputable.add(obj) queue.append(obj) - while len(queue) > 0: - node_or_ens = queue.pop() - - for conn in inputs[node_or_ens] + outputs[node_or_ens]: - if networks.location(conn) != "host": - continue - networks.move(conn, "host_pre", force=True) - - if conn in inputs[node_or_ens]: - obj = base_obj(conn.pre_obj) - elif conn in outputs[node_or_ens]: - obj = base_obj(conn.post_obj) - - if (isinstance(obj, (Node, Ensemble)) - and networks.location(obj) == "host"): - if isinstance(obj, HostReceiveNode): + # initialize queue with the pre objects on + # host -> chip connections + for conn in self.network.all_connections: + pre, post = base_obj(conn.pre), base_obj(conn.post) + pre_to_conn[pre].append(conn) + post_to_conn[post].append(conn) + if self.on_chip(post) and not self.on_chip(pre): + mark_precomputable(pre) + + # traverse all connected objects breadth-first + while head < len(queue): + node_or_ens = queue[head] + head += 1 + + # handle forwards adjacencies + for conn in pre_to_conn[node_or_ens]: + assert base_obj(conn.pre) is node_or_ens + if not isinstance(conn.post, LearningRule): + post = base_obj(conn.post) + if not self.on_chip(post): + mark_precomputable(post) + + # handle backwards adjacencies + for conn in post_to_conn[node_or_ens]: + assert base_obj(conn.post) is node_or_ens + pre = base_obj(conn.pre) + if self.on_chip(pre): raise BuildError("Cannot precompute input, " "as it is dependent on output") - networks.move(obj, "host_pre", force=True) - queue.append(obj) + mark_precomputable(pre) + + return precomputable + + def on_chip(self, obj): + if isinstance(obj, Probe): + obj = base_obj(obj.target) + if not isinstance(obj, (Ensemble, Node)): + raise TypeError("Locations are only established for ensembles ", + "nodes, and probes -- not for %r" % (obj,)) + if obj not in self._seen_objects: + raise IndexError("Object (%r) is not a part of the network" + % (obj,)) + return obj in self._chip_objects + + def is_precomputable(self, obj): + if isinstance(obj, Probe): + obj = base_obj(obj.target) + return (not self.on_chip(obj) + and obj in self._host_precomputable_objects) diff --git a/nengo_loihi/tests/test_passthrough.py b/nengo_loihi/tests/test_passthrough.py index e187f67a0..8e91b037a 100644 --- a/nengo_loihi/tests/test_passthrough.py +++ b/nengo_loihi/tests/test_passthrough.py @@ -3,11 +3,9 @@ import numpy as np import pytest -import nengo_loihi -from nengo_loihi.builder.inputs import ChipReceiveNode from nengo_loihi.compat import transform_array from nengo_loihi.decode_neurons import OnOffDecodeNeurons -from nengo_loihi.splitter 
import split +from nengo_loihi.passthrough import convert_passthroughs default_node_neurons = OnOffDecodeNeurons() @@ -24,32 +22,23 @@ def test_passthrough_placement(): g = nengo.Node(None, size_in=1) # should be off-chip nengo.Connection(stim, a) nengo.Connection(a, b) - nengo.Connection(b, c) - nengo.Connection(c, d) - nengo.Connection(d, e) - nengo.Connection(e, f) + conn_bc = nengo.Connection(b, c) + conn_cd = nengo.Connection(c, d) + conn_de = nengo.Connection(d, e) + conn_ef = nengo.Connection(e, f) nengo.Connection(f, g) nengo.Probe(g) - nengo_loihi.add_params(model) - networks = split(model, - precompute=False, - node_neurons=default_node_neurons, - node_tau=0.005, - remove_passthrough=True) - chip = networks.chip - host = networks.host - - assert a in host.nodes - assert a not in chip.nodes - assert c not in host.nodes - assert c not in chip.nodes - assert d not in host.nodes - assert d not in chip.nodes - assert e not in host.nodes - assert e not in chip.nodes - assert g in host.nodes - assert g not in chip.nodes + passthrough_directive = convert_passthroughs(model, ignore={stim}) + + assert passthrough_directive.removed_passthroughs == {c, d, e} + assert passthrough_directive.removed_connections == { + conn_bc, conn_cd, conn_de, conn_ef} + + conns = list(passthrough_directive.added_connections) + assert len(conns) == 1 + assert conns[0].pre is b + assert conns[0].post is f @pytest.mark.parametrize("d1", [1, 3]) @@ -64,20 +53,17 @@ def test_transform_merging(d1, d2, d3): t1 = np.random.uniform(-1, 1, (d2, d1)) t2 = np.random.uniform(-1, 1, (d3, d2)) - nengo.Connection(a, b, transform=t1) - nengo.Connection(b, c, transform=t2) + conn_ab = nengo.Connection(a, b, transform=t1) + conn_bc = nengo.Connection(b, c, transform=t2) - nengo_loihi.add_params(model) - networks = split(model, - precompute=False, - node_neurons=default_node_neurons, - node_tau=0.005, - remove_passthrough=True) - chip = networks.chip + passthrough_directive = convert_passthroughs(model, ignore=set()) - assert len(chip.connections) == 1 - conn = chip.connections[0] - assert np.allclose(transform_array(conn.transform), np.dot(t2, t1)) + assert passthrough_directive.removed_passthroughs == {b} + assert passthrough_directive.removed_connections == {conn_ab, conn_bc} + + conns = list(passthrough_directive.added_connections) + assert len(conns) == 1 + assert np.allclose(transform_array(conns[0].transform), np.dot(t2, t1)) @pytest.mark.parametrize("n_ensembles", [1, 3]) @@ -88,22 +74,14 @@ def test_identity_array(n_ensembles, ens_dimensions): b = nengo.networks.EnsembleArray(10, n_ensembles, ens_dimensions) nengo.Connection(a.output, b.input) - nengo_loihi.add_params(model) - networks = split(model, - precompute=False, - node_neurons=default_node_neurons, - node_tau=0.005, - remove_passthrough=True) + passthrough_directive = convert_passthroughs(model, ignore=set()) - # ignore the a.input -> a.ensemble connections - connections = [conn for conn in networks.chip.connections - if not (isinstance(conn.pre_obj, ChipReceiveNode) - and conn.post_obj in a.ensembles)] + conns = list(passthrough_directive.added_connections) + assert len(conns) == n_ensembles - assert len(connections) == n_ensembles pre = set() post = set() - for conn in connections: + for conn in conns: assert conn.pre in a.all_ensembles or conn.pre_obj is a.input assert conn.post in b.all_ensembles assert np.allclose(transform_array(conn.transform), @@ -123,21 +101,13 @@ def test_full_array(n_ensembles, ens_dimensions): D = n_ensembles * ens_dimensions 
nengo.Connection(a.output, b.input, transform=np.ones((D, D))) - nengo_loihi.add_params(model) - networks = split(model, - precompute=False, - node_neurons=default_node_neurons, - node_tau=0.005, - remove_passthrough=True) + passthrough_directive = convert_passthroughs(model, ignore=set()) - # ignore the a.input -> a.ensemble connections - connections = [conn for conn in networks.chip.connections - if not (isinstance(conn.pre_obj, ChipReceiveNode) - and conn.post_obj in a.ensembles)] + conns = list(passthrough_directive.added_connections) + assert len(conns) == n_ensembles ** 2 - assert len(connections) == n_ensembles ** 2 pairs = set() - for conn in connections: + for conn in conns: assert conn.pre in a.all_ensembles assert conn.post in b.all_ensembles assert np.allclose(transform_array(conn.transform), @@ -158,26 +128,18 @@ def test_synapse_merging(Simulator, seed): nengo.Connection(b[1], c.input[0], synapse=None) nengo.Connection(b[1], c.input[1], synapse=0.2) - nengo_loihi.add_params(model) - networks = split(model, - precompute=False, - node_neurons=default_node_neurons, - node_tau=0.005, - remove_passthrough=True) + passthrough_directive = convert_passthroughs(model, ignore=set()) - # ignore the a.input -> a.ensemble connections - connections = [conn for conn in networks.chip.connections - if not (isinstance(conn.pre_obj, ChipReceiveNode) - and conn.post_obj in a.ensembles)] + conns = list(passthrough_directive.added_connections) + assert len(conns) == 4 - assert len(connections) == 4 desired_filters = { ('0', '0'): None, ('0', '1'): 0.2, ('1', '0'): 0.1, ('1', '1'): 0.3, } - for conn in connections: + for conn in conns: if desired_filters[(conn.pre.label, conn.post.label)] is None: assert conn.synapse is None else: @@ -186,9 +148,13 @@ def test_synapse_merging(Simulator, seed): conn.synapse.tau, desired_filters[(conn.pre.label, conn.post.label)]) - # check that model builds/runs correctly - with Simulator(model, remove_passthrough=True) as sim: - sim.step() + # check that model builds/runs, and issues the warning + with pytest.warns(UserWarning) as record: + with Simulator(model, remove_passthrough=True) as sim: + sim.step() + + assert any("Combining two Lowpass synapses" in r.message.args[0] + for r in record) def test_no_input(Simulator, seed, allclose): diff --git a/nengo_loihi/tests/test_simulator.py b/nengo_loihi/tests/test_simulator.py index 080a11739..43586165a 100644 --- a/nengo_loihi/tests/test_simulator.py +++ b/nengo_loihi/tests/test_simulator.py @@ -330,11 +330,11 @@ def test_tau_s_warning(Simulator): with pytest.warns(UserWarning) as record: with Simulator(net): pass - # The 0.001 synapse is applied first due to splitting rules putting - # the stim -> ens connection later than the ens -> ens connection + assert any(rec.message.args[0] == ( - "tau_s is currently 0.001, which is smaller than 0.005. " - "Overwriting tau_s with 0.005.") for rec in record) + "tau_s is already set to 0.005, which is larger than 0.001. " + "Using 0.005." + ) for rec in record) with net: nengo.Connection(ens, ens, @@ -343,9 +343,10 @@ def test_tau_s_warning(Simulator): with pytest.warns(UserWarning) as record: with Simulator(net): pass + assert any(rec.message.args[0] == ( - "tau_s is already set to 0.1, which is larger than 0.005. Using 0.1." - ) for rec in record) + "tau_s is currently 0.005, which is smaller than 0.1. 
" + "Overwriting tau_s with 0.1.") for rec in record) @pytest.mark.xfail(nengo.version.version_info <= (2, 8, 0), @@ -355,22 +356,22 @@ def test_seeds(precompute, Simulator, seed): with nengo.Network(seed=seed) as net: nengo_loihi.add_params(net) - e0 = nengo.Ensemble(1, 1) - e1 = nengo.Ensemble(1, 1, seed=2) - e2 = nengo.Ensemble(1, 1) + e0 = nengo.Ensemble(1, 1, label="e0") + e1 = nengo.Ensemble(1, 1, seed=2, label="e1") + e2 = nengo.Ensemble(1, 1, label="e2") net.config[e2].on_chip = False nengo.Connection(e0, e1) nengo.Connection(e0, e2) with nengo.Network(): n = nengo.Node(0) - e = nengo.Ensemble(1, 1) + e = nengo.Ensemble(1, 1, label="e") nengo.Node(1) nengo.Connection(n, e) nengo.Probe(e) with nengo.Network(seed=8): - nengo.Ensemble(8, 1, seed=3) + nengo.Ensemble(8, 1, seed=3, label="unnamed") nengo.Node(1) # --- test that seeds are the same as nengo ref simulator @@ -378,40 +379,19 @@ def test_seeds(precompute, Simulator, seed): with Simulator(net, precompute=precompute) as sim: for obj in net.all_objects: - on_chip = (not isinstance(obj, nengo.Node) and ( - not isinstance(obj, nengo.Ensemble) - or net.config[obj].on_chip)) - - seed = sim.model.seeds.get(obj, None) - assert seed is None or seed == ref.model.seeds[obj] - if on_chip: - assert seed is not None - if obj in sim.model.seeded: - assert sim.model.seeded[obj] == ref.model.seeded[obj] - - if precompute: - seed0 = sim.sims["host_pre"].model.seeds.get(obj, None) - assert seed0 is None or seed0 == ref.model.seeds[obj] - seed1 = sim.sims["host"].model.seeds.get(obj, None) - assert seed1 is None or seed1 == ref.model.seeds[obj] - else: - seed0 = sim.sims["host"].model.seeds.get(obj, None) - assert seed0 is None or seed0 == ref.model.seeds[obj] - seed1 = None - - if not on_chip: - assert seed0 is not None or seed1 is not None + submodel = sim.model.delegate(obj) + seed = submodel.seeds.get(obj, None) + assert seed == ref.model.seeds.get(obj, None) # --- test that seeds that we set are preserved after splitting model = nengo_loihi.builder.Model() - for i, o in enumerate(net.all_objects): - model.seeds[o] = i + for i, obj in enumerate(net.all_objects): + model.seeds[obj] = i with Simulator(net, model=model, precompute=precompute) as sim: - for i, o in enumerate(net.all_objects): - for name, subsim in sim.sims.items(): - if name.startswith("host"): - assert subsim.model.seeds[o] == i + for i, obj in enumerate(net.all_objects): + submodel = sim.model.delegate(obj) + assert submodel.seeds[obj] == i def test_interface(Simulator, allclose): @@ -627,8 +607,6 @@ def test_population_input(request, allclose): assert allclose(z[[1, 3, 5]], weights[0], atol=4e-2, rtol=0) -@pytest.mark.skipif(pytest.config.getoption("--target") != "loihi", - reason="Loihi only test") def test_precompute(allclose, Simulator, seed, plt): simtime = 0.2 @@ -664,9 +642,46 @@ def test_precompute(allclose, Simulator, seed, plt): plt.plot(sim2.trange(), sim2.data[p_out]) plt.title('precompute=True') + # check that each is using the right placement + assert stim in sim1.model.host.params + assert stim not in sim1.model.host_pre.params + assert stim not in sim2.model.host.params + assert stim in sim2.model.host_pre.params + + assert p_stim not in sim1.model.params + assert p_stim in sim1.model.host.params + assert p_stim not in sim1.model.host_pre.params + + assert p_stim not in sim2.model.params + assert p_stim not in sim2.model.host.params + assert p_stim in sim2.model.host_pre.params + + for sim in (sim1, sim2): + assert a in sim.model.params + assert a not in 
sim.model.host.params + assert a not in sim.model.host_pre.params + + assert output not in sim.model.params + assert output in sim.model.host.params + assert output not in sim.model.host_pre.params + + assert p_a in sim.model.params + assert p_a not in sim.model.host.params + assert p_a not in sim.model.host_pre.params + + assert p_out not in sim.model.params + assert p_out in sim.model.host.params + assert p_out not in sim.model.host_pre.params + assert np.array_equal(sim1.data[p_stim], sim2.data[p_stim]) - assert allclose(sim1.data[p_a], sim2.data[p_a], atol=0.2) - assert allclose(sim1.data[p_out], sim2.data[p_out], atol=0.2) + assert sim1.target == sim2.target + if sim1.target != "loihi": + # precompute should not make a difference for the emulator outputs + assert allclose(sim1.data[p_a], sim2.data[p_a]) + assert allclose(sim1.data[p_out], sim2.data[p_out]) + else: + assert allclose(sim1.data[p_a], sim2.data[p_a], atol=0.2) + assert allclose(sim1.data[p_out], sim2.data[p_out], atol=0.2) @pytest.mark.skipif(pytest.config.getoption("--target") != "loihi", @@ -719,3 +734,105 @@ def test_input_node_precompute(allclose, Simulator, plt): plt.legend(loc='best') assert allclose(x['sim'], x['loihi'], atol=0.1, rtol=0.01) + + +@pytest.mark.parametrize("remove_passthrough", [True, False]) +def test_simulator_passthrough(remove_passthrough, Simulator): + with nengo.Network() as model: + host_input = nengo.Node(0) + host_a = nengo.Node(size_in=1) + host_b = nengo.Node(size_in=1) + + chip_x = nengo.Ensemble(10, 1) + remove_c = nengo.Node(size_in=1) + chip_y = nengo.Ensemble(10, 1) + + host_d = nengo.Node(size_in=1) + + conn_input_a = nengo.Connection(host_input, host_a) + conn_a_b = nengo.Connection(host_a, host_b) + conn_b_x = nengo.Connection(host_b, chip_x) + conn_x_c = nengo.Connection(chip_x, remove_c) + conn_c_y = nengo.Connection(remove_c, chip_y) + conn_y_d = nengo.Connection(chip_y, host_d) + + probe_y = nengo.Probe(chip_y) + probe_d = nengo.Probe(host_d) + + with Simulator(model, remove_passthrough=remove_passthrough) as sim: + pass + + assert host_input in sim.model.host.params + assert probe_d in sim.model.host.params + + assert chip_x in sim.model.params + assert chip_y in sim.model.params + assert probe_y in sim.model.params + + # Passthrough nodes are not removed on the host + assert host_a in sim.model.host.params + assert host_b in sim.model.host.params + assert host_d in sim.model.host.params + assert conn_input_a in sim.model.host.params + assert conn_a_b in sim.model.host.params + + if remove_passthrough: + assert remove_c not in sim.model.host.params + else: + assert remove_c in sim.model.host.params + + # These connections currently aren't built in either case + for model in (sim.model, sim.model.host): + assert conn_b_x not in model.params + assert conn_x_c not in model.params + assert conn_c_y not in model.params + assert conn_y_d not in model.params + + +def test_slicing_bugs(Simulator, seed): + + n = 50 + with nengo.Network() as model: + a = nengo.Ensemble(n, 1, label="a") + p0 = nengo.Probe(a[0]) + p = nengo.Probe(a) + + with Simulator(model) as sim: + sim.run(0.1) + + assert np.allclose(sim.data[p0], sim.data[p]) + assert a in sim.model.params + assert a not in sim.model.host.params + + with nengo.Network() as model: + nengo_loihi.add_params(model) + + a = nengo.Ensemble(n, 1, label="a") + + b0 = nengo.Ensemble(n, 1, label="b0", seed=seed) + model.config[b0].on_chip = False + nengo.Connection(a[0], b0) + + b = nengo.Ensemble(n, 1, label="b", seed=seed) + 
model.config[b].on_chip = False + nengo.Connection(a, b) + + p0 = nengo.Probe(b0) + p = nengo.Probe(b) + + with Simulator(model) as sim: + sim.run(0.1) + + assert np.allclose(sim.data[p0], sim.data[p]) + assert a in sim.model.params + assert a not in sim.model.host.params + assert b not in sim.model.params + assert b in sim.model.host.params + + +def test_network_unchanged(Simulator): + with nengo.Network() as model: + nengo.Ensemble(100, 1) + with Simulator(model): + pass + assert model.all_networks == [] diff --git a/nengo_loihi/tests/test_splitter.py b/nengo_loihi/tests/test_splitter.py index 83ca9d3e9..b003251e6 100644 --- a/nengo_loihi/tests/test_splitter.py +++ b/nengo_loihi/tests/test_splitter.py @@ -1,99 +1,40 @@ -from distutils.version import LooseVersion - import pytest import nengo from nengo.exceptions import BuildError import numpy as np -from nengo_loihi.builder.inputs import ( - ChipReceiveNeurons, - ChipReceiveNode, - HostReceiveNode, - HostSendNode, - PESModulatoryTarget, -) -from nengo_loihi.decode_neurons import OnOffDecodeNeurons from nengo_loihi.config import add_params -from nengo_loihi.splitter import ( - place_ensembles, - place_internetwork_connections, - place_nodes, - place_probes, - SplitNetworks, - split, - split_chip_to_host, - split_host_neurons_to_chip, - split_host_to_chip, - split_host_to_learning_rules, - split_pre_from_host, -) - -default_node_neurons = OnOffDecodeNeurons() - - -@pytest.mark.parametrize("pre_dims", [1, 3]) -@pytest.mark.parametrize("post_dims", [1, 3]) -@pytest.mark.parametrize("learn", [True, False]) -@pytest.mark.parametrize("use_solver", [True, False]) -def test_manual_decoders( - seed, Simulator, pre_dims, post_dims, learn, use_solver): - - with nengo.Network(seed=seed) as model: - pre = nengo.Ensemble(50, dimensions=pre_dims, - gain=np.ones(50), - bias=np.ones(50) * 5) - post = nengo.Node(size_in=post_dims) - - learning_rule_type = nengo.PES() if learn else None - weights = np.zeros((post_dims, 50)) - if use_solver: - conn = nengo.Connection(pre, post, - function=lambda x: np.zeros(post_dims), - learning_rule_type=learning_rule_type, - solver=nengo.solvers.NoSolver(weights.T)) - else: - conn = nengo.Connection(pre.neurons, post, - learning_rule_type=learning_rule_type, - transform=weights) - - if learn: - error = nengo.Node(np.zeros(post_dims)) - nengo.Connection(error, conn.learning_rule) - - pre_probe = nengo.Probe(pre.neurons, synapse=None) - post_probe = nengo.Probe(post, synapse=None) - - if not use_solver and learn: - with pytest.raises(NotImplementedError): - with Simulator(model) as sim: - pass - else: - with Simulator(model) as sim: - sim.run(0.1) - - # Ensure pre population has a lot of activity - assert np.mean(sim.data[pre_probe]) > 100 - # But that post has no activity due to the zero weights - assert np.all(sim.data[post_probe] == 0) +from nengo_loihi.splitter import SplitterDirective def test_place_nodes(): + # all nodes go on the host + # ChipReceiveNodes and HostSendNodes are created later by the builder + with nengo.Network() as net: + add_params(net) offchip1 = nengo.Node(0) with nengo.Network(): offchip2 = nengo.Node(np.sin) - offchip3 = HostSendNode(dimensions=1) - onchip = ChipReceiveNode(dimensions=1, size_out=1) + ensemble = nengo.Ensemble(100, 1) + offchip3 = nengo.Node(size_in=1) + nengo.Connection(ensemble, offchip3) - networks = SplitNetworks(net, node_neurons=default_node_neurons) - place_nodes(networks) - assert networks.moves[offchip1] == "host" - assert networks.moves[offchip2] == "host" - assert 
networks.moves[offchip3] == "host" - assert networks.moves[onchip] == "chip" + with nengo.Network(): + nowhere = nengo.Node(0) + + splitter_directive = SplitterDirective(net) + assert not splitter_directive.on_chip(offchip1) + assert not splitter_directive.on_chip(offchip2) + assert not splitter_directive.on_chip(offchip3) + + with pytest.raises(IndexError, match="not a part of the network"): + splitter_directive.on_chip(nowhere) def test_place_ensembles(): + # builder will move the learning stuff onto the host + with nengo.Network() as net: add_params(net) offchip = nengo.Ensemble(10, 1, label="offchip") @@ -108,352 +49,147 @@ def test_place_ensembles(): conn = nengo.Connection(pre, post, learning_rule_type=nengo.PES()) nengo.Connection(error, conn.learning_rule) - networks = SplitNetworks(net, node_neurons=default_node_neurons) - place_ensembles(networks) - assert networks.moves[offchip] == "host" - assert networks.moves[direct] == "host" - assert networks.moves[onchip] == "chip" - assert networks.moves[pre] == "chip" - assert networks.moves[post] == "host" - assert networks.moves[error] == "host" + splitter_directive = SplitterDirective(net) + assert not splitter_directive.on_chip(offchip) + assert not splitter_directive.on_chip(direct) + assert splitter_directive.on_chip(onchip) + assert splitter_directive.on_chip(pre) + assert not splitter_directive.on_chip(post) + assert not splitter_directive.on_chip(error) + + for obj in net.all_ensembles + net.all_nodes: + assert not splitter_directive.is_precomputable(obj) + with pytest.raises(TypeError, match="Locations are only established"): + splitter_directive.on_chip(conn) -def test_place_inter_network_connection(): + +def test_place_internetwork_connections(): with nengo.Network() as net: + add_params(net) offchip = nengo.Ensemble(10, 1) + net.config[offchip].on_chip = False onchip = nengo.Ensemble(10, 1) + onon = nengo.Connection(onchip, onchip) onoff = nengo.Connection(onchip, offchip) offon = nengo.Connection(offchip, onchip) offoff = nengo.Connection(offchip, offchip) - networks = SplitNetworks(net, node_neurons=default_node_neurons) - networks.move(onchip, "chip") - networks.move(offchip, "host") - - place_internetwork_connections(networks, networks.original.all_connections) - assert onoff not in networks - assert offon not in networks - assert networks.location(onon) == "chip" - assert networks.location(offoff) == "host" - - -def test_split_host_neurons_to_chip(): - with nengo.Network() as net: - offchip = nengo.Ensemble(10, 1) - onchip = nengo.Ensemble(10, 1) - neurons2neurons = nengo.Connection( - offchip.neurons, onchip.neurons, transform=np.ones((10, 10))) - neurons2ensemble = nengo.Connection( - offchip.neurons, onchip, transform=np.ones((1, 10))) - - networks = SplitNetworks(net, node_neurons=default_node_neurons) - networks.move(offchip, "host") - networks.move(onchip, "chip") - - def assert_split_correctly(split_conn): - assert len(networks.adds) == 4 - added_types = sorted([(type(obj).__name__, location) - for obj, location in networks.adds.items()]) - assert added_types == [ - ("ChipReceiveNeurons", "chip"), - ("Connection", "chip"), - ("Connection", "host"), - ("HostSendNode", "host"), - ] - assert split_conn in networks.removes - - send = next(obj for obj in networks.adds - if isinstance(obj, HostSendNode)) - receive = next(obj for obj in networks.adds - if isinstance(obj, ChipReceiveNeurons)) - assert networks.host2chip_senders[send] is receive - - split_host_neurons_to_chip(networks, neurons2neurons) - 
assert_split_correctly(neurons2neurons) - networks.adds.clear() # Makes testing subsequent adds easier - split_host_neurons_to_chip(networks, neurons2ensemble) - assert_split_correctly(neurons2ensemble) - - -def test_split_host_to_chip(): - with nengo.Network() as net: - ens_offchip = nengo.Ensemble(10, 1) - node_offchip = nengo.Node(np.sin) - ens_onchip = nengo.Ensemble(10, 1) - connections = [ - nengo.Connection(ens_offchip, ens_onchip), - nengo.Connection(node_offchip, ens_onchip), - nengo.Connection( - ens_offchip, ens_onchip.neurons, transform=np.ones((10, 1))), - nengo.Connection( - node_offchip, ens_onchip.neurons, transform=np.ones((10, 1))), - ] + splitter_directive = SplitterDirective(net) - networks = SplitNetworks(net, node_neurons=default_node_neurons) - networks.move(ens_offchip, "host") - networks.move(node_offchip, "host") - networks.move(ens_onchip, "chip") - - for conn in connections: - split_host_to_chip(networks, conn) - for added in networks.adds: - if isinstance(added, nengo.Ensemble): - ens = added - elif isinstance(added, ChipReceiveNode): - receive = added - elif isinstance(added, HostSendNode): - send = added - # Otherwise must be connection - elif added.pre is conn.pre: - pre2ens = added - elif added.post is conn.post: - receive2post = added - else: - ensneurons2send = added - - assert networks.location(ens) == "host" - assert isinstance(ens.neuron_type, nengo.SpikingRectifiedLinear) - assert pre2ens.post is ens - - assert networks.location(receive) == "chip" - assert networks.location(receive2post) == "chip" - assert receive2post.pre is receive - - assert networks.location(send) == "host" - assert networks.location(ensneurons2send) == "host" - assert ensneurons2send.pre == ens.neurons - assert ensneurons2send.post is send - - assert conn in networks.removes - networks.adds.clear() # makes next loop iteration easier - - -def test_split_no_node_neuron_error(): - with nengo.Network() as net: - add_params(net) - node_offchip = nengo.Node(np.sin) - ens_onchip = nengo.Ensemble(10, 1) - nengo.Connection(node_offchip, ens_onchip) + assert splitter_directive.on_chip(onon.pre) + assert splitter_directive.on_chip(onon.post) - with pytest.raises(BuildError, match="DecodeNeurons"): - split(net, precompute=False, node_neurons=None, node_tau=None) + assert splitter_directive.on_chip(onoff.pre) + assert not splitter_directive.on_chip(onoff.post) + assert not splitter_directive.on_chip(offon.pre) + assert splitter_directive.on_chip(offon.post) -def test_split_chip_to_host(): - with nengo.Network() as net: - ens_onchip = nengo.Ensemble(10, 1) - ens_offchip = nengo.Ensemble(10, 1) - node_offchip = nengo.Node(size_in=1) - connections = [ - nengo.Connection(ens_onchip, ens_offchip), - nengo.Connection( - ens_onchip, ens_offchip, learning_rule_type=nengo.PES()), - nengo.Connection(ens_onchip, node_offchip), - nengo.Connection( - ens_onchip.neurons, ens_offchip, transform=np.ones((1, 10))), - nengo.Connection( - ens_onchip.neurons, node_offchip, transform=np.ones((1, 10))), - ] - connections.append( - nengo.Connection(ens_onchip, connections[1].learning_rule) - ) - - networks = SplitNetworks(net, node_neurons=default_node_neurons) - networks.move(ens_onchip, "chip") - networks.move(ens_offchip, "host") - networks.move(node_offchip, "host") - - for conn in connections: - split_chip_to_host(networks, conn) - for added in networks.adds: - if isinstance(added, HostReceiveNode): - receive = added - elif isinstance(added, nengo.Probe): - probe = added - else: - assert added.post is 
conn.post - receive2post = added - - assert networks.location(receive) == "host" - assert networks.location(receive2post) == "host" - assert receive2post.pre is receive - - assert networks.location(probe) == "chip" - assert probe.target is conn.pre or probe.target is conn.pre.ensemble - assert probe.synapse is None - assert probe in networks.chip2host_params - assert probe in networks.chip2host_receivers - assert networks.chip2host_receivers[probe] is receive - if conn.learning_rule_type is not None: - assert conn.learning_rule in networks.needs_sender - assert isinstance(networks.needs_sender[conn.learning_rule], - PESModulatoryTarget) - - assert conn in networks.removes - networks.adds.clear() # makes next loop iteration easier + assert not splitter_directive.on_chip(offoff.pre) + assert not splitter_directive.on_chip(offoff.post) def test_split_host_to_learning_rule(): with nengo.Network() as net: + add_params(net) pre = nengo.Ensemble(10, 1, label="pre") post = nengo.Ensemble(10, 1, label="post") err_onchip = nengo.Ensemble(10, 1, label="err_onchip") err_offchip = nengo.Ensemble(10, 1, label="err_offchip") + net.config[err_offchip].on_chip = False ens_conn = nengo.Connection(pre, post, learning_rule_type=nengo.PES()) neurons_conn = nengo.Connection(pre.neurons, post.neurons, learning_rule_type=nengo.PES()) - on2on_ens = nengo.Connection(err_onchip, ens_conn.learning_rule) - on2on_neurons = nengo.Connection( + nengo.Connection(err_onchip, ens_conn.learning_rule) + nengo.Connection( err_onchip, neurons_conn.learning_rule) - off2on_ens = nengo.Connection(err_offchip, ens_conn.learning_rule) - off2on_neurons = nengo.Connection( + nengo.Connection(err_offchip, ens_conn.learning_rule) + nengo.Connection( err_offchip, neurons_conn.learning_rule) - networks = SplitNetworks(net, node_neurons=default_node_neurons) - networks.move(pre, "chip") - networks.move(post, "chip") - networks.move(err_onchip, "chip") - networks.move(err_offchip, "host") - networks.move(ens_conn, "chip") - networks.move(neurons_conn, "chip") - networks.needs_sender[ens_conn.learning_rule] = "ens_pes_target" - networks.needs_sender[neurons_conn.learning_rule] = "neurons_pes_target" - - split_host_to_learning_rules(networks, networks.original.all_connections) - assert on2on_ens not in networks - assert on2on_neurons not in networks - assert sorted([type(obj).__name__ for obj in networks.adds]) == [ - "Connection", "Connection", "HostSendNode", "HostSendNode", - ] - assert off2on_ens in networks.removes - assert "ens_pes_target" in list(networks.host2chip_senders.values()) - assert off2on_neurons in networks.removes - assert "neurons_pes_target" in list(networks.host2chip_senders.values()) + splitter_directive = SplitterDirective(net) + + assert splitter_directive.on_chip(pre) + assert not splitter_directive.on_chip(post) + + assert not splitter_directive.on_chip(err_onchip) + assert not splitter_directive.on_chip(err_offchip) def test_place_probes(): with nengo.Network() as net: + add_params(net) offchip1 = nengo.Node(0) with nengo.Network(): onchip1 = nengo.Ensemble(10, 1) offchip2 = nengo.Ensemble(10, 1) + net.config[offchip2].on_chip = False onchip2 = nengo.Ensemble(10, 1) - onchip3 = nengo.Connection(onchip1, onchip2) - offchip3 = nengo.Connection(offchip1, offchip2) + nengo.Connection(onchip1, onchip2) + nengo.Connection(offchip1, offchip2) offchip_probes = [ nengo.Probe(offchip1), nengo.Probe(offchip2), - nengo.Probe(offchip3), ] onchip_probes = [ nengo.Probe(onchip1), nengo.Probe(onchip2), - nengo.Probe(onchip3), ] - 
networks = SplitNetworks(net, node_neurons=default_node_neurons) - for obj in [offchip1, offchip2, offchip3]: - networks.move(obj, "host") - for obj in [onchip1, onchip2, onchip3]: - networks.move(obj, "chip") - place_probes(networks) - assert all(networks.location(p) == "host" for p in offchip_probes) - assert all(networks.location(p) == "chip" for p in onchip_probes) + splitter_directive = SplitterDirective(net) + assert splitter_directive.on_chip(onchip1) + assert splitter_directive.on_chip(onchip2) + assert not splitter_directive.on_chip(offchip1) + assert not splitter_directive.on_chip(offchip2) + assert not any(splitter_directive.on_chip(p) for p in offchip_probes) + assert all(splitter_directive.on_chip(p) for p in onchip_probes) def test_split_pre_from_host(): with nengo.Network() as net: + add_params(net) pre_1 = nengo.Node(0, label="pre_1") pre_2 = nengo.Ensemble(10, 1, label="pre_2") pre_3 = nengo.Node(size_in=1, label="pre_3") pre_4 = nengo.Ensemble(1, 1, label="pre_4") - send = HostSendNode(dimensions=1) + pre_5 = nengo.Probe(pre_4) + onchip = nengo.Ensemble(1, 1, label="onchip") post1 = nengo.Ensemble(10, 1, label="post1") post2 = nengo.Node(size_in=1, label="post2") - pre_connections = [ - nengo.Connection(pre_1, pre_2), - nengo.Connection(pre_2, pre_3), - nengo.Connection(pre_3, pre_4), - nengo.Connection(pre_4.neurons, send), - ] - post_connections = [ - nengo.Connection(onchip, post1), - nengo.Connection(post1, post2), - ] + post3 = nengo.Probe(post2, label="post3") - networks = SplitNetworks(net, node_neurons=default_node_neurons) - for obj in [pre_1, pre_3, send, post1, post2]: - networks.move(obj, "host") - for obj in [pre_2, pre_4]: - networks.add(obj, "host") - for conn in pre_connections + post_connections: - networks.move(conn, "host") - networks.move(onchip, "chip") + nengo.Connection(pre_1, pre_2) + nengo.Connection(pre_2, pre_3) + nengo.Connection(pre_3, pre_4) + nengo.Connection(pre_4.neurons, onchip) + nengo.Connection(onchip, post1) + nengo.Connection(post1, post2) - split_pre_from_host(networks) - for obj in [pre_1, pre_2, pre_3, pre_4, send] + pre_connections: - assert networks.location(obj) == "host_pre", obj - for obj in [post1, post2] + post_connections: - assert networks.location(obj) == "host", obj - assert networks.location(onchip) == "chip" + net.config[pre_2].on_chip = False + net.config[pre_4].on_chip = False + net.config[post1].on_chip = False + splitter_directive = SplitterDirective(net, precompute=True) -def test_consistent_order(): - with nengo.Network() as model: - add_params(model) + host_precomputable = {pre_1, pre_2, pre_3, pre_4, pre_5} + for obj in host_precomputable: + assert not splitter_directive.on_chip(obj) + assert splitter_directive.is_precomputable(obj) - u0 = nengo.Node(0, label="u0") - for i in range(5): - e = nengo.Ensemble(i+1, 1, label="e%d" % i) - f = nengo.Ensemble(i+1, 1, label="f%d" % i) - nengo.Connection(u0, e, label="c0%d" % i) - nengo.Connection(e, f, label="cf%d" % i) - nengo.Probe(e) - nengo.Probe(f.neurons) - - # Test splitting a number of times, making sure the order of things matches - # the original network each time - split_params = dict( - precompute=False, - node_neurons=OnOffDecodeNeurons(dt=0.001), - node_tau=0.005, - remove_passthrough=False, - ) - - networks0 = split(model, **split_params) - for _ in range(5): - networks = split(model, **split_params) - - # --- order matches original network - assert len(model.all_ensembles) == len(networks.chip.all_ensembles) - for ea, eb in zip(model.all_ensembles, 
networks.chip.all_ensembles): - assert ea.n_neurons == eb.n_neurons and ea.label == eb.label - - # --- order matches previous split - for attr in ('connections', 'ensembles', 'nodes', 'probes'): - for net in ('host_pre', 'host', 'chip'): - aa = getattr(getattr(networks0, net), 'all_' + attr) - bb = getattr(getattr(networks, net), 'all_' + attr) - for a, b in zip(aa, bb): - assert a.label == b.label - - -@pytest.mark.skipif(LooseVersion(nengo.__version__) <= LooseVersion('2.8.0'), - reason="requires more recent Nengo version") -def test_split_conv2d_transform_error(): - with nengo.Network() as net: - add_params(net) - node_offchip = nengo.Node([1]) - ens_onchip = nengo.Ensemble(10, 1) - conv2d = nengo.Convolution( - n_filters=1, input_shape=(1, 1, 1), kernel_size=(1, 1)) - nengo.Connection(node_offchip, ens_onchip, transform=conv2d) + host_nonprecomputable = {post1, post2, post3} + for obj in host_nonprecomputable: + assert not splitter_directive.on_chip(obj) + assert not splitter_directive.is_precomputable(obj) + + assert splitter_directive.on_chip(onchip) + assert not splitter_directive.is_precomputable(onchip) - with pytest.raises(BuildError, match="Conv2D"): - split(net, precompute=False, node_neurons=default_node_neurons, - node_tau=0.005) + with pytest.raises(IndexError, match="not a part of the network"): + splitter_directive.is_precomputable( + nengo.Node(0, add_to_container=False)) def test_split_precompute_loop_error(): @@ -464,62 +200,99 @@ def test_split_precompute_loop_error(): nengo.Connection(node_offchip, ens_onchip) nengo.Connection(ens_onchip, node_offchip) - with pytest.raises(BuildError, match="precompute"): - split(net, precompute=True, node_neurons=default_node_neurons, - node_tau=0.005) + with pytest.raises(BuildError, match="Cannot precompute"): + SplitterDirective(net, precompute=True) -def test_splitnetwork_bad_add_type(): - net = nengo.Network() - networks = SplitNetworks(net) - networks.add(1, "chip") - with pytest.raises(AssertionError): - networks.finalize() +def test_chip_learning_errors(): + with nengo.Network() as net: + add_params(net) + a = nengo.Ensemble(100, 1) + b = nengo.Ensemble(100, 1) + net.config[b].on_chip = True -def test_splitnetwork_remove_add(): - net = nengo.Network() - networks = SplitNetworks(net) - e = nengo.Ensemble(1, 1, add_to_container=False) - networks.add(e, "chip") - networks.remove(e) - assert e not in networks.adds + nengo.Connection(a, b, learning_rule_type=nengo.PES()) + with pytest.raises(BuildError, match="Post ensemble"): + SplitterDirective(net) -def test_pesmodulatorytarget_interface(): - target = "target" - p = PESModulatoryTarget(target) + with nengo.Network() as net: + add_params(net) - t0 = 4 - e0 = [1.8, 2.4, 3.3] - t1 = t0 + 3 - e1 = [7.2, 2.2, 4.1] - e01 = np.array(e0) + np.array(e1) + a = nengo.Ensemble(100, 1) + b = nengo.Ensemble(100, 1) + error = nengo.Ensemble(100, 1) + net.config[error].on_chip = True + + conn = nengo.Connection(a, b, learning_rule_type=nengo.PES()) + nengo.Connection(error, conn.learning_rule) - p.receive(t0, e0) - assert isinstance(p.errors[t0], np.ndarray) - assert np.allclose(p.errors[t0], e0) + with pytest.raises(BuildError, match="Pre ensemble"): + SplitterDirective(net) - p.receive(t0, e1) - assert np.allclose(p.errors[t0], e01) - with pytest.raises(AssertionError): - p.receive(t0 - 1, e0) # time needs to be >= last time +@pytest.mark.parametrize("remove_passthrough", [True, False]) +def test_split_remove_passthrough(remove_passthrough): + with nengo.Network() as net: + add_params(net) + 
+ keep1 = nengo.Node(0, label="keep1") + keep2 = nengo.Node(lambda t, x: x, size_in=1, label="keep2") + keep3 = nengo.Node(size_in=1, label="keep3") + + chip1 = nengo.Ensemble(10, 1, label="chip1") + discard1 = nengo.Node(size_in=1, label="discard1") + chip2 = nengo.Ensemble(10, 1, label="chip2") + discard2 = nengo.Node(size_in=1, label="discard2") + chip3 = nengo.Ensemble(10, 1, label="chip3") + + keep4 = nengo.Node(size_in=1, label="keep4") + probe = nengo.Probe(keep4) + + nengo.Connection(keep1, keep2) + nengo.Connection(keep2, keep3) + nengo.Connection(keep3, chip1) + conn1 = nengo.Connection(chip1, discard1) + conn2 = nengo.Connection(discard1, chip2) + conn3 = nengo.Connection(chip2, discard2) + conn4 = nengo.Connection(discard2, chip3) + nengo.Connection(chip3, keep4) + + splitter_directive = SplitterDirective( + net, remove_passthrough=remove_passthrough) + assert not splitter_directive.on_chip(probe) + + pd = splitter_directive.passthrough_directive + + if remove_passthrough: + assert pd.removed_passthroughs == {discard1, discard2} + assert pd.removed_connections == {conn1, conn2, conn3, conn4} + + conns = list(pd.added_connections) + assert len(conns) == 2 + + prepost = {(conn.pre, conn.post) for conn in conns} + assert prepost == {(chip1, chip2), (chip2, chip3)} + + else: + assert pd == (set(), set(), set()) + + +def test_sliced_passthrough_bug(): + with nengo.Network() as model: + add_params(model) - p.receive(t1, e1) - assert np.allclose(p.errors[t1], e1) + a = nengo.Ensemble(1, 1, label="a") + passthrough = nengo.Node(size_in=1, label="passthrough") - errors = list(p.collect_errors()) - assert len(errors) == 2 - assert errors[0][:2] == (target, t0) and np.allclose(errors[0][2], e01) - assert errors[1][:2] == (target, t1) and np.allclose(errors[1][2], e1) + nengo.Connection(a, passthrough) + p = nengo.Probe(passthrough[0]) - p.clear() - assert len(list(p.collect_errors())) == 0 + splitter_directive = SplitterDirective(model, remove_passthrough=True) + assert splitter_directive.passthrough_directive == (set(), set(), set()) -def test_bad_obj_type(): - split = SplitNetworks(nengo.Network()) - split.adds = {"woops": "host"} - with pytest.raises(AssertionError, match="cannot handle type"): - split.finalize() + assert splitter_directive.on_chip(a) + assert not splitter_directive.on_chip(passthrough) + assert not splitter_directive.on_chip(p)
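+
+
+def test_splitter_directive_query_sketch():
+    # Minimal usage sketch of the placement queries exercised throughout
+    # this file, under the assumption (matching the tests above) that
+    # Nodes always live on the host and Ensembles default to the chip
+    # unless config[...].on_chip is False. The network and test name here
+    # are illustrative only, not part of the splitter test suite proper.
+    with nengo.Network() as net:
+        add_params(net)
+        stim = nengo.Node(0, label="stim")
+        ens = nengo.Ensemble(10, 1, label="ens")
+        readout = nengo.Ensemble(10, 1, label="readout")
+        net.config[readout].on_chip = False
+        nengo.Connection(stim, ens)
+        nengo.Connection(ens, readout)
+
+    splitter_directive = SplitterDirective(net, precompute=True)
+
+    # stim only feeds the chip, so it can be simulated ahead of time
+    assert not splitter_directive.on_chip(stim)
+    assert splitter_directive.is_precomputable(stim)
+
+    # ens runs on the chip; readout depends on chip output and therefore
+    # cannot be precomputed
+    assert splitter_directive.on_chip(ens)
+    assert not splitter_directive.on_chip(readout)
+    assert not splitter_directive.is_precomputable(readout)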