Skip to content

Commit

Permalink
Combine splitter into builder
Browse files Browse the repository at this point in the history
Most of the work done by the splitter is now done in the builder.
This should give more clarity and control over the mapping between
pre-build and post-build objects. The `SplitterDirective` class
takes on the organizational tasks of the old `Splitter`, giving
directives to the builder about what should be on- or off-chip.

Also:
- Add unit tests for splitter refactoring.
- Raise `BuildError` if learning objects are on_chip. Fixes #208
  and #209.
- Pass no decoder cache to sub-models. The decoder cache wasn't
  working due to the lack of a context manager, which is normally
  constructed by the top-level network build. Fixes #207.
- Various improvements to passthrough removal, including not removing
  useful passthrough nodes.
  Outstanding issues include: #210, #212, #213
- Handle sliced probes. Closes #205.
- Check that splitter handles sliced probes. Closes #206.
- Test that splitter does not mutate network. Closes #211.
  • Loading branch information
arvoelke authored and hunse committed Apr 3, 2019
1 parent e3f1be0 commit df54ce5
Show file tree
Hide file tree
Showing 11 changed files with 1,004 additions and 1,112 deletions.
58 changes: 55 additions & 3 deletions nengo_loihi/builder/builder.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
from collections import defaultdict, OrderedDict
import logging

from nengo import Network
from nengo import Network, Node, Ensemble, Connection, Probe
from nengo.builder import Model as NengoModel
from nengo.builder.builder import Builder as NengoBuilder
from nengo.builder.network import build_network
from nengo.cache import NoDecoderCache
Expand Down Expand Up @@ -89,10 +90,28 @@ def __init__(self, dt=0.001, label=None, builder=None):
self.build_callback = None
self.decoder_cache = NoDecoderCache()

# Host models filled in by the build process
def create_host_model(label, dt):
# We don't use a decoder cache because it requires a context
# manager that differs depending on which sub-model is being built
return NengoModel(
dt=float(dt),
label="%s, dt=%f" % (label, dt),
decoder_cache=NoDecoderCache())

# TODO: these models may not look/behave exactly the same as
# standard nengo models, because they don't have a toplevel network
# built into them or configs set
self.host_pre = create_host_model(label="%s, host_pre" % label, dt=dt)
self.host = create_host_model(label="%s, host" % label, dt=dt)

# Objects created by the model for simulation on Loihi
self.inputs = OrderedDict()
self.blocks = OrderedDict()

# Will be filled in by the simulator __init__
self.splitter_directive = None

# Will be filled in by the network builder
self.toplevel = None
self.config = None
Expand Down Expand Up @@ -128,8 +147,11 @@ def __init__(self, dt=0.001, label=None, builder=None):
# magnitude/weight resolution)
self.pes_wgt_exp = 4

# Will be provided by Simulator
# Used to track interactions between host models
self.chip2host_params = {}
self.chip2host_receivers = OrderedDict()
self.host2chip_senders = OrderedDict()
self.needs_sender = {}

def __getstate__(self):
raise NotImplementedError("Can't pickle nengo_loihi.builder.Model")
Expand All @@ -150,8 +172,38 @@ def add_block(self, block):
assert block not in self.blocks
self.blocks[block] = len(self.blocks)

def delegate(self, obj):
    """Choose which model should build ``obj``.

    Nodes, ensembles, and probes are routed according to the splitter
    directive: on-chip objects are built by this model, precomputable
    host objects by ``host_pre``, and all other host objects by
    ``host``.  Objects of any other type are always built here.
    """
    if not isinstance(obj, (Node, Ensemble, Probe)):
        # Note: this is safe because any objects built from within a normal
        # nengo model (other than self) will not be re-delegated
        return self
    if self.splitter_directive.on_chip(obj):
        return self
    if self.splitter_directive.is_precomputable(obj):
        return self.host_pre
    return self.host

def build(self, obj, *args, **kwargs):
built = self.builder.build(self, obj, *args, **kwargs)
# Don't build the passthrough nodes or connections
passthrough_directive = self.splitter_directive.passthrough_directive
if (isinstance(obj, Node)
and obj in passthrough_directive.removed_passthroughs):
return None
if (isinstance(obj, Connection)
and obj in passthrough_directive.removed_connections):
return None

# Note: any callbacks for host_pre or host will not be invoked here
model = self.delegate(obj)
if model is not self:
# done for compatibility with nengo<=2.8.0
# otherwise we could just copy over the initial
# seeding to all other models
model.seeds[obj] = self.seeds[obj]
model.seeded[obj] = self.seeded[obj]

built = model.builder.build(model, obj, *args, **kwargs)
if self.build_callback is not None:
self.build_callback(obj)
return built
Expand Down
268 changes: 264 additions & 4 deletions nengo_loihi/builder/connection.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,16 @@
import copy
import logging

import nengo
from nengo import Ensemble, Connection, Node
from nengo import Ensemble, Connection, Node, Probe as NengoProbe
from nengo.builder.connection import (
build_no_solver as _build_no_solver,
BuiltConnection,
get_eval_points,
get_targets,
multiply,
)
from nengo.connection import LearningRule
from nengo.ensemble import Neurons
from nengo.exceptions import BuildError, ValidationError
from nengo.solvers import NoSolver, Solver
Expand All @@ -15,10 +19,267 @@
from nengo_loihi import conv
from nengo_loihi.block import Axon, LoihiBlock, Probe, Synapse
from nengo_loihi.builder.builder import Builder
from nengo_loihi.builder.inputs import ChipReceiveNeurons, LoihiInput
from nengo_loihi.builder.inputs import (
LoihiInput,
ChipReceiveNode,
ChipReceiveNeurons,
HostSendNode,
HostReceiveNode,
PESModulatoryTarget,
)
from nengo_loihi.compat import (
nengo_transforms, sample_transform, conn_solver)
from nengo_loihi.neurons import loihi_rates
from nengo_loihi.passthrough import base_obj

logger = logging.getLogger(__name__)


def _inherit_seed(dest_model, dest_obj, src_model, src_obj):
dest_model.seeded[dest_obj] = src_model.seeded[src_obj]
dest_model.seeds[dest_obj] = src_model.seeds[src_obj]


@Builder.register(Connection)
def build_connection(model, conn):
    """Dispatch ``conn`` to the appropriate host/chip build routine.

    The splitter directive determines on which side of the host/chip
    boundary each endpoint lives; each of the four combinations has a
    dedicated builder.  Connections into a learning rule are a special
    case built entirely on the host.
    """
    pre_on_chip = model.splitter_directive.on_chip(base_obj(conn.pre))

    # Learning-rule targets are always driven from the host side.
    if isinstance(conn.post_obj, LearningRule):
        assert not pre_on_chip
        return build_host_to_learning_rule(model, conn)

    post_on_chip = model.splitter_directive.on_chip(base_obj(conn.post))

    if pre_on_chip:
        if post_on_chip:
            build_chip_connection(model, conn)
        else:
            build_chip_to_host(model, conn)
    elif post_on_chip:
        # Neuron-level pre objects send spikes directly; others must be
        # encoded into spikes first.
        if isinstance(conn.pre_obj, Neurons):
            build_host_neurons_to_chip(model, conn)
        else:
            build_host_to_chip(model, conn)
    else:
        # Both endpoints live on the host; build with standard nengo in
        # whichever host model owns the endpoints.
        nengo_model = model.delegate(base_obj(conn.pre))
        assert nengo_model is model.delegate(base_obj(conn.post))
        _inherit_seed(nengo_model, conn, model, conn)
        nengo_model.build(conn)


def build_host_neurons_to_chip(model, conn):
    """Send spikes over and do the rest of the connection on-chip.

    The host-side pre neurons drive a ``HostSendNode``; its output is
    delivered to a ``ChipReceiveNeurons`` object, and the user's
    transform/synapse are applied by an on-chip connection.
    """

    assert not isinstance(conn.post, LearningRule)
    dim = conn.size_in
    # Host model (host or host_pre) that builds the host half.
    nengo_model = model.delegate(base_obj(conn.pre))

    logger.debug("Creating ChipReceiveNeurons for %s", conn)
    receive = ChipReceiveNeurons(
        dim,
        neuron_type=conn.pre_obj.ensemble.neuron_type,
        label=None if conn.label is None else "%s_neurons" % conn.label,
        add_to_container=False,
    )
    # The receiver is on the chip, so it is built by `model` itself.
    _inherit_seed(model, receive, model, conn)
    model.builder.build(model, receive)

    # On-chip half: applies the connection's transform and synapse.
    receive2post = Connection(
        receive,
        conn.post,
        transform=conn.transform,
        synapse=conn.synapse,
        label=None if conn.label is None else "%s_chip" % conn.label,
        add_to_container=False,
    )
    _inherit_seed(model, receive2post, model, conn)
    build_chip_connection(model, receive2post)

    logger.debug("Creating HostSendNode for %s", conn)
    send = HostSendNode(
        dim,
        label=None if conn.label is None else "%s_send" % conn.label,
        add_to_container=False,
    )
    nengo_model.build(send)

    # Host half: forward the pre neurons' output into the send node
    # unfiltered (the synapse is applied on the chip side).
    pre2send = Connection(
        conn.pre,
        send,
        synapse=None,
        label=None if conn.label is None else "%s_host" % conn.label,
        add_to_container=False,
    )
    # Register the host sender -> chip receiver pairing for the simulator.
    model.host2chip_senders[send] = receive
    _inherit_seed(nengo_model, pre2send, model, conn)
    nengo_model.build(pre2send)


def build_host_to_chip(model, conn):
    """Build a host-to-chip connection whose ``pre`` emits decoded values.

    The host-side value is encoded into spikes by a decode-neuron
    ensemble, those spikes are forwarded through a ``HostSendNode``, and
    a ``ChipReceiveNode`` reconstructs the value on the chip.
    """
    rng = np.random.RandomState(model.seeds[conn])
    dim = conn.size_out
    # Host model (host or host_pre) that builds the host half.
    nengo_model = model.delegate(base_obj(conn.pre))

    logger.debug("Creating ChipReceiveNode for %s", conn)
    # NOTE(review): `dim * 2` appears to reflect two decode neurons per
    # dimension (matching the HostSendNode size below) — confirm against
    # model.node_neurons.
    receive = ChipReceiveNode(
        dim * 2,
        size_out=dim,
        label=None if conn.label is None else "%s_node" % conn.label,
        add_to_container=False,
    )
    model.builder.build(model, receive)

    # On-chip half: decode the received spikes with the model's default
    # decode filter.
    receive2post = Connection(
        receive,
        conn.post,
        synapse=model.decode_tau,
        label=None if conn.label is None else "%s_chip" % conn.label,
        add_to_container=False,
    )
    _inherit_seed(model, receive2post, model, conn)
    build_chip_connection(model, receive2post)

    logger.debug("Creating DecodeNeuron ensemble for %s", conn)
    ens = model.node_neurons.get_ensemble(dim)
    ens.label = None if conn.label is None else "%s_ens" % conn.label
    _inherit_seed(nengo_model, ens, model, conn)
    nengo_model.build(ens)

    # Convolution transforms cannot be realized through the decode-neuron
    # encoding path.
    if nengo_transforms is not None and isinstance(
            conn.transform, nengo_transforms.Convolution):
        raise BuildError(
            "Conv2D transforms not supported for off-chip to "
            "on-chip connections where `pre` is not a Neurons object.")

    # Scale the input spikes based on the radius of the target ensemble
    weights = sample_transform(conn, rng=rng)

    if isinstance(conn.post_obj, Ensemble):
        weights = weights / conn.post_obj.radius

    if nengo_transforms is None:
        # Older nengo: the transform is the raw weight array.
        transform = weights
    else:
        # copy the Transform information, setting `init` to the sampled weights
        transform = copy.copy(conn.transform)
        type(transform).init.data[transform] = weights

    # Host half: apply the user's function/solver/transform into the
    # decode-neuron ensemble.
    pre2ens = Connection(
        conn.pre,
        ens,
        function=conn.function,
        solver=conn.solver,
        eval_points=conn.eval_points,
        scale_eval_points=conn.scale_eval_points,
        synapse=conn.synapse,
        transform=transform,
        label=None if conn.label is None else "%s_enc" % conn.label,
        add_to_container=False,
    )
    _inherit_seed(nengo_model, pre2ens, model, conn)
    nengo_model.build(pre2ens)

    logger.debug("Creating HostSendNode for %s", conn)
    send = HostSendNode(
        dim * 2,
        label=None if conn.label is None else "%s_send" % conn.label,
        add_to_container=False,
    )
    nengo_model.build(send)

    # Forward the decode neurons' spikes into the send node unfiltered.
    ensneurons2send = Connection(
        ens.neurons,
        send,
        synapse=None,
        label=None if conn.label is None else "%s_host" % conn.label,
        add_to_container=False,
    )
    _inherit_seed(nengo_model, ensneurons2send, model, conn)
    # Register the host sender -> chip receiver pairing for the simulator.
    model.host2chip_senders[send] = receive
    nengo_model.build(ensneurons2send)


def build_chip_to_host(model, conn):
    """Build a chip-to-host connection.

    The chip side is realized as a probe on ``conn.pre``; the simulator
    forwards the probed values to a ``HostReceiveNode``, which drives
    the host-side remainder of the connection.
    """
    rng = np.random.RandomState(model.seeds[conn])
    dim = conn.size_out
    # Host model (host or host_pre) that builds the host half.
    nengo_model = model.delegate(base_obj(conn.post))

    logger.debug("Creating HostReceiveNode for %s", conn)
    receive = HostReceiveNode(
        dim,
        label=None if conn.label is None else "%s_receive" % conn.label,
        add_to_container=False,
    )
    nengo_model.build(receive)

    # Host half: apply the user's synapse between receiver and post.
    receive2post = Connection(
        receive,
        conn.post,
        synapse=conn.synapse,
        label=None if conn.label is None else "%s_host" % conn.label,
        add_to_container=False,
    )
    _inherit_seed(nengo_model, receive2post, model, conn)
    nengo_model.build(receive2post)

    logger.debug("Creating Probe for %s", conn)
    transform = sample_transform(conn, rng=rng)

    probe = NengoProbe(conn.pre,
                       synapse=None,
                       solver=conn.solver,
                       add_to_container=False)
    # The probe itself carries no function/transform; record how the
    # probed values must be post-processed on the host side.
    model.chip2host_params[probe] = dict(
        learning_rule_type=conn.learning_rule_type,
        function=conn.function,
        eval_points=conn.eval_points,
        scale_eval_points=conn.scale_eval_points,
        transform=transform,
        label=None if conn.label is None else "%s_probe" % conn.label,
    )
    # Register the chip probe -> host receiver pairing for the simulator.
    model.chip2host_receivers[probe] = receive
    _inherit_seed(model, probe, model, conn)
    model.builder.build(model, probe)

    if conn.learning_rule_type is not None:
        if not isinstance(conn.pre_obj, Ensemble):
            raise NotImplementedError(
                "Learning rule presynaptic object must be an Ensemble "
                "(got %r)" % type(conn.pre_obj).__name__)
        # Error connections into this learning rule look up this
        # modulatory target (see build_host_to_learning_rule).
        model.needs_sender[conn.learning_rule] = PESModulatoryTarget(probe)


def build_host_to_learning_rule(model, conn):
    """Route a host connection into an on-chip learning rule.

    The host side computes the error signal and feeds it into a
    ``HostSendNode``; the simulator forwards those values to the
    modulatory target previously registered for the learning rule.
    """
    host_model = model.delegate(base_obj(conn.pre))
    size = conn.size_out

    logger.debug("Creating HostSendNode for %s", conn)
    error_send = HostSendNode(
        size,
        label=None if conn.label is None else "%s_send" % conn.label,
        add_to_container=False,
    )
    host_model.build(error_send)

    # Host connection carrying the user's function/solver/transform into
    # the send node.
    host_conn = Connection(
        conn.pre,
        error_send,
        function=conn.function,
        solver=conn.solver,
        eval_points=conn.eval_points,
        scale_eval_points=conn.scale_eval_points,
        synapse=conn.synapse,
        transform=conn.transform,
        label=conn.label,
        add_to_container=False,
    )
    # The simulator delivers the sent values to the learning rule's
    # modulatory target on the chip.
    model.host2chip_senders[error_send] = model.needs_sender[conn.post_obj]
    _inherit_seed(host_model, host_conn, model, conn)
    host_model.build(host_conn)


def build_decoders(model, conn, rng, sampled_transform):
Expand Down Expand Up @@ -114,8 +375,7 @@ def build_no_solver(model, solver, conn, rng, sampled_transform):
return _build_no_solver(model, solver, conn, rng)


@Builder.register(Connection) # noqa: C901
def build_connection(model, conn):
def build_chip_connection(model, conn): # noqa: C901
if nengo_transforms is not None:
if isinstance(conn.transform, nengo_transforms.Convolution):
# TODO: integrate these into the same function
Expand Down
Loading

0 comments on commit df54ce5

Please sign in to comment.