Remove problematic source tensor sorting
This is no longer required, and is problematic for models that
have an output that is also used elsewhere in the model (since
the sorting puts all outputs at the end of the build order).
hunse committed Mar 19, 2020
1 parent 313e602 commit e3aa159
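
For illustration only (not part of the commit): a minimal Keras model of the kind the commit message describes, where one model output also feeds later layers. The layer structure mirrors the new test added below in test_converter.py.

    import tensorflow as tf

    # x0 is both a model output and the input to the rest of the network,
    # so it must be converted before x1 and x2; the removed sort placed all
    # outputs (including x0) at the end of the build order instead.
    inp = tf.keras.Input(shape=(1,))
    x0 = tf.keras.layers.ReLU()(inp)
    x1 = tf.keras.layers.ReLU()(x0)
    x2 = tf.keras.layers.ReLU()(x1)
    model = tf.keras.Model(inp, [x0, x2])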
Showing 2 changed files with 31 additions and 12 deletions.
12 changes: 0 additions & 12 deletions nengo_dl/converter.py
@@ -822,18 +822,6 @@ def convert(self, node_id):
         # that need to be built into the Nengo network
         source_tensors = self.trace_tensors(self.layer.outputs)
 
-        def sort_key(x):
-            # sort tensors so that order of model inputs/outputs is preserved
-            for i, y in enumerate(self.layer.inputs):
-                if x is y:
-                    return -(len(self.layer.inputs) - i)
-            for i, y in enumerate(self.layer.outputs):
-                if x is y:
-                    return i + 1
-            return 0
-
-        source_tensors = sorted(source_tensors, key=sort_key)
-
         for tensor in source_tensors:
             # look up the layer/node to be converted
             model_layer, model_node_id, _ = self.get_history(tensor)
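As a rough standalone sketch (not part of the commit) of why the removed sort was problematic: the key sorts model inputs first and model outputs last, so an output that other tensors depend on gets moved after them. The example below is hypothetical, using plain strings and membership checks in place of Keras tensors and the converter's identity checks.

    inputs = ["inp"]
    outputs = ["x0", "x2"]                      # x0 also feeds x1 and x2
    source_tensors = ["inp", "x0", "x1", "x2"]  # a valid build order

    def sort_key(x):
        # mirrors the removed key: inputs get negative keys, outputs positive,
        # everything else 0 (the stable sort keeps its relative position)
        if x in inputs:
            return -(len(inputs) - inputs.index(x))
        if x in outputs:
            return outputs.index(x) + 1
        return 0

    print(sorted(source_tensors, key=sort_key))
    # ['inp', 'x1', 'x0', 'x2'] -- x0 now comes after x1, even though x1 is
    # computed from x0, so the converter would reach x1 before x0 was built.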
31 changes: 31 additions & 0 deletions nengo_dl/tests/test_converter.py
@@ -728,3 +728,34 @@ def convert(self, node_id):
         converter.Converter(model)
     finally:
         converter.Converter.converters.pop(MyLayer)
+
+
+def test_mid_model_output(Simulator):
+    """Check that converter supports output tensors from the middle of the model.
+    Previous converter put output tensors last in build order, so having an output
+    tensor that needed to be built before non-output tensors was problematic.
+    https://github.com/nengo/nengo-dl/pull/137
+    """
+    input_val = 2.0
+    n_steps = 5
+
+    # model must have at least three layers, with one layer in between outputs
+    inp = tf.keras.Input(shape=(1,))
+    x0 = tf.keras.layers.ReLU()(inp)
+    x1 = tf.keras.layers.ReLU()(x0)
+    x2 = tf.keras.layers.ReLU()(x1)
+    model = tf.keras.Model(inp, [x0, x2])
+
+    conv = converter.Converter(
+        model, swap_activations={tf.nn.relu: nengo.RectifiedLinear()},
+    )
+
+    with Simulator(conv.net) as sim:
+        sim.run_steps(
+            n_steps, data={conv.inputs[inp]: np.ones((1, n_steps, 1)) * input_val}
+        )
+
+    output_probes = [conv.outputs[output] for output in model.outputs]
+    for probe in output_probes:
+        assert np.allclose(sim.data[probe][-1], input_val)
