Skip to content

Commit

Permalink
Add support for LeakyReLU layers to Converter
Browse files Browse the repository at this point in the history
  • Loading branch information
drasmuss committed Jan 23, 2020
1 parent 6210ee4 commit 7aab265
Show file tree
Hide file tree
Showing 3 changed files with 35 additions and 5 deletions.
1 change: 1 addition & 0 deletions CHANGES.rst
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ Release history
(not the training behaviour). (`#119`_)
- Added ``nengo_dl.LeakyReLU`` and ``nengo_dl.SpikingLeakyReLU`` neuron models.
(`#126`_)
- Added support for leaky ReLU Keras layers to ``nengo_dl.Converter``. (`#126`_)

**Changed**

Expand Down
31 changes: 26 additions & 5 deletions nengo_dl/converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,10 @@
from tensorflow.python.util import nest

from nengo_dl.config import configure_settings
from nengo_dl.tensor_node import Layer, TensorNode
from nengo_dl.neurons import LeakyReLU
from nengo_dl.simulator import Simulator
from nengo_dl.tensor_node import Layer, TensorNode


logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -1121,8 +1123,8 @@ def convert(self, node_id):
broadcast_bias = np.zeros(self.output_shape(node_id))
for i in range(idxs.shape[axis]):
slices[axis] = i
broadcast_scale[slices] = scale[i]
broadcast_bias[slices] = bias[i]
broadcast_scale[tuple(slices)] = scale[i]
broadcast_bias[tuple(slices)] = bias[i]
broadcast_scale = np.ravel(broadcast_scale)
broadcast_bias = np.ravel(broadcast_bias)

Expand Down Expand Up @@ -1408,10 +1410,29 @@ def convert(self, node_id):
class ConvertReLU(LayerConverter):
    """Convert ``tf.keras.layers.ReLU`` to Nengo objects."""

    # ``negative_slope`` is supported (mapped onto the LeakyReLU neuron
    # model below); ``max_value`` and a nonzero ``threshold`` are not.
    unsupported_args = ["max_value", ("threshold", 0)]

    def convert(self, node_id):
        # A zero negative_slope is a plain ReLU; otherwise use the
        # LeakyReLU neuron model so the negative region is scaled
        # rather than clipped to zero.
        if self.layer.negative_slope == 0:
            activation = tf.nn.relu
        else:
            activation = LeakyReLU(negative_slope=self.layer.negative_slope)

        output = self.add_nengo_obj(node_id, biases=None, activation=activation)

        self.add_connection(node_id, output)

        return output


@Converter.register(tf.keras.layers.LeakyReLU)
class ConvertLeakyReLU(LayerConverter):
"""Convert ``tf.keras.layers.LeakyReLU`` to Nengo objects."""

def convert(self, node_id):
output = self.add_nengo_obj(
node_id, biases=None, activation=LeakyReLU(negative_slope=self.layer.alpha)
)

self.add_connection(node_id, output)

Expand Down
8 changes: 8 additions & 0 deletions nengo_dl/tests/test_converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -514,3 +514,11 @@ def test_nested_input():
x = tf.keras.layers.Concatenate()([x, x_0])

_test_convert(inputs, x)


def test_leaky_relu(rng):
    # Exercise both leaky-ReLU converter paths: a ReLU layer with a
    # nonzero negative_slope, and an explicit LeakyReLU layer. Inputs
    # span negative values so the leaky region is actually covered.
    inputs = tf.keras.Input(shape=(4,))
    layer = tf.keras.layers.ReLU(negative_slope=0.1)(inputs)
    layer = tf.keras.layers.LeakyReLU(alpha=2)(layer)

    _test_convert(inputs, layer, inp_vals=[rng.uniform(-1, 1, size=(32, 4))])

0 comments on commit 7aab265

Please sign in to comment.