
Commit ada168b
Support for calling set_parameters on an initialised neural network with either a list or dict specifying (weights, biases) for layers.

Closes #106, Closes #119.
alexjc committed Nov 17, 2015
1 parent 42397ef commit ada168b
Showing 3 changed files with 71 additions and 6 deletions.
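In short, the commit enables the following (a minimal usage sketch assembled from the tests added below; the `MLPR`/`L` aliases and the private `_initialize` call mirror those tests, and the shapes are illustrative):

```python
import numpy
from sknn.mlp import Regressor as MLPR, Layer as L

nn = MLPR(layers=[L("Sigmoid", units=32), L("Linear", name='output')])
nn._initialize(numpy.zeros((8, 16)), numpy.zeros((8, 4)))

weights = numpy.random.uniform(-1.0, +1.0, (32, 4))
biases = numpy.random.uniform(-1.0, +1.0, (4,))

# List form: one (weights, biases) tuple per layer; a None entry leaves
# that layer's parameters untouched.
nn.set_parameters([None, (weights, biases)])

# Dict form: layer name -> (weights, biases); omitted layers are unchanged.
nn.set_parameters({'output': (weights, biases)})
```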
5 changes: 4 additions & 1 deletion sknn/backend/lasagne/mlp.py
@@ -262,7 +262,10 @@ def _mlp_to_array(self):
         return [self._mlp_get_params(l) for l in self.mlp]

     def _array_to_mlp(self, array, nn):
-        for layer, (weights, biases) in zip(nn, array):
+        for layer, data in zip(nn, array):
+            if data is None: continue
+            weights, biases = data

             while not hasattr(layer, 'W') and not hasattr(layer, 'b'):
                 layer = layer.input_layer
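The new `None` check is the hook that the public API in `sknn/nn.py` below builds on: the dictionary form of `set_parameters` is converted into exactly such a list, with `None` inserted for every layer the dictionary omits. A self-contained sketch of the contract each entry must satisfy (shapes are illustrative):

```python
import numpy

# Each entry is either None (skip: the layer keeps its current parameters)
# or something that unpacks to exactly two items, (weights, biases).
array = [None, (numpy.zeros((32, 4)), numpy.zeros((4,)))]
for data in array:
    if data is None:
        continue
    weights, biases = data
```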

33 changes: 28 additions & 5 deletions sknn/nn.py
@@ -487,12 +487,35 @@ def get_parameters(self):
         -------
         params : list of tuples
             For each layer in the order they are passed to the constructor, a named-tuple
-            of three items `name` (string), `weights` and `biases` (both numpy arrays) in
-            that order.
+            of three items `weights`, `biases` (both numpy arrays) and `name` (string)
+            in that order.
         """

         assert self._backend is not None,\
             "Backend was not initialized; could not retrieve network parameters."

-        P = collections.namedtuple('Parameters', 'layer weights biases')
-        return [P(s.name, w, b) for s, (w, b) in zip(self.layers, self._backend._mlp_to_array())]
+        P = collections.namedtuple('Parameters', 'weights biases layer')
+        return [P(w, b, s.name) for s, (w, b) in zip(self.layers, self._backend._mlp_to_array())]

+    def set_parameters(self, storage):
+        """Store the given weights and biases in the neural network. If the network
+        has not been initialized yet, pass the parameters to the constructor as
+        `weights` instead. Otherwise, this function extracts the parameters from
+        the input list or dictionary and stores them in the corresponding layers.
+
+        Parameters
+        ----------
+        storage : list of tuples, or dictionary of tuples
+            Either a list of tuples, one per layer in the exact same order as
+            construction, each storing the two items `weights` and `biases`.
+            Alternatively, a dictionary mapping layer names to such `weights`
+            and `biases` tuples, not necessarily covering all layers.
+        """
+        assert self._backend is not None,\
+            "Backend was not initialized; could not store network parameters."
+
+        if isinstance(storage, dict):
+            layers = [storage.get(l.name, None) for l in self.layers]
+        else:
+            layers = storage
+
+        return self._backend._array_to_mlp(layers, self._backend.mlp)
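A consequence of the reordered named-tuple above: the output of `get_parameters` converts straightforwardly into `set_parameters` input, so parameters can be copied between networks. A hedged sketch, assuming `source` and `target` are two already-initialised networks with identical layer shapes:

```python
# Copy all parameters from one network to another; the list comprehension
# is needed because each set_parameters entry must unpack to exactly two items.
params = source.get_parameters()   # [Parameters(weights, biases, layer), ...]
target.set_parameters([(p.weights, p.biases) for p in params])

# Positional unpacking follows the new field order:
w, b, name = params[0]
```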
39 changes: 39 additions & 0 deletions sknn/tests/test_data.py
@@ -49,3 +49,42 @@ def test_GetLayerParams(self):
         assert_equals(p[0].layer, 'output')
         assert_equals(p[0].weights.shape, (16, 4))
         assert_equals(p[0].biases.shape, (4,))
+
+    def test_SetLayerParamsList(self):
+        nn = MLPR(layers=[L("Linear")])
+        a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,4))
+        nn._initialize(a_in, a_out)
+
+        weights = numpy.random.uniform(-1.0, +1.0, (16,4))
+        biases = numpy.random.uniform(-1.0, +1.0, (4,))
+        nn.set_parameters([(weights, biases)])
+
+        p = nn.get_parameters()
+        assert_true((p[0].weights == weights).all())
+        assert_true((p[0].biases == biases).all())
+
+    def test_LayerParamsSkipOneWithNone(self):
+        nn = MLPR(layers=[L("Sigmoid", units=32), L("Linear", name='abcd')])
+        a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,4))
+        nn._initialize(a_in, a_out)
+
+        weights = numpy.random.uniform(-1.0, +1.0, (32,4))
+        biases = numpy.random.uniform(-1.0, +1.0, (4,))
+        nn.set_parameters([None, (weights, biases)])
+
+        p = nn.get_parameters()
+        assert_true((p[1].weights == weights).all())
+        assert_true((p[1].biases == biases).all())
+
+    def test_SetLayerParamsDict(self):
+        nn = MLPR(layers=[L("Sigmoid", units=32), L("Linear", name='abcd')])
+        a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,4))
+        nn._initialize(a_in, a_out)
+
+        weights = numpy.random.uniform(-1.0, +1.0, (32,4))
+        biases = numpy.random.uniform(-1.0, +1.0, (4,))
+        nn.set_parameters({'abcd': (weights, biases)})
+
+        p = nn.get_parameters()
+        assert_true((p[1].weights == weights).all())
+        assert_true((p[1].biases == biases).all())
