This repository has been archived by the owner on Jul 10, 2021. It is now read-only.

Commit

Support for calling set_parameters() even before initialize. Constructor parameter now called parameters too, no longer weights. Closes #188.
alexjc committed Apr 2, 2016
1 parent 1f6fb25 commit 0d42fa7
Showing 4 changed files with 44 additions and 12 deletions.
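In practice, the change means pre-trained parameters can be supplied either through the constructor or via set_parameters() before the backend exists. A minimal sketch of the intended usage, assuming the sknn.mlp API of this version (Regressor/Layer); the layer sizes are illustrative only:

import numpy
from sknn.mlp import Regressor, Layer

weights = numpy.random.uniform(-1.0, +1.0, (16, 4))
biases = numpy.random.uniform(-1.0, +1.0, (4,))

# Option 1: pass pre-trained parameters to the constructor (formerly `weights=`).
nn = Regressor(layers=[Layer("Linear", units=4)], parameters=[(weights, biases)])

# Option 2: call set_parameters() before the backend is initialized; the values
# are stored on the estimator and reloaded during _initialize (e.g. on fit/predict).
nn = Regressor(layers=[Layer("Linear", units=4)])
nn.set_parameters([(weights, biases)])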
8 changes: 4 additions & 4 deletions sknn/backend/lasagne/mlp.py
@@ -203,11 +203,11 @@ def _create_mlp(self, X, w=None):
             assert count == space[1],\
                 "Mismatch in the calculated number of dense layer outputs."

-        if self.weights is not None:
-            l = min(len(self.weights), len(self.mlp))
+        if self.parameters is not None:
+            l = min(len(self.parameters), len(self.mlp))
             log.info("Reloading parameters for %i layer weights and biases." % (l,))
-            self._array_to_mlp(self.weights, self.mlp)
-            self.weights = None
+            self._array_to_mlp(self.parameters, self.mlp)
+            self.parameters = None

         log.debug("")

2 changes: 1 addition & 1 deletion sknn/mlp.py
@@ -255,7 +255,7 @@ def _predict(self, X):
         if self._backend is None:
             assert self.layers[-1].units is not None,\
                 "You must specify the number of units to predict without fitting."
-            if self.weights is None:
+            if self.parameters is None:
                 log.warning("WARNING: Computing estimates with an untrained network.")
             self._initialize(X)

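For context, this `_predict` change means an unfitted estimator can still produce outputs once its output units are known; the check now uses `self.parameters` rather than the removed `self.weights`. A hedged sketch (the units value and shapes are illustrative):

import numpy
from sknn.mlp import Regressor, Layer

nn = Regressor(layers=[Layer("Linear", units=4)])
X = numpy.zeros((8, 16))
# No fit() yet: logs "WARNING: Computing estimates with an untrained network."
y_hat = nn.predict(X)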
15 changes: 9 additions & 6 deletions sknn/nn.py
@@ -274,7 +274,7 @@ class NeuralNetwork(object):
         Seed for the initialization of the neural network parameters (e.g.
         weights and biases). This is fully deterministic.
-    weights: list of tuple of array-like, optional
+    parameters: list of tuple of array-like, optional
         A list of ``(weights, biases)`` tuples to be reloaded for each layer, in the same
         order as ``layers`` was specified. Useful for initializing with pre-trained
         networks.
@@ -400,7 +400,7 @@ def __init__(
             self,
             layers,
             warning=None,
-            weights=None,
+            parameters=None,
             random_state=None,
             learning_rule='sgd',
             learning_rate=0.01,
@@ -451,7 +451,7 @@ def __init__(
         assert loss_type in ('mse', 'mae', 'mcc', None),\
             "Unknown loss function type specified: %s." % loss_type

-        self.weights = weights
+        self.parameters = parameters
         self.random_state = random_state
         self.learning_rule = learning_rule
         self.learning_rate = learning_rate
@@ -534,7 +534,7 @@ def _create_logger(self):
     def get_parameters(self):
         """Extract the neural networks weights and biases layer by layer. Only valid
         once the neural network has been initialized, for example via `fit()` function.

         Returns
         -------
         params : list of tuples
@@ -562,8 +562,11 @@ def set_parameters(self, storage):
         to tuple mapping for each layer also storing `weights` and `biases` but not necessarily
         for all layers.
         """
-        assert self._backend is not None,\
-            "Backend was not initialized; could not store network parameters."
+
+        # In case the class is not initialized, store the parameters for later during _initialize.
+        if self._backend is None:
+            self.parameters = storage
+            return

         if isinstance(storage, dict):
             layers = [storage.get(l.name, None) for l in self.layers]
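As the `isinstance(storage, dict)` branch shown just above suggests, set_parameters() also accepts a dict keyed by layer name, so parameters can be restored for a subset of layers on an initialized network. A hedged sketch of that form; the layer names, shapes, and n_iter value are assumptions, not taken from this diff:

import numpy
from sknn.mlp import Regressor, Layer

nn = Regressor(layers=[Layer("Rectifier", units=8, name="hidden0"),
                       Layer("Linear", units=4, name="output")], n_iter=1)
X, y = numpy.zeros((8, 16)), numpy.zeros((8, 4))
nn.fit(X, y)   # backend is now initialized

# Replace only the output layer's weights/biases; "hidden0" keeps its trained values.
nn.set_parameters({"output": (numpy.random.uniform(-1.0, +1.0, (8, 4)),
                              numpy.random.uniform(-1.0, +1.0, (4,)))})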
31 changes: 30 additions & 1 deletion sknn/tests/test_data.py
@@ -1,7 +1,9 @@
 import random
 import unittest
-from nose.tools import (assert_greater, assert_less, assert_raises, assert_equals, assert_true)
+from nose.tools import (assert_greater, assert_less, assert_raises,\
+                        assert_equals, assert_in, assert_true)

+import io
 import logging

 import numpy
@@ -38,6 +40,14 @@ def test_DataIsUsed(self):

 class TestNetworkParameters(unittest.TestCase):

+    def setUp(self):
+        self.buf = io.StringIO()
+        self.hnd = logging.StreamHandler(self.buf)
+        logging.getLogger('sknn').addHandler(self.hnd)
+
+    def tearDown(self):
+        logging.getLogger('sknn').removeHandler(self.hnd)
+
     def test_GetLayerParams(self):
         nn = MLPR(layers=[L("Linear")], n_iter=1)
         a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,4))
@@ -51,6 +61,25 @@ def test_GetLayerParams(self):
         assert_equals(p[0].weights.shape, (16, 4))
         assert_equals(p[0].biases.shape, (4,))

+    def test_SetParametersBeforeInit(self):
+        nn = MLPR(layers=[L("Linear")])
+        weights = numpy.random.uniform(-1.0, +1.0, (16,4))
+        biases = numpy.random.uniform(-1.0, +1.0, (4,))
+        nn.set_parameters([(weights, biases)])
+
+        a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,4))
+        nn._initialize(a_in, a_out)
+        assert_in('Reloading parameters for 1 layer weights and biases.', self.buf.getvalue())
+
+    def test_SetParametersConstructor(self):
+        weights = numpy.random.uniform(-1.0, +1.0, (16,4))
+        biases = numpy.random.uniform(-1.0, +1.0, (4,))
+        nn = MLPR(layers=[L("Linear")], parameters=[(weights, biases)])
+
+        a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,4))
+        nn._initialize(a_in, a_out)
+        assert_in('Reloading parameters for 1 layer weights and biases.', self.buf.getvalue())
+
     def test_SetLayerParamsList(self):
         nn = MLPR(layers=[L("Linear")])
         a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,4))
