From 95b3b9b352130da1c8d4a0b462cab34198e2c088 Mon Sep 17 00:00:00 2001
From: "Alex J. Champandard"
Date: Wed, 18 Nov 2015 16:06:53 +0100
Subject: [PATCH] Fix in the calculation of the cost for the validation set so
 it's exactly the same as the training set.

---
 sknn/backend/lasagne/mlp.py | 10 +++++-----
 sknn/mlp.py                 |  5 +++--
 2 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/sknn/backend/lasagne/mlp.py b/sknn/backend/lasagne/mlp.py
index 123f2aa..d7886d6 100644
--- a/sknn/backend/lasagne/mlp.py
+++ b/sknn/backend/lasagne/mlp.py
@@ -62,11 +62,11 @@ def _create_mlp_trainer(self, params):
         loss_type = self.loss_type or ('mcc' if self.is_classifier else 'mse')
         assert loss_type in cost_functions,\
             "Loss type `%s` not supported by Lasagne backend." % loss_type
-        cost_fn = getattr(lasagne.objectives, cost_functions[loss_type])
-        cost_eval = cost_fn(self.symbol_output, self.tensor_output).mean()
+        self.cost_function = getattr(lasagne.objectives, cost_functions[loss_type])
+        cost_symbol = self.cost_function(self.symbol_output, self.tensor_output).mean()
         if self.cost is not None:
-            cost_eval = cost_eval * self.cost
-        return self._create_trainer(params, cost_eval)
+            cost_symbol = cost_symbol + self.cost
+        return self._create_trainer(params, cost_symbol)
 
     def _create_trainer(self, params, cost):
         if self.learning_rule in ('sgd', 'adagrad', 'adadelta', 'rmsprop', 'adam'):
@@ -243,7 +243,7 @@ def _valid_impl(self, X, y):
         loss, batches = 0.0, 0
         for Xb, yb in self._iterate_data(X, y, self.batch_size, shuffle=True):
             ys = self.f(Xb)
-            loss += ((ys - yb) ** 2.0).mean()
+            loss += self.cost_function(ys, yb).mean().eval()
             batches += 1
         return loss / batches
 
diff --git a/sknn/mlp.py b/sknn/mlp.py
index 9021e64..3466f6e 100644
--- a/sknn/mlp.py
+++ b/sknn/mlp.py
@@ -165,7 +165,7 @@ def _train(self, X, y):
                 else:
                     n_stable += 1
 
-            if n_stable >= self.n_stable:
+            if self.valid_set is not None and n_stable >= self.n_stable:
                 log.debug("")
                 log.info("Early termination condition fired at %i iterations.", i)
                 break
@@ -240,6 +240,7 @@ def get_params(self, deep=True):
 
 class Regressor(MultiLayerPerceptron, sklearn.base.RegressorMixin):
     # Regressor compatible with sklearn that wraps various NN implementations.
+    # The constructor and bulk of documentation is inherited from MultiLayerPerceptron.
 
     def fit(self, X, y):
         """Fit the neural network to the given continuous data as a regression problem.
@@ -329,7 +330,7 @@ def fit(self, X, y):
             log.warning('{}WARNING: Expecting `Softmax` type for the last layer '
                         'in classifier.{}\n'.format(ansi.YELLOW, ansi.ENDC))
         if y.shape[1] > 1 and self.layers[-1].type != 'Sigmoid':
-            log.warning('{}WARNING: Expecting `Sigmoid` for last layer in '
+            log.warning('{}WARNING: Expecting `Sigmoid` as last layer in '
                         'multi-output classifier.{}\n'.format(ansi.YELLOW, ansi.ENDC))
 
         # Deal deal with single- and multi-output classification problems.
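
Note for reviewers: the mechanic tying the two hunks in sknn/backend/lasagne/mlp.py together is that `_create_mlp_trainer` now stores the Lasagne objective on `self.cost_function`, and `_valid_impl` reuses that exact objective instead of hard-coding mean squared error. Below is a minimal standalone sketch of that call chain, assuming `squared_error` as the objective; the constants stand in for one validation batch and are illustrative only, not taken from sknn:

    import theano.tensor as T
    import lasagne.objectives

    # The objective training uses; the patch stores this once as
    # `self.cost_function` so validation cannot drift out of sync.
    cost_function = lasagne.objectives.squared_error

    # Stand-ins for one validation batch: network outputs vs. targets.
    ys = T.constant([[0.1], [0.9]])
    yb = T.constant([[0.0], [1.0]])

    # Lasagne objectives build a symbolic Theano expression, so `.eval()`
    # is what turns the mean batch loss into a concrete float -- the same
    # `.mean().eval()` chain the patched `_valid_impl` uses.
    loss = cost_function(ys, yb).mean().eval()
    print(loss)  # 0.01 for this batch

The sknn/mlp.py change is the companion guard: without a validation set there is no validation error to stabilize, so early termination should only fire when `self.valid_set is not None`.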