This repository has been archived by the owner on Jul 10, 2021. It is now read-only.

Commit

Fix the calculation of the cost for the validation set so it's exactly the same as the training set.
alexjc committed Nov 18, 2015
1 parent fd1987e commit 95b3b9b
Showing 2 changed files with 8 additions and 7 deletions.
10 changes: 5 additions & 5 deletions sknn/backend/lasagne/mlp.py
@@ -62,11 +62,11 @@ def _create_mlp_trainer(self, params):
         loss_type = self.loss_type or ('mcc' if self.is_classifier else 'mse')
         assert loss_type in cost_functions,\
                 "Loss type `%s` not supported by Lasagne backend." % loss_type
-        cost_fn = getattr(lasagne.objectives, cost_functions[loss_type])
-        cost_eval = cost_fn(self.symbol_output, self.tensor_output).mean()
+        self.cost_function = getattr(lasagne.objectives, cost_functions[loss_type])
+        cost_symbol = self.cost_function(self.symbol_output, self.tensor_output).mean()
         if self.cost is not None:
-            cost_eval = cost_eval * self.cost
-        return self._create_trainer(params, cost_eval)
+            cost_symbol = cost_symbol + self.cost
+        return self._create_trainer(params, cost_symbol)

     def _create_trainer(self, params, cost):
         if self.learning_rule in ('sgd', 'adagrad', 'adadelta', 'rmsprop', 'adam'):
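For context, the change stores the looked-up objective on `self.cost_function` so that validation can later reuse the exact training objective. Below is a minimal sketch of that lookup pattern, assuming Theano and Lasagne are installed; the two-entry `cost_functions` mapping and the tensor names are illustrative stand-ins, not the module's full table:

import theano.tensor as T
import lasagne.objectives

# Illustrative subset of the name-to-objective table used above.
cost_functions = {'mse': 'squared_error', 'mcc': 'categorical_crossentropy'}

predictions = T.matrix('predictions')   # stands in for self.symbol_output
targets = T.matrix('targets')           # stands in for self.tensor_output

cost_function = getattr(lasagne.objectives, cost_functions['mse'])
cost_symbol = cost_function(predictions, targets).mean()
print(cost_symbol)   # a symbolic Theano expression, not yet a number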
@@ -243,7 +243,7 @@ def _valid_impl(self, X, y):
         loss, batches = 0.0, 0
         for Xb, yb in self._iterate_data(X, y, self.batch_size, shuffle=True):
             ys = self.f(Xb)
-            loss += ((ys - yb) ** 2.0).mean()
+            loss += self.cost_function(ys, yb).mean().eval()
             batches += 1
         return loss / batches

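The rewritten validation loss applies the stored training objective to each batch instead of hard-coding mean squared error. A small sketch of why the new line ends in `.eval()`: Theano-backed objectives such as `categorical_crossentropy` return a symbolic expression even when handed concrete NumPy arrays, so the expression must be compiled and evaluated to produce a number. This assumes Theano and Lasagne are installed; the batch values are made up:

import numpy as np
import lasagne.objectives

ys = np.array([[0.8, 0.2],
               [0.3, 0.7]])   # hypothetical predicted probabilities for a batch
yb = np.array([[1.0, 0.0],
               [0.0, 1.0]])   # matching one-hot targets

# The call builds a symbolic graph; .eval() compiles and runs it.
loss = lasagne.objectives.categorical_crossentropy(ys, yb).mean().eval()
print(float(loss))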
5 changes: 3 additions & 2 deletions sknn/mlp.py
@@ -165,7 +165,7 @@ def _train(self, X, y):
             else:
                 n_stable += 1

-            if n_stable >= self.n_stable:
+            if self.valid_set is not None and n_stable >= self.n_stable:
                 log.debug("")
                 log.info("Early termination condition fired at %i iterations.", i)
                 break
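The added guard means the stability counter can only trigger early stopping when a validation set was actually supplied; without one, `n_stable` tracks training loss and should not end the run. A toy sketch of that control flow (all names here are hypothetical, not sknn's):

def train_loop(next_loss, n_iter, valid_set=None, n_stable_limit=10):
    # Toy training loop demonstrating the guarded early-stopping condition.
    best_loss, n_stable = float('inf'), 0
    for i in range(1, n_iter + 1):
        loss = next_loss()
        if loss < best_loss:
            best_loss, n_stable = loss, 0
        else:
            n_stable += 1
        # Stop early only if a validation set exists and the loss has been
        # stable for n_stable_limit consecutive iterations.
        if valid_set is not None and n_stable >= n_stable_limit:
            break
    return best_loss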
@@ -240,6 +240,7 @@ def get_params(self, deep=True):

 class Regressor(MultiLayerPerceptron, sklearn.base.RegressorMixin):
     # Regressor compatible with sklearn that wraps various NN implementations.
+    # The constructor and bulk of documentation is inherited from MultiLayerPerceptron.

     def fit(self, X, y):
         """Fit the neural network to the given continuous data as a regression problem.
@@ -329,7 +330,7 @@ def fit(self, X, y):
             log.warning('{}WARNING: Expecting `Softmax` type for the last layer '
                         'in classifier.{}\n'.format(ansi.YELLOW, ansi.ENDC))
         if y.shape[1] > 1 and self.layers[-1].type != 'Sigmoid':
-            log.warning('{}WARNING: Expecting `Sigmoid` for last layer in '
+            log.warning('{}WARNING: Expecting `Sigmoid` as last layer in '
                         'multi-output classifier.{}\n'.format(ansi.YELLOW, ansi.ENDC))

         # Deal deal with single- and multi-output classification problems.
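The reworded warning concerns multi-output classification, where the last layer is expected to be `Sigmoid` rather than `Softmax`. A hypothetical sketch of the configuration that check looks for; the data shapes are invented for illustration:

import numpy as np
from sknn.mlp import Classifier, Layer

X = np.random.rand(100, 4)                # made-up inputs
y = np.random.randint(2, size=(100, 3))   # three binary labels per sample

# Ending in Sigmoid, so the multi-output warning above stays silent.
nn = Classifier(
    layers=[Layer("Rectifier", units=10),
            Layer("Sigmoid")],
    n_iter=5)
nn.fit(X, y)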
