Additions to #473 #488

Merged: 2 commits, Sep 10, 2024

25 changes: 20 additions & 5 deletions pybop/costs/base_cost.py
@@ -35,6 +35,7 @@ class BaseCost:
 
     def __init__(self, problem: Optional[BaseProblem] = None):
         self.parameters = Parameters()
+        self.transformation = None
         self.problem = problem
         self.verbose = False
         self._has_separable_problem = False
@@ -46,6 +47,7 @@ def __init__(self, problem: Optional[BaseProblem] = None):
             self.parameters.join(self.problem.parameters)
             self.n_outputs = self.problem.n_outputs
             self.signal = self.problem.signal
+            self.transformation = self.parameters.construct_transformation()
             self._has_separable_problem = True
         self.grad_fail = None
         self.set_fail_gradient()
@@ -62,7 +64,12 @@ def has_separable_problem(self):
     def target(self):
         return self._target
 
-    def __call__(self, inputs: Union[Inputs, list], calculate_grad: bool = False):
+    def __call__(
+        self,
+        inputs: Union[Inputs, list],
+        calculate_grad: bool = False,
+        apply_transform: bool = False,
+    ):
         """
         This method calls the forward model via problem.evaluate(inputs),
         and computes the cost for the given output by calling self.compute().
@@ -84,16 +91,24 @@ def __call__(self, inputs: Union[Inputs, list], calculate_grad: bool = False):
         ValueError
             If an error occurs during the calculation of the cost.
         """
+        # Apply transformation if needed
+        transform = self.transformation is not None and apply_transform
+        if transform:
+            inputs = self.transformation.to_model(inputs)
         inputs = self.parameters.verify(inputs)
         self.parameters.update(values=list(inputs.values()))
-        y, dy = None, None
 
+        y, dy = None, None
         if self._has_separable_problem:
-            if calculate_grad is True:
+            if calculate_grad:
                 y, dy = self.problem.evaluateS1(self.problem.parameters.as_dict())
-            else:
-                y = self.problem.evaluate(self.problem.parameters.as_dict())
+                cost, grad = self.compute(y, dy=dy, calculate_grad=calculate_grad)
+                if transform and np.isfinite(cost):
+                    jac = self.transformation.jacobian(inputs)
+                    grad = np.matmul(grad, jac)
+                return cost, grad
 
+            y = self.problem.evaluate(self.problem.parameters.as_dict())
         return self.compute(y, dy=dy, calculate_grad=calculate_grad)
 
     def compute(self, y: dict, dy: ndarray, calculate_grad: bool = False):
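
The new `apply_transform` flag moves search-space handling into the cost itself: inputs are mapped back to model space via the transformation's `to_model`, and gradients are chain-ruled through its Jacobian. A minimal self-contained sketch of that branch, using a hypothetical `theta = exp(q)` transform in place of a real pybop transformation (names here are illustrative, not from this PR):

    import numpy as np

    # Toy quadratic cost over model parameters theta, with analytic gradient.
    def cost_and_grad(theta):
        theta = np.asarray(theta, dtype=float)
        return float(np.sum((theta - 2.0) ** 2)), 2.0 * (theta - 2.0)

    def call(q, apply_transform=False):
        q = np.asarray(q, dtype=float)
        inputs = np.exp(q) if apply_transform else q  # stand-in for to_model()
        cost, grad = cost_and_grad(inputs)
        if apply_transform and np.isfinite(cost):
            jac = np.diag(np.exp(q))       # Jacobian of the exp transform
            grad = np.matmul(grad, jac)    # same product as in the diff above
        return cost, grad

    print(call([0.5]))                         # model-space input, as before
    print(call([0.5], apply_transform=True))   # search-space input
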
49 changes: 3 additions & 46 deletions pybop/optimisers/base_optimiser.py
@@ -1,13 +1,12 @@
 import warnings
-from typing import Optional, Union
+from typing import Optional
 
 import numpy as np
 
 from pybop import (
     BaseCost,
     BaseLikelihood,
     DesignCost,
-    Inputs,
     Parameter,
     Parameters,
     WeightedCost,
@@ -74,6 +73,7 @@ def __init__(
 
         if isinstance(cost, BaseCost):
             self.cost = cost
+            self._transformation = self.cost.transformation
             self.parameters.join(cost.parameters)
             self.set_allow_infeasible_solutions()
             if isinstance(cost, WeightedCost):
@@ -127,10 +127,7 @@ def set_base_options(self):
         """
         Update the base optimiser options and remove them from the options dictionary.
         """
-        # Set up the transformation
-        self._transformation = self.parameters.construct_transformation()
-
-        # Set initial values, if x0 is None, initial values are unmodified
+        # Set initial values, if x0 is None, initial values are unmodified.
         self.parameters.update(initial_values=self.unset_options.pop("x0", None))
         self.x0 = self.parameters.reset_initial_value(apply_transform=True)
 
@@ -201,46 +198,6 @@ def _run(self):
         """
         raise NotImplementedError
 
-    def cost_call(self, inputs: Union[Inputs, list], calculate_grad: bool = False):
-        """
-        Transform the inputs if required, then call the cost and silently catch and convert
-        any errors encountered during the cost computation into an infinite cost value.
-
-        Parameters
-        ----------
-        inputs : Inputs or array-like
-            The parameters for which to compute the cost and gradient.
-        calculate_grad : bool, optional
-            A bool condition designating whether to calculate the gradient.
-
-        Returns
-        -------
-        float
-            The calculated cost function value or an infinite value if an
-            error occurs during the calculation of the cost.
-        """
-        if self._transformation:
-            q = inputs
-            inputs = self._transformation.to_model(q)
-
-        try:
-            if calculate_grad:
-                cost, grad = self.cost(inputs=inputs, calculate_grad=calculate_grad)
-
-                if self._transformation and np.isfinite(cost):
-                    jacobian = self._transformation.jacobian(q)
-                    grad = np.matmul(grad, jacobian)
-
-                return cost, grad
-
-            return self.cost(inputs=inputs)
-
-        except NotImplementedError as e:
-            raise e
-
-        except Exception as e:
-            raise ValueError(f"Error in cost calculation: {e}") from e
-
     def log_update(self, x=None, x_best=None, cost=None):
         """
         Update the log with new values.
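
With `cost_call` removed, the optimiser no longer applies the transformation itself; the Jacobian product `np.matmul(grad, jacobian)` now lives in `BaseCost.__call__`. A finite-difference check of that chain rule, under an assumed `theta = exp(q)` transformation (an illustration, not pybop code): for a search-space point q with model parameters theta = T(q), the search-space gradient is grad_model @ J with J = d theta / d q.

    import numpy as np

    def cost(theta):
        return float(np.sum((theta - 2.0) ** 2))

    q = np.array([0.3, -0.1])
    theta = np.exp(q)
    grad_model = 2.0 * (theta - 2.0)
    grad_search = np.matmul(grad_model, np.diag(theta))  # chain rule

    # Central finite differences in search space agree with the product:
    eps = 1e-6
    fd = np.array([
        (cost(np.exp(q + eps * e)) - cost(np.exp(q - eps * e))) / (2 * eps)
        for e in np.eye(2)
    ])
    print(np.allclose(grad_search, fd))  # True
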
21 changes: 10 additions & 11 deletions pybop/optimisers/base_pints_optimiser.py
@@ -190,15 +190,14 @@ def _run(self):
         unchanged_iterations = 0
 
         # Choose method to evaluate
-        if self._needs_sensitivities:
-
-            def f(x):
-                L, dl = self.cost_call(x, calculate_grad=True)
-                return (L, dl) if self.minimising else (-L, -dl)
-        else:
-
-            def f(x):
-                return self.cost_call(x) if self.minimising else -self.cost_call(x)
+        def fun(x):
+            if self._needs_sensitivities:
+                L, dl = self.cost(x, calculate_grad=True, apply_transform=True)
+            else:
+                L = self.cost(x, apply_transform=True)
+                dl = None
+            sign = -1 if not self.minimising else 1
+            return (sign * L, sign * dl) if dl is not None else sign * L
 
         # Create evaluator object
         if self._parallel:
@@ -209,9 +208,9 @@ def f(x):
             # particles!
             if isinstance(self.pints_optimiser, PintsPopulationBasedOptimiser):
                 n_workers = min(n_workers, self.pints_optimiser.population_size())
-            evaluator = PintsParallelEvaluator(f, n_workers=n_workers)
+            evaluator = PintsParallelEvaluator(fun, n_workers=n_workers)
         else:
-            evaluator = PintsSequentialEvaluator(f)
+            evaluator = PintsSequentialEvaluator(fun)
 
         # Keep track of current best and best-guess scores.
         fb = fg = np.inf
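
The two `f(x)` closures collapse into a single `fun(x)` that handles sensitivities and the minimise/maximise sign flip in one place, and the PINTS evaluators consume it unchanged. A hedged sketch of how an evaluator calls such a wrapper, based on the PINTS sequential-evaluator API with a stand-in score function (the candidate points are illustrative):

    import numpy as np
    import pints

    # Stand-in for the `fun(x)` wrapper above: returns a plain scalar score.
    def fun(x):
        return float(np.sum(np.asarray(x) ** 2))

    evaluator = pints.SequentialEvaluator(fun)
    scores = evaluator.evaluate([np.array([0.0, 1.0]), np.array([2.0, 3.0])])
    print(scores)  # one score per candidate position
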
12 changes: 6 additions & 6 deletions pybop/optimisers/scipy_optimisers.py
@@ -88,7 +88,7 @@ def _run(self):
             x=self._transformation.to_model(result.x)
             if self._transformation
             else result.x,
-            final_cost=self.cost_call(result.x),
+            final_cost=self.cost(result.x),
             n_iterations=nit,
             scipy_result=result,
         )
@@ -165,13 +165,13 @@ def cost_wrapper(self, x):
         self.log_update(x=[x])
 
         if not self._options["jac"]:
-            cost = self.cost_call(x) / self._cost0
+            cost = self.cost(x) / self._cost0
             if np.isinf(cost):
                 self.inf_count += 1
                 cost = 1 + 0.9**self.inf_count  # for fake finite gradient
             return cost if self.minimising else -cost
 
-        L, dl = self.cost_call(x, calculate_grad=True)
+        L, dl = self.cost(x, calculate_grad=True)
         return (L, dl) if self.minimising else (-L, -dl)
 
     def _run_optimiser(self):
@@ -212,7 +212,7 @@ def base_callback(intermediate_result: Union[OptimizeResult, np.ndarray]):
             )
 
         # Compute the absolute initial cost and resample if required
-        self._cost0 = np.abs(self.cost_call(self.x0))
+        self._cost0 = np.abs(self.cost(self.x0))
         if np.isinf(self._cost0):
             for _i in range(1, self.num_resamples):
                 try:
@@ -224,7 +224,7 @@ def base_callback(intermediate_result: Union[OptimizeResult, np.ndarray]):
                         stacklevel=2,
                     )
                     break
-                self._cost0 = np.abs(self.cost_call(self.x0))
+                self._cost0 = np.abs(self.cost(self.x0))
                 if not np.isinf(self._cost0):
                     break
             if np.isinf(self._cost0):
@@ -352,7 +352,7 @@ def callback(intermediate_result: OptimizeResult):
 
         def cost_wrapper(x):
             self.log_update(x=[x])
-            return self.cost_call(x) if self.minimising else -self.cost_call(x)
+            return self.cost(x) if self.minimising else -self.cost(x)
 
         return differential_evolution(
             cost_wrapper,
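
When the normalised cost is infinite, `cost_wrapper` substitutes `1 + 0.9**self.inf_count`: a finite value that shrinks toward 1 on each repeated infeasible evaluation, so SciPy's finite-difference gradients stay defined instead of propagating inf. A standalone illustration of that decay:

    # Each consecutive infeasible evaluation returns a slightly smaller
    # finite penalty, approaching 1 from above:
    for inf_count in range(1, 6):
        print(inf_count, 1 + 0.9**inf_count)
    # 1 1.9
    # 2 1.81
    # 3 1.7290000000000001
    # ...
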
20 changes: 0 additions & 20 deletions tests/unit/test_optimisation.py
@@ -68,14 +68,6 @@ def two_param_cost(self, model, two_parameters, dataset):
         )
         return pybop.SumSquaredError(problem)
 
-    @pytest.mark.unit
-    def test_optimiser_without_cost_call(self, one_parameter):
-        cost = pybop.BaseCost()
-        cost.parameters = pybop.Parameters(one_parameter)
-        optim = pybop.Optimisation(cost=cost)
-        with pytest.raises(NotImplementedError):
-            optim.cost_call([0.5])
-
     @pytest.mark.parametrize(
         "optimiser, expected_name",
         [
@@ -318,18 +310,6 @@ def test_single_parameter(self, cost):
         ):
             pybop.CMAES(cost=cost)
 
-    @pytest.mark.unit
-    def test_error_in_cost_calculation(self):
-        class RaiseErrorCost(pybop.BaseCost):
-            def __call__(self, inputs=None, calculate_grad: bool = False):
-                raise ValueError("Error test.")
-
-        cost = RaiseErrorCost()
-        cost.parameters = pybop.Parameters(pybop.Parameter("p", initial_value=1.0))
-        optim = pybop.Optimisation(cost)
-        with pytest.raises(ValueError, match="Error in cost calculation: Error test."):
-            optim.cost_call(1.0)
-
     @pytest.mark.unit
     def test_invalid_cost(self):
         # Test without valid cost