Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Increment PyBaMM > 23.5 #252

Merged
merged 7 commits into from
Apr 18, 2024
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/release_workflow.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ To create a new release, follow these steps:
1. **Prepare the Release:**
- Create a new branch for the release (i.e. `v24.XX`) from `develop`.
- Increment the following;
- The version number in the `pyproject.toml` file following CalVer versioning.
- The version number in the `pyproject.toml` and `CITATION.cff` files following CalVer versioning.
- The `CHANGELOG.md` version with the changes for the new version.
- Open a PR to the `main` branch. Once the PR is merged, proceed to the next step.

Expand Down
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@

## Features


- [#251](https://github.com/pybop-team/PyBOP/pull/251) - Increment PyBaMM > v23.5, remove redundant tests within integration tests, increment citation version, fix examples with incorrect model definitions.
BradyPlanden marked this conversation as resolved.
Show resolved Hide resolved
- [#285](https://github.com/pybop-team/PyBOP/pull/285) - Drop support for Python 3.8.
- [#275](https://github.com/pybop-team/PyBOP/pull/275) - Adds Maximum a Posteriori (MAP) cost function with corresponding tests.
- [#273](https://github.com/pybop-team/PyBOP/pull/273) - Adds notebooks to nox examples session and updates CI workflows for change.
Expand Down
2 changes: 1 addition & 1 deletion CITATION.cff
Original file line number Diff line number Diff line change
Expand Up @@ -11,5 +11,5 @@ authors:
family-names: Courtier
- given-names: David
family-names: Howey
version: "23.12" # Update this when you release a new version
version: "24.3" # Update this when you release a new version
repository-code: 'https://www.github.com/pybop-team/pybop'
2 changes: 1 addition & 1 deletion examples/scripts/spm_adam.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

# Parameter set and model definition
parameter_set = pybop.ParameterSet.pybamm("Chen2020")
model = pybop.lithium_ion.SPMe(parameter_set=parameter_set)
model = pybop.lithium_ion.SPM(parameter_set=parameter_set)

# Fitting parameters
parameters = [
Expand Down
2 changes: 1 addition & 1 deletion examples/scripts/spm_descent.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

# Parameter set and model definition
parameter_set = pybop.ParameterSet.pybamm("Chen2020")
model = pybop.lithium_ion.SPMe(parameter_set=parameter_set)
model = pybop.lithium_ion.SPM(parameter_set=parameter_set)

# Fitting parameters
parameters = [
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ classifiers = [
]
requires-python = ">=3.9, <3.13"
dependencies = [
"pybamm>=23.5",
"pybamm>23.5",
BradyPlanden marked this conversation as resolved.
Show resolved Hide resolved
"numpy>=1.16",
"scipy>=1.3",
"pints>=0.5",
Expand Down
2 changes: 1 addition & 1 deletion scripts/ci/build_matrix.sh
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
python_version=("3.9" "3.10" "3.11" "3.12")
os=("ubuntu-latest" "windows-latest" "macos-latest")
# This command fetches the last three PyBaMM versions excluding release candidates from PyPI
BradyPlanden marked this conversation as resolved.
Show resolved Hide resolved
pybamm_version=($(curl -s https://pypi.org/pypi/pybamm/json | jq -r '.releases | keys[]' | grep -v rc | tail -n 3 | paste -sd " " -))
pybamm_version=($(curl -s https://pypi.org/pypi/pybamm/json | jq -r '.releases | keys[]' | grep -v rc | tail -n 2 | paste -sd " " -))

# open dict
json='{
Expand Down
113 changes: 113 additions & 0 deletions tests/integration/test_optimisation_options.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
import numpy as np
import pytest

import pybop


class TestOptimisation:
    """
    Integration tests for optimisation options on the Optimisation class,
    fitting a two-parameter SPM against synthetic noisy voltage data.
    """

    @pytest.fixture(autouse=True)
    def setup(self):
        # Randomly perturb the nominal values so each run starts from a
        # slightly different (but recoverable) ground truth.
        perturbation = np.random.normal(loc=0.0, scale=0.05, size=2)
        self.ground_truth = np.array([0.55, 0.55]) + perturbation

    @pytest.fixture
    def model(self):
        # Single Particle Model with the Chen2020 parameter set.
        chen2020 = pybop.ParameterSet.pybamm("Chen2020")
        return pybop.lithium_ion.SPM(parameter_set=chen2020)

    @pytest.fixture
    def parameters(self):
        # One bounded and one unbounded fitting parameter.
        negative = pybop.Parameter(
            "Negative electrode active material volume fraction",
            prior=pybop.Gaussian(0.55, 0.05),
            bounds=[0.375, 0.75],
        )
        positive = pybop.Parameter(
            "Positive electrode active material volume fraction",
            prior=pybop.Gaussian(0.55, 0.05),
            # no bounds
        )
        return [negative, positive]

    @pytest.fixture(
        params=[
            pybop.GaussianLogLikelihoodKnownSigma,
            pybop.RootMeanSquaredError,
            pybop.SumSquaredError,
        ]
    )
    def cost_class(self, request):
        # Parametrised over the cost/likelihood classes under test.
        return request.param

    def noise(self, sigma, values):
        # Zero-mean Gaussian measurement noise of the requested length.
        return np.random.normal(0, sigma, values)

    @pytest.fixture
    def spm_costs(self, model, parameters, cost_class):
        # Generate a synthetic dataset at the ground-truth parameters.
        init_soc = 0.5
        sol = self.getdata(model, self.ground_truth, init_soc)
        n_points = len(sol["Time [s]"].data)
        dataset = pybop.Dataset(
            {
                "Time [s]": sol["Time [s]"].data,
                "Current function [A]": sol["Current [A]"].data,
                "Voltage [V]": sol["Voltage [V]"].data
                + self.noise(0.002, n_points),
            }
        )

        # Build the fitting problem and wrap it in the requested cost.
        problem = pybop.FittingProblem(model, parameters, dataset, init_soc=init_soc)
        if cost_class == pybop.GaussianLogLikelihoodKnownSigma:
            # Likelihood needs the (known) noise level per signal.
            return cost_class(problem, sigma=[0.03, 0.03])
        return cost_class(problem)

    @pytest.mark.parametrize(
        "f_guessed",
        [
            True,
            False,
        ],
    )
    @pytest.mark.integration
    def test_optimisation_f_guessed(self, f_guessed, spm_costs):
        # Run XNES with and without f-guessed tracking enabled.
        optim = pybop.Optimisation(
            cost=spm_costs, optimiser=pybop.XNES, sigma0=0.05
        )
        optim.set_f_guessed_tracking(f_guessed)
        optim.set_max_unchanged_iterations(iterations=35, threshold=1e-5)
        optim.set_max_iterations(125)

        initial_cost = optim.cost(spm_costs.x0)
        x, final_cost = optim.run()

        # The optimiser must improve on the initial guess and recover
        # the ground truth to within tolerance.
        assert initial_cost > final_cost
        np.testing.assert_allclose(x, self.ground_truth, atol=2.5e-2)

    def getdata(self, model, x, init_soc):
        # Forward-simulate the model at parameters `x` to produce data.
        overrides = {
            "Negative electrode active material volume fraction": x[0],
            "Positive electrode active material volume fraction": x[1],
        }
        model.parameter_set.update(overrides)
        cycle = (
            "Discharge at 0.5C for 3 minutes (1 second period)",
            "Charge at 0.5C for 3 minutes (1 second period)",
        )
        experiment = pybop.Experiment([cycle] * 2)
        return model.predict(init_soc=init_soc, experiment=experiment)
50 changes: 11 additions & 39 deletions tests/integration/test_parameterisations.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,10 +67,7 @@ def spm_costs(self, model, parameters, cost_class, init_soc):
)

# Define the cost to optimise
signal = ["Voltage [V]"]
problem = pybop.FittingProblem(
model, parameters, dataset, signal=signal, init_soc=init_soc
)
problem = pybop.FittingProblem(model, parameters, dataset, init_soc=init_soc)
if cost_class in [pybop.GaussianLogLikelihoodKnownSigma]:
return cost_class(problem, sigma=[0.03, 0.03])
elif cost_class in [pybop.MAP]:
Expand Down Expand Up @@ -99,7 +96,9 @@ def spm_costs(self, model, parameters, cost_class, init_soc):
def test_spm_optimisers(self, optimiser, spm_costs):
# Some optimisers require a complete set of bounds
if optimiser in [pybop.SciPyDifferentialEvolution, pybop.PSO]:
spm_costs.problem.parameters[1].set_bounds([0.375, 0.75])
spm_costs.problem.parameters[1].set_bounds(
[0.3, 0.8]
) # Large range to ensure IC within bounds
bounds = {"lower": [], "upper": []}
for param in spm_costs.problem.parameters:
bounds["lower"].append(param.bounds[0])
Expand All @@ -111,40 +110,18 @@ def test_spm_optimisers(self, optimiser, spm_costs):
parameterisation = pybop.Optimisation(
cost=spm_costs, optimiser=optimiser, sigma0=0.05
)
parameterisation.set_max_unchanged_iterations(iterations=35, threshold=5e-4)
parameterisation.set_max_unchanged_iterations(iterations=35, threshold=1e-5)
parameterisation.set_max_iterations(125)

initial_cost = parameterisation.cost(spm_costs.x0)

if optimiser in [pybop.CMAES]:
parameterisation.set_f_guessed_tracking(True)
parameterisation.cost.problem.model.allow_infeasible_solutions = False
assert parameterisation._use_f_guessed is True
parameterisation.set_max_iterations(1)
x, final_cost = parameterisation.run()

parameterisation.set_f_guessed_tracking(False)
parameterisation.set_max_iterations(125)

x, final_cost = parameterisation.run()
assert parameterisation._max_iterations == 125

elif optimiser in [pybop.GradientDescent]:
if optimiser in [pybop.GradientDescent]:
if isinstance(
spm_costs, (pybop.GaussianLogLikelihoodKnownSigma, pybop.MAP)
):
parameterisation.optimiser.set_learning_rate(1.8e-5)
parameterisation.set_min_iterations(150)
else:
parameterisation.optimiser.set_learning_rate(0.02)
parameterisation.set_max_iterations(150)
x, final_cost = parameterisation.run()

elif optimiser in [pybop.SciPyDifferentialEvolution]:
with pytest.raises(ValueError):
parameterisation.optimiser.set_population_size(-5)

parameterisation.optimiser.set_population_size(5)
x, final_cost = parameterisation.run()

elif optimiser in [pybop.SciPyMinimize]:
Expand Down Expand Up @@ -201,7 +178,9 @@ def spm_two_signal_cost(self, parameters, model, cost_class):
def test_multiple_signals(self, multi_optimiser, spm_two_signal_cost):
# Some optimisers require a complete set of bounds
if multi_optimiser in [pybop.SciPyDifferentialEvolution]:
spm_two_signal_cost.problem.parameters[1].set_bounds([0.375, 0.75])
spm_two_signal_cost.problem.parameters[1].set_bounds(
[0.3, 0.8]
) # Large range to ensure IC within bounds
bounds = {"lower": [], "upper": []}
for param in spm_two_signal_cost.problem.parameters:
bounds["lower"].append(param.bounds[0])
Expand All @@ -217,10 +196,6 @@ def test_multiple_signals(self, multi_optimiser, spm_two_signal_cost):
parameterisation.set_max_iterations(125)

initial_cost = parameterisation.cost(spm_two_signal_cost.x0)

if multi_optimiser in [pybop.SciPyDifferentialEvolution]:
parameterisation.optimiser.set_population_size(5)

x, final_cost = parameterisation.run()

# Assertions
Expand All @@ -233,7 +208,7 @@ def test_model_misparameterisation(self, parameters, model, init_soc):
# Define two different models with different parameter sets
# The optimisation should fail as the models are not the same
second_parameter_set = pybop.ParameterSet.pybamm("Ecker2015")
second_model = pybop.lithium_ion.SPM(parameter_set=second_parameter_set)
second_model = pybop.lithium_ion.SPMe(parameter_set=second_parameter_set)

# Form dataset
solution = self.getdata(second_model, self.ground_truth, init_soc)
Expand All @@ -246,10 +221,7 @@ def test_model_misparameterisation(self, parameters, model, init_soc):
)

# Define the cost to optimise
signal = ["Voltage [V]"]
problem = pybop.FittingProblem(
model, parameters, dataset, signal=signal, init_soc=init_soc
)
problem = pybop.FittingProblem(model, parameters, dataset, init_soc=init_soc)
cost = pybop.RootMeanSquaredError(problem)

# Select optimiser
Expand Down
21 changes: 16 additions & 5 deletions tests/unit/test_optimisation.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,16 +94,23 @@ def test_optimiser_classes(self, two_param_cost, optimiser_class, expected_name)

# Test without bounds
cost.bounds = None
if optimiser_class in [pybop.SciPyDifferentialEvolution]:
if optimiser_class in [pybop.SciPyMinimize]:
opt = pybop.Optimisation(cost=cost, optimiser=optimiser_class)
assert opt.optimiser.bounds is None
elif optimiser_class in [pybop.SciPyDifferentialEvolution]:
with pytest.raises(ValueError):
pybop.Optimisation(cost=cost, optimiser=optimiser_class)
else:
opt = pybop.Optimisation(cost=cost, optimiser=optimiser_class)
assert opt.optimiser.boundaries is None

# Test setting population size
if optimiser_class in [pybop.SciPyDifferentialEvolution]:
with pytest.raises(ValueError):
opt.optimiser.set_population_size(-5)

if optimiser_class in [pybop.SciPyMinimize]:
assert opt.optimiser.bounds is None
else:
assert opt.optimiser.boundaries is None
# Correct value
opt.optimiser.set_population_size(5)

@pytest.mark.unit
def test_single_parameter(self, cost):
Expand Down Expand Up @@ -150,6 +157,10 @@ def test_halting(self, cost):
x, __ = optim.run()
assert optim._iterations == 2

# Test guessed values
optim.set_f_guessed_tracking(True)
assert optim._use_f_guessed is True

# Test invalid values
with pytest.raises(ValueError):
optim.set_max_evaluations(-1)
Expand Down
Loading