Merge remote-tracking branch 'upstream/main' into issue929
bknueven committed Aug 9, 2023
2 parents f86506d + e2ae67e commit 3d90146
Showing 6 changed files with 92 additions and 78 deletions.
62 changes: 32 additions & 30 deletions watertap/costing/units/electrodialysis.py
@@ -59,8 +59,6 @@ def cost_electrodialysis(blk, cost_electricity_flow=True, has_rectifier=False):
    """
    t0 = blk.flowsheet().time.first()

-    cost_electrodialysis_stack(blk)
-
    # Changed this to grab power from performance table which is identified
    # by same key regardless of whether the Electrodialysis unit is 0D or 1D
    if cost_electricity_flow:
@@ -75,20 +73,7 @@ def cost_electrodialysis(blk, cost_electricity_flow=True, has_rectifier=False):
        else:
            power = blk.unit_model.get_power_electrical(blk.flowsheet().time.first())
        cost_rectifier(blk, power=power, ac_dc_conversion_efficiency=0.9)
-        blk.capital_cost_constraint = pyo.Constraint(
-            expr=blk.capital_cost
-            == pyo.units.convert(
-                blk.costing_package.electrodialysis.membrane_capital_cost
-                * (
-                    2
-                    * blk.unit_model.cell_pair_num
-                    * blk.unit_model.cell_width
-                    * blk.unit_model.cell_length
-                ),
-                to_units=blk.costing_package.base_currency,
-            )
-            + blk.capital_cost_rectifier
-        )
+    cost_electrodialysis_stack(blk)


def cost_electrodialysis_stack(blk):
@@ -100,22 +85,39 @@ def cost_electrodialysis_stack(blk):
    """
    make_capital_cost_var(blk)
    make_fixed_operating_cost_var(blk)

-    blk.capital_cost_constraint = pyo.Constraint(
-        expr=blk.capital_cost
-        == pyo.units.convert(
-            blk.costing_package.electrodialysis.membrane_capital_cost
-            * (
-                2
-                * blk.unit_model.cell_pair_num
-                * blk.unit_model.cell_width
-                * blk.unit_model.cell_length
-            )
-            + blk.costing_package.electrodialysis.stack_electrode_captical_cost
-            * (2 * blk.unit_model.cell_width * blk.unit_model.cell_length),
-            to_units=blk.costing_package.base_currency,
-        )
-    )
+    if blk.find_component("capital_cost_rectifier") is not None:
+        blk.capital_cost_constraint = pyo.Constraint(
+            expr=blk.capital_cost
+            == pyo.units.convert(
+                blk.costing_package.electrodialysis.membrane_capital_cost
+                * (
+                    2
+                    * blk.unit_model.cell_pair_num
+                    * blk.unit_model.cell_width
+                    * blk.unit_model.cell_length
+                )
+                + blk.costing_package.electrodialysis.stack_electrode_captical_cost
+                * (2 * blk.unit_model.cell_width * blk.unit_model.cell_length),
+                to_units=blk.costing_package.base_currency,
+            )
+            + blk.capital_cost_rectifier
+        )
+    else:
+        blk.capital_cost_constraint = pyo.Constraint(
+            expr=blk.capital_cost
+            == pyo.units.convert(
+                blk.costing_package.electrodialysis.membrane_capital_cost
+                * (
+                    2
+                    * blk.unit_model.cell_pair_num
+                    * blk.unit_model.cell_width
+                    * blk.unit_model.cell_length
+                )
+                + blk.costing_package.electrodialysis.stack_electrode_captical_cost
+                * (2 * blk.unit_model.cell_width * blk.unit_model.cell_length),
+                to_units=blk.costing_package.base_currency,
+            )
+        )
    blk.fixed_operating_cost_constraint = pyo.Constraint(
        expr=blk.fixed_operating_cost
        == pyo.units.convert(
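Note: the net effect of the changes above is an ordering contract inside cost_electrodialysis: cost_rectifier (when has_rectifier=True) runs first and creates capital_cost_rectifier, and cost_electrodialysis_stack is called last so it can detect that variable and fold the rectifier cost into the stack's capital-cost constraint, instead of cost_electrodialysis trying to write a second constraint under the same name. A minimal, self-contained sketch of the guard pattern (toy numbers, not the WaterTAP costing code):

import pyomo.environ as pyo

blk = pyo.ConcreteModel()
blk.capital_cost = pyo.Var()
# cost_rectifier() would have added this variable; delete this line to
# exercise the else branch below
blk.capital_cost_rectifier = pyo.Var()
stack_cost = 1000.0  # stands in for the membrane + electrode cost terms

if blk.find_component("capital_cost_rectifier") is not None:
    blk.capital_cost_constraint = pyo.Constraint(
        expr=blk.capital_cost == stack_cost + blk.capital_cost_rectifier
    )
else:
    blk.capital_cost_constraint = pyo.Constraint(
        expr=blk.capital_cost == stack_cost
    )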
(second changed file: flowsheet script — filename not captured in this view)
@@ -52,6 +52,7 @@ def main():
    solve(m, solver=solver, checkpoint="solve flowsheet after initializing system")

    print("\n***---Simulation results---***")
+    m.fs.EDstack.report()
    display_model_metrics(m)

    # Perform an optimization over selected variables
@@ -60,6 +61,7 @@
        m, solver=solver, checkpoint="solve flowsheet after optimizing system"
    )
    print("\n***---Optimization results---***")
+    m.fs.EDstack.report()
    display_model_metrics(m)


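Note: report() is the stock IDAES unit-model summary table; combined with the electrodialysis_1D.py change further down, it now also lists the stack-only specific power consumption and the channel inlet velocities after each solve. Minimal usage (assuming a solved flowsheet m):

m.fs.EDstack.report()  # defaults to time_point=0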
(third changed file: test module — filename not captured in this view)
@@ -68,7 +68,7 @@ def test_specific_operating_conditions(self, ED1D1Stack_conc_recirc):
assert value(m.fs.costing.specific_energy_consumption) == pytest.approx(
0.1192, abs=0.001
)
-        assert value(m.fs.costing.LCOW) == pytest.approx(0.3626, rel=1e-3)
+        assert value(m.fs.costing.LCOW) == pytest.approx(0.397526, rel=1e-3)
assert value(m.fs.EDstack.inlet_concentrate.pressure[0]) == pytest.approx(
169278.127, rel=1e-3
)
@@ -114,24 +114,24 @@ def test_optimization(self):
) == pytest.approx(2.00, rel=1e-3)
assert value(m.fs.product_salinity) == pytest.approx(0.1000, rel=1e-3)
assert value(m.fs.disposal_salinity) == pytest.approx(6.4333, rel=1e-3)
-        assert value(m.fs.mem_area) == pytest.approx(12.5949, rel=1e-3)
-        assert value(m.fs.EDstack.cell_pair_num) == pytest.approx(12, rel=1e-8)
-        assert value(m.fs.EDstack.cell_length) == pytest.approx(5.327791, rel=1e-3)
+        assert value(m.fs.mem_area) == pytest.approx(13.8159, rel=1e-3)
+        assert value(m.fs.EDstack.cell_pair_num) == pytest.approx(14, rel=1e-8)
+        assert value(m.fs.EDstack.cell_length) == pytest.approx(5.0094, rel=1e-3)
assert value(m.fs.EDstack.voltage_applied[0]) == pytest.approx(
-            14.595398, rel=1e-3
+            16.8753, rel=1e-3
)
assert value(m.fs.costing.specific_energy_consumption) == pytest.approx(
-            1.6713, rel=1e-3
+            1.6044, rel=1e-3
)
-        assert value(m.fs.costing.LCOW) == pytest.approx(0.4829, rel=1e-3)
+        assert value(m.fs.costing.LCOW) == pytest.approx(0.5904, rel=1e-3)
assert value(m.fs.EDstack.inlet_concentrate.pressure[0]) == pytest.approx(
-            1.112460e06, rel=1e-3
+            916385.788228, rel=1e-3
)
assert value(m.fs.EDstack.outlet_concentrate.pressure[0]) == pytest.approx(
101325.00, rel=1e-3
)
assert value(m.fs.EDstack.inlet_diluate.pressure[0]) == pytest.approx(
-            1.112460e06, rel=1e-3
+            916385.788228, rel=1e-3
)
assert value(m.fs.EDstack.outlet_diluate.pressure[0]) == pytest.approx(
101325.00, rel=1e-3
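Note: these regression values move for a traceable reason: in the costing change above, the rectifier branch previously wrote a capital-cost constraint without the stack electrode term, whereas the rectifier capital cost is now added on top of the full stack cost inside cost_electrodialysis_stack. Total capital cost and LCOW therefore rise (0.3626 → 0.397526 in the fixed-conditions test), and the optimal design shifts accordingly.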
48 changes: 20 additions & 28 deletions watertap/tools/parameter_sweep/parameter_sweep.py
@@ -55,7 +55,6 @@ def _default_optimize(model, options=None, tee=False):


class _ParameterSweepBase(ABC):
-
CONFIG = ParameterSweepWriter.CONFIG()

CONFIG.declare(
@@ -159,7 +158,6 @@ def __init__(
self,
**options,
):
-
parallel_manager_class = options.pop("parallel_manager_class", None)

self.config = self.CONFIG(options)
@@ -181,7 +179,6 @@ def __init__(

@staticmethod
def assign_variable_names(model, outputs):
-
# Only assign output variable names to unassigned outputs
exprs = pyo.Expression(pyo.Any)
model.add_component(
@@ -194,7 +191,6 @@ def assign_variable_names(model, outputs):
outputs[output_name] = exprs[output_name]

def _publish_updates(self, iteration, solve_status, solve_time):
-
if self.config.publish_progress:
publish_dict = {
"worker_number": self.parallel_manager.get_rank(),
@@ -297,7 +293,6 @@ def _build_combinations(self, d, sampling_type, num_samples):
return global_combo_array

def _divide_combinations(self, global_combo_array):
-
# Split the total list of combinations into NUM_PROCS chunks,
# one per each of the MPI ranks
# divided_combo_array = np.array_split(global_combo_array, num_procs, axis=0)
@@ -311,9 +306,7 @@
return local_combo_array

def _update_model_values(self, m, param_dict, values):
-
for k, item in enumerate(param_dict.values()):
-
param = item.pyomo_object

if param.is_variable_type():
@@ -328,7 +321,6 @@
raise RuntimeError(f"Unrecognized Pyomo object {param}")

def _aggregate_results_arr(self, global_results_dict, num_cases):
-
global_results = np.zeros(
(num_cases, len(global_results_dict["outputs"])), dtype=float
)
@@ -342,12 +334,10 @@
return global_results

def _process_sweep_params(self, sweep_params):
-
sampling_type = None

# Check the list of parameters to make sure they are valid
for k in sweep_params:
-
# Convert to using Sample class
if isinstance(sweep_params[k], (list, tuple)):
sweep_params[k] = LinearSample(*sweep_params[k])
@@ -364,7 +354,6 @@
return sweep_params, sampling_type

def _create_local_output_skeleton(self, model, sweep_params, outputs, num_samples):
-
output_dict = {}
output_dict["sweep_params"] = {}
output_dict["outputs"] = {}
@@ -398,7 +387,6 @@ def _create_local_output_skeleton(self, model, sweep_params, outputs, num_samples):
return output_dict

def _create_component_output_skeleton(self, component, num_samples):
-
comp_dict = {}
comp_dict["value"] = np.zeros(num_samples, dtype=float)
if hasattr(component, "lb"):
@@ -421,7 +409,6 @@
def _update_local_output_dict(
self, model, sweep_params, case_number, run_successful, output_dict
):
-
# Get the inputs
op_ps_dict = output_dict["sweep_params"]
for key, item in sweep_params.items():
@@ -446,7 +433,6 @@
output_dict["outputs"][label]["value"][case_number] = np.nan

def _create_global_output(self, local_output_dict, req_num_samples=None):
-
# We make the assumption that the parameter sweep is running the same
# flowsheet num_samples number of times, i.e., the structure of the
# local_output_dict remains the same across all mpi_ranks
@@ -512,7 +498,6 @@
return global_output_dict

def _param_sweep_kernel(self, model, reinitialize_values):
-
optimize_function = self.config.optimize_function
optimize_kwargs = self.config.optimize_kwargs
reinitialize_before_sweep = self.config.reinitialize_before_sweep
@@ -604,7 +589,6 @@ def _run_sample(
return run_successful

def _do_param_sweep(self, model, sweep_params, outputs, local_values):
-
# Initialize space to hold results
local_num_cases = np.shape(local_values)[0]

@@ -650,7 +634,6 @@ def parameter_sweep(self, *args, **kwargs):


class ParameterSweep(_ParameterSweepBase):
-
CONFIG = _ParameterSweepBase.CONFIG()

@classmethod
@@ -767,7 +750,6 @@ def run_scatter_gather(
build_outputs,
all_parameter_combinations,
):
-
# save a reference to the parallel manager since it will be removed
# along with the other unpicklable state
parallel_manager = self.parallel_manager
@@ -805,7 +787,6 @@ def parameter_sweep(
build_model_kwargs=None,
build_sweep_params_kwargs=None,
):
-
build_model_kwargs = (
build_model_kwargs if build_model_kwargs is not None else dict()
)
@@ -833,6 +814,9 @@
version="0.10.0",
)

+        if build_outputs is None:
+            build_outputs = return_none
+
if not callable(build_outputs):
_combined_outputs = build_outputs
build_outputs = lambda model, sweep_params: _combined_outputs
@@ -841,7 +825,13 @@
and will not work with future implementations of parallelism.",
version="0.10.0",
)

+        # add build functions and kwargs to instance for use with custom function
+        # this might be better to move all of these to Config instead
+        self.build_model = build_model
+        self.build_sweep_params = build_sweep_params
+        self.build_outputs = build_outputs
+        self.build_model_kwargs = build_model_kwargs
+        self.build_sweep_params_kwargs = build_sweep_params_kwargs
# create the list of all combinations - needed for some aspects of scattering
model = build_model(**build_model_kwargs)
sweep_params = build_sweep_params(model, **build_sweep_params_kwargs)
@@ -881,13 +871,11 @@


class RecursiveParameterSweep(_ParameterSweepBase):
-
CONFIG = _ParameterSweepBase.CONFIG()

def _filter_recursive_solves(
self, model, sweep_params, outputs, recursive_local_dict
):
-
# Figure out how many filtered solves did this rank actually do
filter_counter = 0
for case, content in recursive_local_dict.items():
@@ -929,7 +917,6 @@ def _filter_recursive_solves(
return local_filtered_dict, filter_counter

def _aggregate_filtered_input_arr(self, global_filtered_dict, req_num_samples):
-
global_filtered_values = np.zeros(
(req_num_samples, len(global_filtered_dict["sweep_params"])),
dtype=float,
@@ -946,7 +933,6 @@ def _aggregate_filtered_input_arr(self, global_filtered_dict, req_num_samples):
return global_filtered_values

def _aggregate_filtered_results(self, local_filtered_dict, req_num_samples):
-
global_filtered_dict = self._create_global_output(
local_filtered_dict, req_num_samples
)
@@ -972,7 +958,6 @@ def parameter_sweep(
req_num_samples=None,
seed=None,
):
-
# Convert sweep_params to LinearSamples
sweep_params, sampling_type = self._process_sweep_params(sweep_params)

@@ -988,7 +973,6 @@

local_output_collection = {}
for loop_ctr in range(10):
-
if n_samples_remaining <= 0:
break

@@ -1148,10 +1132,18 @@ def do_execute(
"""

if param_sweep_instance.config.custom_do_param_sweep is not None:
-            return param_sweep_instance.custom_do_param_sweep(
-                model, sweep_params, outputs, local_combo_array
+            return param_sweep_instance.config.custom_do_param_sweep(
+                param_sweep_instance, model, sweep_params, outputs, local_combo_array
)

return param_sweep_instance._do_param_sweep(
model, sweep_params, outputs, local_combo_array
)
+
+
+def return_none(model, sweep_params):
+    """
+    Used so that build_outputs=None is a valid usage of the parameter sweep tool
+    without requiring the user to wrap it in a function.
+    """
+    return None
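Note on the two parameter-sweep changes above: custom_do_param_sweep is now read from the CONFIG block and receives the ParameterSweep instance as its first argument (so it can reach the build_* attributes stored on the instance in parameter_sweep()), and build_outputs=None is routed through return_none, making None a valid argument. A hedged, shape-of-the-API sketch (import path and num_samples kwarg assumed; build_model/build_sweep_params are toy stand-ins, and an end-to-end run still needs a working solver and a meaningful model):

from pyomo.environ import ConcreteModel, Var
from watertap.tools.parameter_sweep import ParameterSweep, LinearSample

def build_model():
    m = ConcreteModel()
    m.x = Var(initialize=1.0)
    return m

def build_sweep_params(model):
    return {"x": LinearSample(model.x, 0.0, 1.0, 4)}

def my_do_param_sweep(ps, model, sweep_params, outputs, local_values):
    # the instance now arrives as the first argument; defer to the stock sweep
    print(f"rank {ps.parallel_manager.get_rank()}: {len(local_values)} cases")
    return ps._do_param_sweep(model, sweep_params, outputs, local_values)

ps = ParameterSweep(custom_do_param_sweep=my_do_param_sweep)
results = ps.parameter_sweep(
    build_model,
    build_sweep_params,
    build_outputs=None,  # now valid: handled via return_none above
    num_samples=4,
)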
8 changes: 7 additions & 1 deletion watertap/unit_models/electrodialysis_1D.py
@@ -2862,10 +2862,16 @@ def _get_performance_contents(self, time_point=0):
                "Total electrical power consumption(Watt)": self.diluate.power_electrical_x[
                    time_point, self.diluate.length_domain.last()
                ],
-                "Specific electrical power consumption (kW*h/m**3)": self.specific_power_electrical[
+                "Specific electrical power consumption, ED stack (kW*h/m**3)": self.specific_power_electrical[
                    time_point
                ],
                "Water recovery by mass": self.recovery_mass_H2O[time_point],
+                "Channel inlet velocity, diluate (m/s)": self.velocity_diluate[
+                    time_point, self.diluate.length_domain.first()
+                ],
+                "Channel inlet velocity, concentrate (m/s)": self.velocity_concentrate[
+                    time_point, self.diluate.length_domain.first()
+                ],
},
"exprs": {},
"params": {},
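Note: both new velocity entries are evaluated at the first point of the diluate length domain, i.e. the channel inlets, and the specific-power label is qualified with "ED stack", presumably to distinguish stack energy from rectifier losses that are costed separately.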