Example notebook

This notebook walks through a minimal POMMES workflow: read the test-case configuration, build and check the input parameters, build the linopy model, solve it with HiGHS, save the solution, and inspect the solution and dual values (nodal prices) with xarray, pandas and matplotlib.
[1]:
import matplotlib.pyplot as plt
from pommes import test_case_path
from pommes.io.build_input_dataset import (
    build_input_parameters,
    read_config_file,
)
from pommes.io.save_solution import save_solution
from pommes.model.build_model import build_model
from pommes.model.data_validation.dataset_check import check_inputs
[2]:
scenario = "ref"
suffix = "_02161113"
output_folder = f"{test_case_path}/output/{scenario}{suffix}"
solver = "highs" # ["gurobi", "xpress", "highs", "mosek"]
[3]:
config = read_config_file(file_path=test_case_path / "config.yaml")
model_parameters = build_input_parameters(config)
model_parameters = check_inputs(model_parameters)
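Before building the model it can help to glance at the assembled inputs; a minimal check, assuming `model_parameters` prints a readable summary (it appears to be an xarray Dataset, judging by the `build_input_dataset` and `dataset_check` module names):
[ ]:
# Inspect the checked inputs (model_parameters appears to be an xarray
# Dataset given the module names; printing works either way).
print(model_parameters)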
[4]:
model = build_model(model_parameters)
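The returned object is a linopy model, so its variable and constraint containers can be listed before solving. A small sketch using standard linopy attributes:
[ ]:
# Summaries of the variables and constraints built by build_model
# (standard linopy containers).
print(model.variables)
print(model.constraints)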
[5]:
model.solve(solver_name=solver)
save_solution(
    model=model,
    output_folder=output_folder,
    model_parameters=model_parameters,
)
Running HiGHS 1.11.0 (git hash: n/a): Copyright (c) 2025 HiGHS under MIT licence terms
LP linopy-problem-72l8p7ti has 5612 rows; 5622 cols; 17008 nonzeros
Coefficient ranges:
Matrix [1e-02, 9e+05]
Cost [1e-02, 1e+00]
Bound [0e+00, 0e+00]
RHS [2e+00, 1e+03]
Presolving model
2864 rows, 2990 cols, 9352 nonzeros 0s
2032 rows, 2182 cols, 8040 nonzeros 0s
Dependent equations search running on 624 equations with time limit of 1000.00s
Dependent equations search removed 0 rows and 0 nonzeros in 0.00s (limit = 1000.00s)
1992 rows, 2134 cols, 7624 nonzeros 0s
Presolve : Reductions: rows 1992(-3620); columns 2134(-3488); elements 7624(-9384)
Solving the presolved LP
Using EKK dual simplex solver - serial
Iteration Objective Infeasibilities num(sum)
0 4.8609640334e-02 Pr: 220(49920) 0s
1054 1.9447000000e+05 Pr: 0(0); Du: 0(6.66481e-14) 0s
Solving the original LP from the solution after postsolve
Model name : linopy-problem-72l8p7ti
Model status : Optimal
Simplex iterations: 1054
Objective value : 1.9447000000e+05
P-D objective error : 0.0000000000e+00
HiGHS run time : 0.04
Writing the solution to /tmp/linopy-solve-a2mbugcy.sol
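Beyond the solver log, the outcome can be checked programmatically; a small sketch assuming linopy's usual post-solve attributes (`status`, `termination_condition`, `objective.value`):
[ ]:
# Programmatic solve check (attribute names follow the standard linopy API;
# adjust if your linopy version exposes them differently).
print(model.status)                 # expected: "ok"
print(model.termination_condition)  # expected: "optimal"
print(model.objective.value)        # should match the objective reported by HiGHS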
[6]:
solution = model.solution
print(model.solution)
<xarray.Dataset> Size: 71kB
Dimensions: (area: 2, hour: 10, resource: 4,
year_op: 4, conversion_tech: 5,
year_dec: 6, year_inv: 5,
storage_tech: 3, link: 2,
transport_tech: 3, combined_tech: 1,
mode: 2, hour_type: 5)
Coordinates: (12/13)
* area (area) <U6 48B 'area_1' 'area_2'
* hour (hour) int64 80B 0 1 2 3 4 5 6 7 8 9
* resource (resource) <U11 176B 'electricity' ....
* year_op (year_op) int64 32B 2020 2030 2040 2050
* conversion_tech (conversion_tech) <U12 240B 'electro...
* year_dec (year_dec) int64 48B 2020 2030 ... 2070
... ...
* storage_tech (storage_tech) <U13 156B 'battery' ....
* link (link) <U6 48B 'link_1' 'link_2'
* transport_tech (transport_tech) <U16 192B 'big_meth...
* combined_tech (combined_tech) <U15 60B 'electric_b...
* mode (mode) <U8 64B 'electric' 'fossil'
* hour_type (hour_type) <U3 60B 'HCE' 'HPE' ... 'P'
Data variables: (12/44)
operation_load_shedding_power (area, hour, resource, year_op) float64 3kB ...
operation_spillage_power (area, hour, resource, year_op) float64 3kB ...
operation_load_shedding_costs (area, year_op) float64 64B -0.0 ......
operation_spillage_costs (area, year_op) float64 64B -0.0 ......
annualised_totex (area, year_op) float64 64B 2.449e+0...
operation_conversion_power_capacity (area, conversion_tech, year_op) float64 320B ...
... ...
operation_carbon_emissions (area, hour, year_op) float64 640B -...
operation_total_carbon_emissions (area, hour, year_op) float64 640B -...
operation_carbon_costs (area, year_op) float64 64B -0.0 ......
operation_turpe_contract_power (area, hour_type, year_op) float64 320B ...
operation_turpe_variable_costs (area, year_op) float64 64B 0.0 ... 0.0
operation_turpe_fixed_costs (area, year_op) float64 64B -0.0 ......
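Any single variable can be pulled out of the solution Dataset with plain xarray, for example the installed conversion capacities listed above:
[ ]:
# Tabulate one solution variable with standard xarray/pandas calls.
capacities = solution["operation_conversion_power_capacity"]
print(capacities.to_dataframe().reset_index())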
[7]:
dual = model.dual
print(dual)
<xarray.Dataset> Size: 84kB
Dimensions: (area: 2, hour: 10,
resource: 4,
year_op: 4,
conversion_tech: 5,
year_inv: 5,
storage_tech: 3,
link: 2,
transport_tech: 3,
combined_tech: 1,
hour_type: 5)
Coordinates:
* area (area) <U6 48B 'ar...
* hour (hour) int64 80B 0...
* resource (resource) <U11 176B ...
* year_op (year_op) int64 32B ...
* conversion_tech (conversion_tech) <U12 240B ...
* year_inv (year_inv) int64 40B ...
* storage_tech (storage_tech) <U13 156B ...
* link (link) <U6 48B 'li...
* transport_tech (transport_tech) <U16 192B ...
* combined_tech (combined_tech) <U15 60B ...
* hour_type (hour_type) <U3 60B ...
Data variables: (12/65)
operation_adequacy_constraint (area, hour, resource, year_op) float64 3kB ...
operation_load_shedding_max_constraint (area, hour, resource, year_op) float64 3kB ...
operation_spillage_max_constraint (area, hour, resource, year_op) float64 3kB ...
operation_load_shedding_costs_def (area, year_op) float64 64B ...
operation_spillage_costs_def (area, year_op) float64 64B ...
annualised_totex_def (area, year_op) float64 64B ...
... ...
operation_total_carbon_emissions_def (area, hour, year_op) float64 640B ...
operation_carbon_costs_def (area, year_op) float64 64B ...
operation_turpe_contract_power_max_constraint (hour_type, area, hour, year_op) float64 3kB ...
operation_turpe_increasing_contract_power_constraint (area, hour_type, year_op) float64 320B ...
operation_turpe_variable_costs_def (year_op, area) float64 64B ...
operation_turpe_fixed_costs_def (area, year_op) float64 64B ...
[8]:
prices = dual.operation_adequacy_constraint
df = prices.to_dataframe(name="value").reset_index()
print(df)
area hour resource year_op value
0 area_1 0 electricity 2020 -0.00
1 area_1 0 electricity 2030 -0.00
2 area_1 0 electricity 2040 -0.00
3 area_1 0 electricity 2050 -0.00
4 area_1 0 heat 2020 986.00
.. ... ... ... ... ...
315 area_2 9 hydrogen 2050 -0.00
316 area_2 9 methane 2020 197.50
317 area_2 9 methane 2030 217.48
318 area_2 9 methane 2040 177.52
319 area_2 9 methane 2050 217.49
[320 rows x 5 columns]
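Before plotting, the long-format dataframe also supports quick aggregations with plain pandas, for instance the mean dual value per resource and operation year:
[ ]:
# Mean adequacy dual per resource and operation year (areas and hours pooled).
print(df.groupby(["resource", "year_op"])["value"].mean().unstack("year_op"))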
[9]:
# Nodal prices (duals of the adequacy constraint) per area, resource and year.
unique_resources = df["resource"].unique()
unique_years = df["year_op"].unique()
unique_areas = df["area"].unique()
colors = plt.cm.tab10(range(len(unique_areas)))
area_color_map = {area: colors[i] for i, area in enumerate(unique_areas)}

# One subplot per (resource, operation year) pair, one line per area.
fig, axes = plt.subplots(
    len(unique_resources),
    len(unique_years),
    figsize=(15, 10),
    sharex=True,
    sharey="row",
)
for i, resource in enumerate(unique_resources):
    for j, year in enumerate(unique_years):
        ax = axes[i, j]
        subset = df[(df["resource"] == resource) & (df["year_op"] == year)]
        for area in unique_areas:
            area_data = subset[subset["area"] == area]
            if not area_data.empty:
                ax.plot(
                    area_data["hour"],
                    area_data["value"],
                    marker="o",
                    color=area_color_map[area],
                )
        if i == 0:
            ax.set_title(f"Year: {year}")
        if j == 0:
            ax.set_ylabel(f"Resource: {resource}")
        if i == len(unique_resources) - 1:
            ax.set_xlabel("Hour")

# Single figure-level legend mapping colours to areas.
legend_elements = [
    plt.Line2D(
        [0],
        [0],
        marker="o",
        color=color,
        linestyle="",
        markersize=8,
        label=area,
    )
    for area, color in area_color_map.items()
]
fig.legend(
    handles=legend_elements,
    loc="upper center",
    ncol=len(unique_areas),
    title="Area",
)
plt.tight_layout(rect=(0, 0, 1, 0.95))
plt.show()
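If the figure should live next to the saved solution, it can be written out as well; the file name below is only an example, and it assumes the output folder already exists after save_solution:
[ ]:
# Optional: save the figure into the output folder.
# The file name is illustrative, not a pommes convention.
fig.savefig(f"{output_folder}/adequacy_duals.png", dpi=150, bbox_inches="tight")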
