diff --git a/Snakefile b/Snakefile
index 9610490a5..2a2956a27 100644
--- a/Snakefile
+++ b/Snakefile
@@ -560,6 +560,7 @@ rule add_electricity:

 rule simplify_network:
     params:
+        aggregation_strategies=config["cluster_options"]["aggregation_strategies"],
         renewable=config["renewable"],
         geo_crs=config["crs"]["geo_crs"],
         cluster_options=config["cluster_options"],
diff --git a/config.default.yaml b/config.default.yaml
index 91ae2c53f..fb1bbffbc 100644
--- a/config.default.yaml
+++ b/config.default.yaml
@@ -65,7 +65,7 @@ cluster_options:
   remove_stubs_across_borders: true
   p_threshold_drop_isolated: 20 # [MW] isolated buses are being discarded if bus mean power is below the specified threshold
   p_threshold_merge_isolated: 300 # [MW] isolated buses are being merged into a single isolated bus if a bus mean power is below the specified threshold
-  s_threshold_fetch_isolated: 0.05 # [-] a share of the national load for merging an isolated network into a backbone network
+  s_threshold_fetch_isolated: false # [-] a share of the national load for merging an isolated network into a backbone network; set to false to disable
   cluster_network:
     algorithm: kmeans
     feature: solar+onwind-time
@@ -396,8 +396,6 @@ monte_carlo:
     type: beta
     args: [0.5, 2]

-
-
 solving:
   options:
     formulation: kirchhoff
@@ -411,15 +409,63 @@ solving:
     #nhours: 10
   solver:
     name: gurobi
-    threads: 4
-    method: 2 # barrier (=ipm)
-    crossover: 0
-    BarConvTol: 1.e-5
-    FeasibilityTol: 1.e-6
-    AggFill: 0
-    PreDual: 0
-    GURO_PAR_BARDENSETHRESH: 200
-
+    options: gurobi-default
+
+  solver_options:
+    highs-default:
+      # refer to https://ergo-code.github.io/HiGHS/options/definitions.html#solver
+      threads: 4
+      solver: "ipm"
+      run_crossover: "off"
+      small_matrix_value: 1e-6
+      large_matrix_value: 1e9
+      primal_feasibility_tolerance: 1e-5
+      dual_feasibility_tolerance: 1e-5
+      ipm_optimality_tolerance: 1e-4
+      parallel: "on"
+      random_seed: 123
+    gurobi-default:
+      threads: 4
+      method: 2 # barrier
+      crossover: 0
+      BarConvTol: 1.e-6
+      Seed: 123
+      AggFill: 0
+      PreDual: 0
+      GURO_PAR_BARDENSETHRESH: 200
+    gurobi-numeric-focus:
+      name: gurobi
+      NumericFocus: 3 # Favour numeric stability over speed
+      method: 2 # barrier
+      crossover: 0 # do not use crossover
+      BarHomogeneous: 1 # Use homogeneous barrier if standard does not converge
+      BarConvTol: 1.e-5
+      FeasibilityTol: 1.e-4
+      OptimalityTol: 1.e-4
+      ObjScale: -0.5
+      threads: 8
+      Seed: 123
+    gurobi-fallback: # Use gurobi defaults
+      name: gurobi
+      crossover: 0
+      method: 2 # barrier
+      BarHomogeneous: 1 # Use homogeneous barrier if standard does not converge
+      BarConvTol: 1.e-5
+      FeasibilityTol: 1.e-5
+      OptimalityTol: 1.e-5
+      Seed: 123
+      threads: 8
+    cplex-default:
+      threads: 4
+      lpmethod: 4 # barrier
+      solutiontype: 2 # non basic solution, ie no crossover
+      barrier.convergetol: 1.e-4 #5
+      feasopt.tolerance: 1.e-5 #6
+    cbc-default: {} # Used in CI
+    glpk-default: {} # Used in CI
+
+  mem_mb: 30000 # memory in MB; 20 GB enough for 50+B+I+H2; 100 GB for 181+B+I+H2
+  runtime: 6h # runtime in humanfriendly style https://humanfriendly.readthedocs.io/en/latest/

 plotting:
   map:
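How these named sets are consumed: at solve time, the string under `solver: options:` selects one entry from `solver_options`. A sketch of the lookup in Python, with dict literals mirroring the YAML above (illustrative, not the actual rule code):

```python
# Mirrors config["solving"] from the YAML above (values abbreviated).
solving = {
    "solver": {"name": "gurobi", "options": "gurobi-default"},
    "solver_options": {
        "gurobi-default": {"threads": 4, "method": 2, "crossover": 0},
        "glpk-default": {},
    },
}

set_of_options = solving["solver"]["options"]  # -> "gurobi-default"
solver_options = solving["solver_options"][set_of_options] if set_of_options else {}
solver_name = solving["solver"]["name"]  # -> "gurobi"
# These end up as n.optimize(solver_name=..., solver_options=...) in solve_network.py.
```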
diff --git a/config.tutorial.yaml b/config.tutorial.yaml
index e6be7cf5b..f0960dd72 100644
--- a/config.tutorial.yaml
+++ b/config.tutorial.yaml
@@ -78,7 +78,7 @@ cluster_options:
   remove_stubs_across_borders: true
   p_threshold_drop_isolated: 20 # [MW] isolated buses are being discarded if bus mean power is below the specified threshold
   p_threshold_merge_isolated: 300 # [MW] isolated buses are being merged into a single isolated bus if a bus mean power is below the specified threshold
-  s_threshold_fetch_isolated: 0.05 # [-] a share of the national load for merging an isolated network into a backbone network
+  s_threshold_fetch_isolated: false # [-] a share of the national load for merging an isolated network into a backbone network; set to false to disable
   cluster_network:
     algorithm: kmeans
     feature: solar+onwind-time
@@ -405,6 +405,63 @@ solving:
     #nhours: 10
   solver:
     name: glpk
+    options: glpk-default
+
+  solver_options:
+    highs-default:
+      # refer to https://ergo-code.github.io/HiGHS/options/definitions.html#solver
+      threads: 4
+      solver: "ipm"
+      run_crossover: "off"
+      small_matrix_value: 1e-6
+      large_matrix_value: 1e9
+      primal_feasibility_tolerance: 1e-5
+      dual_feasibility_tolerance: 1e-5
+      ipm_optimality_tolerance: 1e-4
+      parallel: "on"
+      random_seed: 123
+    gurobi-default:
+      threads: 4
+      method: 2 # barrier
+      crossover: 0
+      BarConvTol: 1.e-6
+      Seed: 123
+      AggFill: 0
+      PreDual: 0
+      GURO_PAR_BARDENSETHRESH: 200
+    gurobi-numeric-focus:
+      name: gurobi
+      NumericFocus: 3 # Favour numeric stability over speed
+      method: 2 # barrier
+      crossover: 0 # do not use crossover
+      BarHomogeneous: 1 # Use homogeneous barrier if standard does not converge
+      BarConvTol: 1.e-5
+      FeasibilityTol: 1.e-4
+      OptimalityTol: 1.e-4
+      ObjScale: -0.5
+      threads: 8
+      Seed: 123
+    gurobi-fallback: # Use gurobi defaults
+      name: gurobi
+      crossover: 0
+      method: 2 # barrier
+      BarHomogeneous: 1 # Use homogeneous barrier if standard does not converge
+      BarConvTol: 1.e-5
+      FeasibilityTol: 1.e-5
+      OptimalityTol: 1.e-5
+      Seed: 123
+      threads: 8
+    cplex-default:
+      threads: 4
+      lpmethod: 4 # barrier
+      solutiontype: 2 # non basic solution, ie no crossover
+      barrier.convergetol: 1.e-4 #5
+      feasopt.tolerance: 1.e-5 #6
+    cbc-default: {} # Used in CI
+    glpk-default: {} # Used in CI
+
+  mem_mb: 30000 # memory in MB; 20 GB enough for 50+B+I+H2; 100 GB for 181+B+I+H2
+  runtime: 6h # runtime in humanfriendly style https://humanfriendly.readthedocs.io/en/latest/

 plotting:
diff --git a/doc/release_notes.rst b/doc/release_notes.rst
index acf736f5a..aa7850775 100644
--- a/doc/release_notes.rst
+++ b/doc/release_notes.rst
@@ -24,6 +24,8 @@ E.g. if a new rule becomes available describe how to use it `snakemake -j1 run_t

 * Add an option to use csv format for custom demand imports. `PR #995 `__

+* Re-implement network optimisation using linopy methods. `PR #796 `__
+
 **Minor Changes and bug-fixing**

 * Minor bug-fixing to run the cluster wildcard min `PR #1019 `__
diff --git a/envs/environment.yaml b/envs/environment.yaml
index 207a8858c..bd95dda3b 100644
--- a/envs/environment.yaml
+++ b/envs/environment.yaml
@@ -12,7 +12,7 @@ dependencies:
 - pip
 - mamba # esp for windows build

-- pypsa>=0.24, <0.25
+- pypsa
 # - atlite>=0.2.4 # until https://github.com/PyPSA/atlite/issues/244 is not merged
 - dask
 - powerplantmatching>=0.5.7
@@ -27,6 +27,7 @@ dependencies:
 - memory_profiler
 - ruamel.yaml<=0.17.26
 - pytables
+- pyscipopt # added to comply with the quadratic objective requirement of the clustering script
 - lxml
 - numpy
 - pandas
@@ -77,7 +78,7 @@ dependencies:

 # Default solver for tests (required for CI)
 - glpk
-- ipopt<3.13.3
+- ipopt #<3.13.3
 - gurobi

 - pip:
diff --git a/scripts/_helpers.py b/scripts/_helpers.py
index 2307e6d4a..5538f6525 100644
--- a/scripts/_helpers.py
+++ b/scripts/_helpers.py
@@ -453,31 +453,6 @@ def dlProgress(count, blockSize, totalSize, roundto=roundto):
     urllib.request.urlretrieve(url, file, reporthook=dlProgress, data=data)


-def get_aggregation_strategies(aggregation_strategies):
-    """
-    Default aggregation strategies that cannot be defined in .yaml format must
-    be specified within the function, otherwise (when defaults are passed in
-    the function's definition) they get lost when custom values are specified
-    in the config.
-    """
-    import numpy as np
-
-    # to handle the new version of PyPSA.
-    try:
-        from pypsa.clustering.spatial import _make_consense
-    except Exception:
-        # TODO: remove after new release and update minimum pypsa version
-        from pypsa.clustering.spatial import _make_consense
-
-    bus_strategies = dict(country=_make_consense("Bus", "country"))
-    bus_strategies.update(aggregation_strategies.get("buses", {}))
-
-    generator_strategies = {"build_year": lambda x: 0, "lifetime": lambda x: np.inf}
-    generator_strategies.update(aggregation_strategies.get("generators", {}))
-
-    return bus_strategies, generator_strategies
-
-
 def mock_snakemake(rulename, **wildcards):
     """
     This function is expected to be executed from the "scripts"-directory of "
diff --git a/scripts/base_network.py b/scripts/base_network.py
index 04e0c388d..eaeeb73b4 100644
--- a/scripts/base_network.py
+++ b/scripts/base_network.py
@@ -513,6 +513,18 @@ def base_network(
     if hvdc_as_lines_config:
         lines = pd.concat([lines_ac, lines_dc])
+        lines.drop(
+            labels=[
+                "bus0_lon",
+                "bus0_lat",
+                "bus1_lon",
+                "bus1_lat",
+                "bus_0_coors",
+                "bus_1_coors",
+            ],
+            axis=1,
+            inplace=True,
+        )
         n.import_components_from_dataframe(lines, "Line")
     else:
         lines_dc = _set_electrical_parameters_links(links_config, lines_dc)
@@ -522,6 +534,18 @@ def base_network(
             axis=1,
             result_type="reduce",
         )
+        lines_ac.drop(
+            labels=[
+                "bus0_lon",
+                "bus0_lat",
+                "bus1_lon",
+                "bus1_lat",
+                "bus_0_coors",
+                "bus_1_coors",
+            ],
+            axis=1,
+            inplace=True,
+        )
         n.import_components_from_dataframe(lines_ac, "Line")
     # The columns which names starts with "bus" are mixed up with the third-bus specification
     # when executing additional_linkports()
diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py
index 34b116a99..00cbbe724 100644
--- a/scripts/cluster_network.py
+++ b/scripts/cluster_network.py
@@ -125,15 +125,14 @@
 from functools import reduce

 import geopandas as gpd
+import linopy
 import numpy as np
 import pandas as pd
-import pyomo.environ as po
 import pypsa
 from _helpers import (
     REGION_COLS,
     configure_logging,
     create_logger,
-    get_aggregation_strategies,
     sets_path_to_root,
     update_p_nom_max,
 )
@@ -336,47 +335,22 @@ def distribute_clusters(
         distribution_factor.sum(), 1.0, rtol=1e-3
     ), f"Country weights L must sum up to 1.0 when distributing clusters. Is {distribution_factor.sum()}."

-    m = po.ConcreteModel()
-
-    def n_bounds(model, *n_id):
-        """
-        Create a function that makes a bound pair for pyomo.
-
-        Use n_bounds(model, n_id) if N is Single-Index
-        Use n_bounds(model, *n_id) if N is Multi-Index
-        Example: https://pyomo.readthedocs.io/en/stable/pyomo_modeling_components/Variables.html
-
-        Returns
-        -------
-        bounds = A function (or Python object) that gives a (lower,upper) bound pair i.e.(1,10) for the variable
-        """
-        return (1, N[n_id])
-
-    m.n = po.Var(list(distribution_factor.index), bounds=n_bounds, domain=po.Integers)
-    m.tot = po.Constraint(expr=(po.summation(m.n) == n_clusters))
-    m.objective = po.Objective(
-        expr=sum(
-            (m.n[i] - distribution_factor.loc[i] * n_clusters) ** 2
-            for i in distribution_factor.index
-        ),
-        sense=po.minimize,
+    m = linopy.Model()
+    clusters = m.add_variables(
+        lower=1, upper=N, coords=[distribution_factor.index], name="n", integer=True
     )
-
-    opt = po.SolverFactory(solver_name)
-    if not opt.has_capability("quadratic_objective"):
-        logger.warning(
-            f"The configured solver `{solver_name}` does not support quadratic objectives. Falling back to `ipopt`."
+    m.add_constraints(clusters.sum() == n_clusters, name="tot")
+    # leave out the constant term (distribution_factor * n_clusters) ** 2 of the objective
+    m.objective = (
+        clusters * clusters - 2 * clusters * distribution_factor * n_clusters
+    ).sum()
+    if solver_name == "gurobi":
+        logging.getLogger("gurobipy").propagate = False
+    elif solver_name != "scip":
+        logger.info(
+            f"The configured solver `{solver_name}` may not support quadratic objectives. Falling back to `scip`."
         )
-        opt = po.SolverFactory("ipopt")
-
-    results = opt.solve(m)
-    assert (
-        results["Solver"][0]["Status"] == "ok"
-    ), f"Solver returned non-optimally: {results}"
-
-    return (
-        pd.Series(m.n.get_values(), index=distribution_factor.index).round().astype(int)
-    )
+        solver_name = "scip"
+    m.solve(solver_name=solver_name)
+    return m.solution["n"].to_series().astype(int)


 def busmap_for_gadm_clusters(inputs, n, gadm_level, geo_crs, country_list):
@@ -577,9 +551,10 @@ def clustering_for_n_clusters(
     extended_link_costs=0,
     focus_weights=None,
 ):
-    bus_strategies, generator_strategies = get_aggregation_strategies(
-        aggregation_strategies
-    )
+    line_strategies = aggregation_strategies.get("lines", dict())
+    line_strategies.update({"geometry": "first", "bounds": "first"})
+    generator_strategies = aggregation_strategies.get("generators", dict())
+    one_port_strategies = aggregation_strategies.get("one_ports", dict())

     if not isinstance(custom_busmap, pd.Series):
         if alternative_clustering:
@@ -605,12 +580,20 @@ def clustering_for_n_clusters(
     clustering = get_clustering_from_busmap(
         n,
         busmap,
-        bus_strategies=bus_strategies,
         aggregate_generators_weighted=True,
         aggregate_generators_carriers=aggregate_carriers,
         aggregate_one_ports=["Load", "StorageUnit"],
         line_length_factor=line_length_factor,
+        line_strategies=line_strategies,
         generator_strategies=generator_strategies,
+        bus_strategies={
+            "lat": "mean",
+            "lon": "mean",
+            "tag_substation": "first",
+            "tag_area": "first",
+            "country": "first",
+        },
+        one_port_strategies=one_port_strategies,
         scale_link_capital_costs=False,
     )
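For reference, the integer quadratic program that `distribute_clusters` now builds can be reproduced standalone with linopy. A minimal sketch with made-up country weights and bus counts (the names `weights`, `n_buses` and the values are illustrative only); it assumes `pyscipopt` is installed, matching the new environment entry:

```python
import linopy
import pandas as pd

# Illustrative inputs: normalised per-country weights and buses per country.
idx = pd.Index(["NG", "BJ", "NE"], name="country")
weights = pd.Series([0.6, 0.3, 0.1], index=idx)  # sums to 1.0
n_buses = pd.Series([50, 20, 10], index=idx)  # upper bound per country
n_clusters = 10

m = linopy.Model()
# One integer variable per country: how many clusters it receives.
clusters = m.add_variables(
    lower=1, upper=n_buses, coords=[idx], name="n", integer=True
)
m.add_constraints(clusters.sum() == n_clusters, name="tot")
# Minimise sum_c (n_c - weights_c * n_clusters)^2, dropping the constant term.
m.objective = (clusters * clusters - 2 * clusters * weights * n_clusters).sum()
m.solve(solver_name="scip")  # SCIP handles the integer quadratic objective
print(m.solution["n"].to_series().astype(int))
```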
diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py
index 72a44355d..e009dbd42 100644
--- a/scripts/simplify_network.py
+++ b/scripts/simplify_network.py
@@ -93,16 +93,10 @@
 import pandas as pd
 import pypsa
 import scipy as sp
-from _helpers import (
-    configure_logging,
-    create_logger,
-    get_aggregation_strategies,
-    update_p_nom_max,
-)
+from _helpers import configure_logging, create_logger, update_p_nom_max
 from add_electricity import load_costs
 from cluster_network import cluster_regions, clustering_for_n_clusters
 from pypsa.clustering.spatial import (
-    aggregategenerators,
     aggregateoneport,
     busmap_by_stubs,
     get_clustering_from_busmap,
@@ -276,11 +270,15 @@ def replace_components(n, c, df, pnl):

     _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output)

-    _, generator_strategies = get_aggregation_strategies(aggregation_strategies)
+    generator_strategies = aggregation_strategies["generators"]

     carriers = set(n.generators.carrier) - set(exclude_carriers)
-    generators, generators_pnl = aggregategenerators(
-        n, busmap, carriers=carriers, custom_strategies=generator_strategies
+    generators, generators_pnl = aggregateoneport(
+        n,
+        busmap,
+        "Generator",
+        carriers=carriers,
+        custom_strategies=generator_strategies,
     )

     replace_components(n, "Generator", generators, generators_pnl)
@@ -588,19 +586,28 @@ def aggregate_to_substations(n, aggregation_strategies=dict(), buses_i=None):
     if not dist.empty:
         busmap.loc[buses_i] = dist.idxmin(1)

-    bus_strategies, generator_strategies = get_aggregation_strategies(
-        aggregation_strategies
-    )
+    line_strategies = aggregation_strategies.get("lines", dict())
+    line_strategies.update({"geometry": "first", "bounds": "first"})
+    generator_strategies = aggregation_strategies.get("generators", dict())
+    one_port_strategies = aggregation_strategies.get("one_ports", dict())

     clustering = get_clustering_from_busmap(
         n,
         busmap,
-        bus_strategies=bus_strategies,
         aggregate_generators_weighted=True,
         aggregate_generators_carriers=None,
         aggregate_one_ports=["Load", "StorageUnit"],
         line_length_factor=1.0,
+        line_strategies=line_strategies,
+        bus_strategies={
+            "lat": "mean",
+            "lon": "mean",
+            "tag_substation": "first",
+            "tag_area": "first",
+            "country": "first",
+        },
         generator_strategies=generator_strategies,
+        one_port_strategies=one_port_strategies,
         scale_link_capital_costs=False,
     )
     return clustering.network, busmap
@@ -848,19 +855,28 @@ def merge_into_network(n, threshold, aggregation_strategies=dict()):
     if (busmap.index == busmap).all():
         return n, n.buses.index.to_series()

-    bus_strategies, generator_strategies = get_aggregation_strategies(
-        aggregation_strategies
-    )
+    line_strategies = aggregation_strategies.get("lines", dict())
+    line_strategies.update({"geometry": "first", "bounds": "first"})
+    generator_strategies = aggregation_strategies.get("generators", dict())
+    one_port_strategies = aggregation_strategies.get("one_ports", dict())

     clustering = get_clustering_from_busmap(
         n,
         busmap,
-        bus_strategies=bus_strategies,
         aggregate_generators_weighted=True,
         aggregate_generators_carriers=None,
         aggregate_one_ports=["Load", "StorageUnit"],
         line_length_factor=1.0,
+        line_strategies=line_strategies,
+        bus_strategies={
+            "lat": "mean",
+            "lon": "mean",
+            "tag_substation": "first",
+            "tag_area": "first",
+            "country": "first",
+        },
         generator_strategies=generator_strategies,
+        one_port_strategies=one_port_strategies,
         scale_link_capital_costs=False,
     )

@@ -934,19 +950,28 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()):
     if (busmap.index == busmap).all():
         return n, n.buses.index.to_series()

-    bus_strategies, generator_strategies = get_aggregation_strategies(
-        aggregation_strategies
-    )
+    line_strategies = aggregation_strategies.get("lines", dict())
+    line_strategies.update({"geometry": "first", "bounds": "first"})
+    generator_strategies = aggregation_strategies.get("generators", dict())
+    one_port_strategies = aggregation_strategies.get("one_ports", dict())

     clustering = get_clustering_from_busmap(
         n,
         busmap,
-        bus_strategies=bus_strategies,
         aggregate_generators_weighted=True,
         aggregate_generators_carriers=None,
         aggregate_one_ports=["Load", "StorageUnit"],
         line_length_factor=1.0,
+        line_strategies=line_strategies,
+        bus_strategies={
+            "lat": "mean",
+            "lon": "mean",
+            "tag_substation": "first",
+            "tag_area": "first",
+            "country": "first",
+        },
         generator_strategies=generator_strategies,
+        one_port_strategies=one_port_strategies,
         scale_link_capital_costs=False,
     )

@@ -976,14 +1001,14 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()):
         "exclude_carriers", []
     )
     hvdc_as_lines = snakemake.params.electricity["hvdc_as_lines"]
-    aggregation_strategies = snakemake.params.cluster_options.get(
-        "aggregation_strategies", {}
-    )
-    # translate str entries of aggregation_strategies to pd.Series functions:
-    aggregation_strategies = {
-        p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()}
-        for p in aggregation_strategies.keys()
-    }
+    # aggregation_strategies = snakemake.params.cluster_options.get(
+    #     "aggregation_strategies", {}
+    # )
+    ## translate str entries of aggregation_strategies to pd.Series functions:
+    # aggregation_strategies = {
+    #     p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()}
+    #     for p in aggregation_strategies.keys()
+    # }

     n, trafo_map = simplify_network_to_base_voltage(n, linetype, base_voltage)

     Nyears = n.snapshot_weightings.objective.sum() / 8760
@@ -1004,7 +1029,7 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()):
         snakemake.params.config_links,
         snakemake.output,
         exclude_carriers,
-        aggregation_strategies,
+        snakemake.params.aggregation_strategies,
     )

     busmaps = [trafo_map, simplify_links_map]
@@ -1021,12 +1046,14 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()):
         hvdc_as_lines,
         lines_length_factor,
         snakemake.output,
-        aggregation_strategies=aggregation_strategies,
+        aggregation_strategies=snakemake.params.aggregation_strategies,
     )
     busmaps.append(stub_map)

     if cluster_config.get("to_substations", False):
-        n, substation_map = aggregate_to_substations(n, aggregation_strategies)
+        n, substation_map = aggregate_to_substations(
+            n, snakemake.params.aggregation_strategies
+        )
         busmaps.append(substation_map)

     # treatment of outliers (nodes without a profile for considered carrier):
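With `get_aggregation_strategies` gone and the string-to-`pd.Series` translation commented out above, strategy names now travel from the config straight into `get_clustering_from_busmap` as plain strings, which pandas resolves as named aggregation functions. An illustrative `aggregation_strategies` entry and the dicts derived from it — the keys and values here are examples, not project defaults:

```python
# As it would arrive via snakemake.params.aggregation_strategies (illustrative):
aggregation_strategies = {
    "generators": {"p_nom_max": "sum", "ramp_limit_up": "max"},
    "lines": {"v_nom": "first"},
    "one_ports": {},
}

# What the simplify/cluster functions now build from it:
line_strategies = aggregation_strategies.get("lines", dict())
line_strategies.update({"geometry": "first", "bounds": "first"})
generator_strategies = aggregation_strategies.get("generators", dict())
one_port_strategies = aggregation_strategies.get("one_ports", dict())
```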
@@ -1058,7 +1085,9 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()):
             logger.info(
                 f"clustering preparation (hac): aggregating {len(buses_i)} buses of type {carrier}."
             )
-            n, busmap_hac = aggregate_to_substations(n, aggregation_strategies, buses_i)
+            n, busmap_hac = aggregate_to_substations(
+                n, snakemake.params.aggregation_strategies, buses_i
+            )
             busmaps.append(busmap_hac)

     if snakemake.wildcards.simpl:
@@ -1088,7 +1117,7 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()):
             solver_name,
             cluster_config.get("algorithm", "hac"),
             cluster_config.get("feature", None),
-            aggregation_strategies,
+            aggregation_strategies=snakemake.params.aggregation_strategies,
         )
         busmaps.append(cluster_map)

@@ -1118,7 +1147,7 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()):
         n, merged_nodes_map = merge_isolated_nodes(
             n,
             threshold=p_threshold_merge_isolated,
-            aggregation_strategies=aggregation_strategies,
+            aggregation_strategies=snakemake.params.aggregation_strategies,
         )
         busmaps.append(merged_nodes_map)

@@ -1126,7 +1155,7 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()):
         n, fetched_nodes_map = merge_into_network(
             n,
             threshold=s_threshold_fetch_isolated,
-            aggregation_strategies=aggregation_strategies,
+            aggregation_strategies=snakemake.params.aggregation_strategies,
         )
         busmaps.append(fetched_nodes_map)

diff --git a/scripts/solve_network.py b/scripts/solve_network.py
index f83b47478..1e5ba5a75 100755
--- a/scripts/solve_network.py
+++ b/scripts/solve_network.py
@@ -52,15 +52,15 @@
 linear optimal power flow (plus investment planning)
 is provided in the `documentation of PyPSA `_.

-The optimization is based on the ``pyomo=False`` setting in the :func:`network.lopf` and :func:`pypsa.linopf.ilopf` function.
-Additionally, some extra constraints specified in :mod:`prepare_network` are added.
+The optimization is based on the :func:`network.optimize` function.
+Additionally, some extra constraints specified in :mod:`prepare_network` and :mod:`solve_network` are added.

 Solving the network in multiple iterations is motivated through the dependence of transmission line capacities
 and impedances on values of corresponding flows. As lines are expanded their electrical parameters change,
 which renders the optimisation bilinear even if the power flow
 equations are linearized. To retain the computational advantage of continuous linear programming, a sequential linear programming technique
 is used, where in between iterations the line impedances are updated.
 Details (and errors introduced through this heuristic) are discussed in the paper

 - Fabian Neumann and Tom Brown. `Heuristics for Transmission Expansion Planning in Low-Carbon Energy System Models `_), *16th International Conference on the European Energy Market*, 2019. `arXiv:1907.10548 `_.
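In the linopy world this sequential linear programming loop is provided by PyPSA itself rather than by `pypsa.linopf.ilopf`. A minimal usage sketch (the network file name and solver choice are illustrative):

```python
import pypsa

n = pypsa.Network("networks/elec_s_6.nc")  # illustrative path

common = dict(solver_name="glpk", solver_options={})
if n.lines.s_nom_extendable.any():
    # Line capacity is a variable: iterate so impedances follow the expansion.
    status, condition = n.optimize.optimize_transmission_expansion_iteratively(
        min_iterations=4, max_iterations=6, track_iterations=False, **common
    )
else:
    # Nothing bilinear remains: a single LP solve suffices.
    status, condition = n.optimize(**common)
print(status, condition)
```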
@@ -85,21 +85,16 @@
 import pandas as pd
 import pypsa
+import xarray as xr
 from _helpers import configure_logging, create_logger
+from linopy import merge
 from pypsa.descriptors import get_switchable_as_dense as get_as_dense
-from pypsa.linopf import (
-    define_constraints,
-    define_variables,
-    get_var,
-    ilopf,
-    join_exprs,
-    linexpr,
-    network_lopf,
-)
+from pypsa.optimization.abstract import optimize_transmission_expansion_iteratively
+from pypsa.optimization.optimize import optimize
+from vresutils.benchmark import memory_logger

 logger = create_logger(__name__)


-def prepare_network(n, solve_opts):
+def prepare_network(n, solve_opts, config):
     if "clip_p_max_pu" in solve_opts:
         for df in (n.generators_t.p_max_pu, n.storage_units_t.inflow):
             df.where(df > solve_opts["clip_p_max_pu"], other=0.0, inplace=True)
@@ -149,6 +144,25 @@ def prepare_network(n, solve_opts):


 def add_CCL_constraints(n, config):
+    """
+    Add CCL (country & carrier limit) constraint to the network.
+
+    Add minimum and maximum levels of generator nominal capacity per carrier
+    for individual countries. Opts and path for agg_p_nom_minmax.csv must be defined
+    in config.yaml. Default file is available at data/agg_p_nom_minmax.csv.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    config : dict
+
+    Example
+    -------
+    scenario:
+        opts: [Co2L-CCL-24H]
+    electricity:
+        agg_p_nom_limits: data/agg_p_nom_minmax.csv
+    """
     agg_p_nom_limits = config["electricity"].get("agg_p_nom_limits")

     try:
@@ -164,32 +178,57 @@ def add_CCL_constraints(n, config):
     )
     gen_country = n.generators.bus.map(n.buses.country)
-    # cc means country and carrier
-    p_nom_per_cc = (
-        pd.DataFrame(
-            {
-                "p_nom": linexpr((1, get_var(n, "Generator", "p_nom"))),
-                "country": gen_country,
-                "carrier": n.generators.carrier,
-            }
-        )
-        .dropna(subset=["p_nom"])
-        .groupby(["country", "carrier"])
-        .p_nom.apply(join_exprs)
-    )
+    capacity_variable = n.model["Generator-p_nom"]

-    minimum = agg_p_nom_minmax["min"].dropna()
-    if not minimum.empty:
-        minconstraint = define_constraints(
-            n, p_nom_per_cc[minimum.index], ">=", minimum, "agg_p_nom", "min"
-        )
-    maximum = agg_p_nom_minmax["max"].dropna()
-    if not maximum.empty:
-        maxconstraint = define_constraints(
-            n, p_nom_per_cc[maximum.index], "<=", maximum, "agg_p_nom", "max"
-        )
+    lhs = []
+    ext_carriers = n.generators.query("p_nom_extendable").carrier.unique()
+    for c in ext_carriers:
+        ext_carrier = n.generators.query("p_nom_extendable and carrier == @c")
+        country_grouper = (
+            ext_carrier.bus.map(n.buses.country)
+            .rename_axis("Generator-ext")
+            .rename("country")
+        )
+        ext_carrier_per_country = capacity_variable.loc[
+            country_grouper.index
+        ].groupby_sum(country_grouper)
+        lhs.append(ext_carrier_per_country)
+    lhs = merge(lhs, dim=pd.Index(ext_carriers, name="carrier"))
+
+    min_matrix = agg_p_nom_minmax["min"].to_xarray().unstack().reindex_like(lhs)
+    max_matrix = agg_p_nom_minmax["max"].to_xarray().unstack().reindex_like(lhs)
+
+    n.model.add_constraints(
+        lhs >= min_matrix, name="agg_p_nom_min", mask=min_matrix.notnull()
+    )
+    n.model.add_constraints(
+        lhs <= max_matrix, name="agg_p_nom_max", mask=max_matrix.notnull()
+    )
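For orientation, `agg_p_nom_minmax.csv` appears to need a (country, carrier) index plus `min`/`max` columns, so that the `.to_xarray().unstack()` calls above yield country-by-carrier matrices. A hypothetical file (the numbers are made up):

```python
import io

import pandas as pd

csv = io.StringIO(
    "country,carrier,min,max\n"
    "NG,onwind,0,8000\n"
    "NG,solar,1000,\n"
    "BJ,OCGT,,2000\n"
)
agg_p_nom_minmax = pd.read_csv(csv, index_col=["country", "carrier"])
print(agg_p_nom_minmax["min"])  # NaN entries are masked out of the constraints
```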

 def add_EQ_constraints(n, o, scaling=1e-1):
+    """
+    Add equity constraints to the network.
+
+    Currently this is implemented for the electricity sector only.
+
+    Require each country or node to produce on average a minimal share
+    of its total electricity consumption itself. Example: EQ0.7c demands each country
+    to produce on average at least 70% of its consumption; EQ0.7 demands
+    each node to produce on average at least 70% of its consumption.
+
+    Opts must be specified in the config.yaml.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    o : str
+
+    Example
+    -------
+    scenario:
+        opts: [Co2L-EQ0.7-24h]
+    """
     float_regex = "[0-9]*\.?[0-9]+"
     level = float(re.findall(float_regex, o)[0])
     if o[-1] == "c":
@@ -210,99 +249,150 @@ def add_EQ_constraints(n, o, scaling=1e-1):
     )
     inflow = inflow.reindex(load.index).fillna(0.0)
     rhs = scaling * (level * load - inflow)
+    dispatch_variable = n.model["Generator-p"]
     lhs_gen = (
-        linexpr(
-            (n.snapshot_weightings.generators * scaling, get_var(n, "Generator", "p").T)
-        )
-        .T.groupby(ggrouper, axis=1)
-        .apply(join_exprs)
+        (dispatch_variable * (n.snapshot_weightings.generators * scaling))
+        .groupby(ggrouper.to_xarray())
+        .sum()
+        .sum("snapshot")
     )
-    lhs_spill = (
-        linexpr(
-            (
-                -n.snapshot_weightings.stores * scaling,
-                get_var(n, "StorageUnit", "spill").T,
-            )
-        )
-        .T.groupby(sgrouper, axis=1)
-        .apply(join_exprs)
-    )
-    lhs_spill = lhs_spill.reindex(lhs_gen.index).fillna("")
-    lhs = lhs_gen + lhs_spill
-    define_constraints(n, lhs, ">=", rhs, "equity", "min")
+    # TODO: double check that this is really needed; why do we have to subtract the spillage?
+    if not n.storage_units_t.inflow.empty:
+        spillage_variable = n.model["StorageUnit-spill"]
+        lhs_spill = (
+            (spillage_variable * (-n.snapshot_weightings.stores * scaling))
+            .groupby(sgrouper.to_xarray())
+            .sum()
+            .sum("snapshot")
+        )
+        lhs = lhs_gen + lhs_spill
+    else:
+        lhs = lhs_gen
+    n.model.add_constraints(lhs >= rhs, name="equity_min")
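In symbols (my notation), the constraint added above reads, for each country or node $c$, with level $\lambda$, scaling factor $s$ and snapshot weights $w_t$:

$$
s \sum_t w_t \Big( \sum_{g \in c} p_{g,t} \;-\; \sum_{r \in c} \mathrm{spill}_{r,t} \Big) \;\geq\; s \,\big( \lambda\, d_c - \mathrm{inflow}_c \big)
$$

where $d_c$ is the (weighted) annual electricity consumption of $c$ and the spillage term only enters when storage units have inflow.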

 def add_BAU_constraints(n, config):
-    ext_c = n.generators.query("p_nom_extendable").carrier.unique()
-    mincaps = pd.Series(
-        config["electricity"].get("BAU_mincapacities", {key: 0 for key in ext_c})
-    )
-    lhs = (
-        linexpr((1, get_var(n, "Generator", "p_nom")))
-        .groupby(n.generators.carrier)
-        .apply(join_exprs)
-    )
-    define_constraints(n, lhs, ">=", mincaps[lhs.index], "Carrier", "bau_mincaps")
-
-    maxcaps = pd.Series(
-        config["electricity"].get("BAU_maxcapacities", {key: np.inf for key in ext_c})
-    )
-    lhs = (
-        linexpr((1, get_var(n, "Generator", "p_nom")))
-        .groupby(n.generators.carrier)
-        .apply(join_exprs)
-    )
-    define_constraints(n, lhs, "<=", maxcaps[lhs.index], "Carrier", "bau_maxcaps")
+    """
+    Add a per-carrier minimal overall capacity.
+
+    BAU_mincapacities and opts must be adjusted in the config.yaml.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    config : dict
+
+    Example
+    -------
+    scenario:
+        opts: [Co2L-BAU-24h]
+    electricity:
+        BAU_mincapacities:
+            solar: 0
+            onwind: 0
+            OCGT: 100000
+            offwind-ac: 0
+            offwind-dc: 0
+    This sets the minimum total expansion across all nodes, e.g. 100 GW of OCGT
+    in Europe: OCGT bus 1 + OCGT bus 2 + ... >= 100000.
+    """
+    mincaps = pd.Series(config["electricity"]["BAU_mincapacities"])
+    p_nom = n.model["Generator-p_nom"]
+    ext_i = n.generators.query("p_nom_extendable")
+    ext_carrier_i = xr.DataArray(ext_i.carrier.rename_axis("Generator-ext"))
+    lhs = p_nom.groupby(ext_carrier_i).sum()
+    rhs = mincaps[lhs.indexes["carrier"]].rename_axis("carrier")
+    n.model.add_constraints(lhs >= rhs, name="bau_mincaps")


 def add_SAFE_constraints(n, config):
-    peakdemand = (
-        1.0 + config["electricity"]["SAFE_reservemargin"]
-    ) * n.loads_t.p_set.sum(axis=1).max()
-    conv_techs = config["plotting"]["conv_techs"]
+    """
+    Add a capacity reserve margin of a certain fraction above the peak demand.
+    Renewable generators and storage do not contribute. Ignores network.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    config : dict
+
+    Example
+    -------
+    config.yaml requires to specify opts:
+
+    scenario:
+        opts: [Co2L-SAFE-24h]
+    electricity:
+        SAFE_reservemargin: 0.1
+    This sets a reserve margin of 10% above the peak demand.
+    """
+    peakdemand = n.loads_t.p_set.sum(axis=1).max()
+    margin = 1.0 + config["electricity"]["SAFE_reservemargin"]
+    reserve_margin = peakdemand * margin
+    conventional_carriers = config["electricity"]["conventional_carriers"]
+    ext_gens_i = n.generators.query(
+        "carrier in @conventional_carriers & p_nom_extendable"
+    ).index
+    p_nom = n.model["Generator-p_nom"].loc[ext_gens_i]
+    lhs = p_nom.sum()
     exist_conv_caps = n.generators.query(
-        "~p_nom_extendable & carrier in @conv_techs"
+        "~p_nom_extendable & carrier in @conventional_carriers"
     ).p_nom.sum()
-    ext_gens_i = n.generators.query("carrier in @conv_techs & p_nom_extendable").index
-    lhs = linexpr((1, get_var(n, "Generator", "p_nom")[ext_gens_i])).sum()
-    rhs = peakdemand - exist_conv_caps
-    define_constraints(n, lhs, ">=", rhs, "Safe", "mintotalcap")
+    rhs = reserve_margin - exist_conv_caps
+    n.model.add_constraints(lhs >= rhs, name="safe_mintotalcap")
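A quick numeric check of the SAFE arithmetic, with illustrative numbers:

```python
# Illustrative numbers for the SAFE constraint:
peakdemand = 100_000  # MW of peak demand
margin = 1.0 + 0.1  # SAFE_reservemargin: 0.1
exist_conv_caps = 80_000  # MW of non-extendable conventional capacity
rhs = peakdemand * margin - exist_conv_caps
assert rhs == 30_000  # extendable conventional p_nom must sum to >= 30 GW
```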
+ """ reserve_config = config["electricity"]["operational_reserve"] EPSILON_LOAD = reserve_config["epsilon_load"] EPSILON_VRES = reserve_config["epsilon_vres"] CONTINGENCY = reserve_config["contingency"] # Reserve Variables - reserve = get_var(n, "Generator", "r") - lhs = linexpr((1, reserve)).sum(1) + n.model.add_variables( + 0, np.inf, coords=[sns, n.generators.index], name="Generator-r" + ) + reserve = n.model["Generator-r"] + lhs = reserve.sum("Generator") # Share of extendable renewable capacities ext_i = n.generators.query("p_nom_extendable").index vres_i = n.generators_t.p_max_pu.columns if not ext_i.empty and not vres_i.empty: capacity_factor = n.generators_t.p_max_pu[vres_i.intersection(ext_i)] - renewable_capacity_variables = get_var(n, "Generator", "p_nom")[ - vres_i.intersection(ext_i) - ] - lhs += linexpr( - (-EPSILON_VRES * capacity_factor, renewable_capacity_variables) - ).sum(1) + renewable_capacity_variables = ( + n.model["Generator-p_nom"] + .loc[vres_i.intersection(ext_i)] + .rename({"Generator-ext": "Generator"}) + ) + lhs = merge( + lhs, + (renewable_capacity_variables * (-EPSILON_VRES * capacity_factor)).sum( + ["Generator"] + ), + ) - # Total demand at t - demand = n.loads_t.p.sum(1) + # Total demand per t + demand = get_as_dense(n, "Load", "p_set").sum(axis=1) # VRES potential of non extendable generators capacity_factor = n.generators_t.p_max_pu[vres_i.difference(ext_i)] renewable_capacity = n.generators.p_nom[vres_i.difference(ext_i)] - potential = (capacity_factor * renewable_capacity).sum(1) + potential = (capacity_factor * renewable_capacity).sum(axis=1) # Right-hand-side rhs = EPSILON_LOAD * demand + EPSILON_VRES * potential + CONTINGENCY - define_constraints(n, lhs, ">=", rhs, "Reserve margin") + n.model.add_constraints(lhs >= rhs, name="reserve_margin") def update_capacity_constraint(n): @@ -310,65 +400,82 @@ def update_capacity_constraint(n): ext_i = n.generators.query("p_nom_extendable").index fix_i = n.generators.query("not p_nom_extendable").index - dispatch = get_var(n, "Generator", "p") - reserve = get_var(n, "Generator", "r") - + dispatch = n.model["Generator-p"] + reserve = n.model["Generator-r"] capacity_fixed = n.generators.p_nom[fix_i] p_max_pu = get_as_dense(n, "Generator", "p_max_pu") - lhs = linexpr((1, dispatch), (1, reserve)) + lhs = merge( + dispatch * 1, + reserve * 1, + ) if not ext_i.empty: - capacity_variable = get_var(n, "Generator", "p_nom") - lhs += linexpr((-p_max_pu[ext_i], capacity_variable)).reindex( - columns=gen_i, fill_value="" - ) + capacity_variable = n.model["Generator-p_nom"] + lhs = dispatch + reserve - capacity_variable * xr.DataArray(p_max_pu[ext_i]) rhs = (p_max_pu[fix_i] * capacity_fixed).reindex(columns=gen_i, fill_value=0) - - define_constraints(n, lhs, "<=", rhs, "Generators", "updated_capacity_constraint") + n.model.add_constraints( + lhs <= rhs, name="gen_updated_capacity_constraint", mask=rhs.notnull() + ) def add_operational_reserve_margin(n, sns, config): """ Build reserve margin constraints based on the formulation given in https://genxproject.github.io/GenX/dev/core/#Reserves. 
- """ - - define_variables(n, 0, np.inf, "Generator", "r", axes=[sns, n.generators.index]) - - add_operational_reserve_margin_constraint(n, config) + Parameters + ---------- + n : pypsa.Network + sns: pd.DatetimeIndex + config : dict + + Example: + -------- + config.yaml requires to specify operational_reserve: + operational_reserve: # like https://genxproject.github.io/GenX/dev/core/#Reserves + activate: true + epsilon_load: 0.02 # percentage of load at each snapshot + epsilon_vres: 0.02 # percentage of VRES at each snapshot + contingency: 400000 # MW + """ + add_operational_reserve_margin_constraint(n, sns, config) update_capacity_constraint(n) def add_battery_constraints(n): + """ + Add constraint ensuring that charger = discharger, i.e. + 1 * charger_size - efficiency * discharger_size = 0 + """ nodes = n.buses.index[n.buses.carrier == "battery"] - if nodes.empty or ("Link", "p_nom") not in n.variables.index: + # if nodes.empty or ("Link", "p_nom") not in n.variables.index: + if nodes.empty: return - link_p_nom = get_var(n, "Link", "p_nom") - lhs = linexpr( - (1, link_p_nom[nodes + " charger"]), - ( - -n.links.loc[nodes + " discharger", "efficiency"].values, - link_p_nom[nodes + " discharger"].values, - ), + vars_link = n.model["Link-p_nom"] + eff = n.links.loc[nodes + " discharger", "efficiency"] + lhs = merge( + vars_link.sel({"Link-ext": nodes + " charger"}) * 1, + # for some reasons, eff is one element longer as compared with vars_link + vars_link.sel({"Link-ext": nodes + " discharger"}) * -eff[0], ) - define_constraints(n, lhs, "=", 0, "Link", "charger_ratio") + n.model.add_constraints(lhs == 0, name="link_charger_ratio") -def add_RES_constraints(n, res_share): +def add_RES_constraints(n, res_share, config): lgrouper = n.loads.bus.map(n.buses.country) + # TODO drop load ggrouper = n.generators.bus.map(n.buses.country) sgrouper = n.storage_units.bus.map(n.buses.country) cgrouper = n.links.bus0.map(n.buses.country) logger.warning( - "The add_RES_constraints functionality is still work in progress. " + "The add_RES_constraints() is still work in progress. " "Unexpected results might be incurred, particularly if " "temporal clustering is applied or if an unexpected change of technologies " - "is subject to the obtimisation." + "is subject to the optimisation." 

-def add_RES_constraints(n, res_share):
+def add_RES_constraints(n, res_share, config):
     lgrouper = n.loads.bus.map(n.buses.country)
+    # TODO: drop load
     ggrouper = n.generators.bus.map(n.buses.country)
     sgrouper = n.storage_units.bus.map(n.buses.country)
     cgrouper = n.links.bus0.map(n.buses.country)

     logger.warning(
-        "The add_RES_constraints functionality is still work in progress. "
+        "add_RES_constraints() is still a work in progress. "
         "Unexpected results might be incurred, particularly if "
         "temporal clustering is applied or if an unexpected change of technologies "
-        "is subject to the obtimisation."
+        "is subject to the optimisation."
     )

     load = (
@@ -378,109 +485,67 @@ def add_RES_constraints(n, res_share, config):
     )

     rhs = res_share * load

-    res_techs = [
-        "solar",
-        "onwind",
-        "offwind-dc",
-        "offwind-ac",
-        "battery",
-        "hydro",
-        "ror",
-    ]
+    renew_techs = config["electricity"]["renewable_carriers"]

     charger = ["H2 electrolysis", "battery charger"]
     discharger = ["H2 fuel cell", "battery discharger"]

-    gens_i = n.generators.query("carrier in @res_techs").index
-    stores_i = n.storage_units.query("carrier in @res_techs").index
+    gens_i = n.generators.query("carrier in @renew_techs").index
+
+    stores_i = n.storage_units.query("carrier in @renew_techs").index
     charger_i = n.links.query("carrier in @charger").index
     discharger_i = n.links.query("carrier in @discharger").index

+    stores_t_weights = n.snapshot_weightings.stores
+
     # Generators
+    # TODO: restore grouping by countries by un-commenting the groupby() calls
     lhs_gen = (
-        linexpr(
-            (n.snapshot_weightings.generators, get_var(n, "Generator", "p")[gens_i].T)
-        )
-        .T.groupby(ggrouper, axis=1)
-        .apply(join_exprs)
+        (n.model["Generator-p"].loc[:, gens_i] * n.snapshot_weightings.generators)
+        # .groupby(ggrouper.to_xarray())
+        .sum()
     )

-    # StorageUnits
-    lhs_dispatch = (
-        (
-            linexpr(
-                (
-                    n.snapshot_weightings.stores,
-                    get_var(n, "StorageUnit", "p_dispatch")[stores_i].T,
-                )
-            )
-            .T.groupby(sgrouper, axis=1)
-            .apply(join_exprs)
-        )
-        .reindex(lhs_gen.index)
-        .fillna("")
+    store_disp_expr = (
+        n.model["StorageUnit-p_dispatch"].loc[:, stores_i] * stores_t_weights
+    )
+    store_expr = n.model["StorageUnit-p_store"].loc[:, stores_i] * stores_t_weights
+    charge_expr = n.model["Link-p"].loc[:, charger_i] * stores_t_weights.apply(
+        lambda r: r * n.links.loc[charger_i].efficiency
+    )
+    discharge_expr = n.model["Link-p"].loc[:, discharger_i] * stores_t_weights.apply(
+        lambda r: r * n.links.loc[discharger_i].efficiency
     )

+    lhs_dispatch = (
+        store_disp_expr
+        # .groupby(sgrouper)
+        .sum()
+    )
     lhs_store = (
-        (
-            linexpr(
-                (
-                    -n.snapshot_weightings.stores,
-                    get_var(n, "StorageUnit", "p_store")[stores_i].T,
-                )
-            )
-            .T.groupby(sgrouper, axis=1)
-            .apply(join_exprs)
-        )
-        .reindex(lhs_gen.index)
-        .fillna("")
+        store_expr
+        # .groupby(sgrouper)
+        .sum()
     )
-
-    # Stores (or their resp. Link components)
-    # Note that the variables "p0" and "p1" currently do not exist.
-    # Thus, p0 and p1 must be derived from "p" (which exists), taking into account the link efficiency.
     lhs_charge = (
-        (
-            linexpr(
-                (
-                    -n.snapshot_weightings.stores,
-                    get_var(n, "Link", "p")[charger_i].T,
-                )
-            )
-            .T.groupby(cgrouper, axis=1)
-            .apply(join_exprs)
-        )
-        .reindex(lhs_gen.index)
-        .fillna("")
+        charge_expr
+        # .groupby(cgrouper)
+        .sum()
     )
-
     lhs_discharge = (
-        (
-            linexpr(
-                (
-                    n.snapshot_weightings.stores.apply(
-                        lambda r: r * n.links.loc[discharger_i].efficiency
-                    ),
-                    get_var(n, "Link", "p")[discharger_i],
-                )
-            )
-            .groupby(cgrouper, axis=1)
-            .apply(join_exprs)
-        )
-        .reindex(lhs_gen.index)
-        .fillna("")
-    )
+        discharge_expr
+        # .groupby(cgrouper)
+        .sum()
+    )

-    # signs of resp. terms are coded in the linexpr.
-    # todo: for links (lhs_charge and lhs_discharge), account for snapshot weightings
-    lhs = lhs_gen + lhs_dispatch + lhs_store + lhs_charge + lhs_discharge
+    lhs = lhs_gen + lhs_dispatch - lhs_store - lhs_charge + lhs_discharge

-    define_constraints(n, lhs, "=", rhs, "RES share")
+    n.model.add_constraints(lhs == rhs, name="res_share")
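In symbols (my notation), and with the country grouping currently commented out so the balance is network-wide, the constraint reads, with snapshot weights $w_t$ and link efficiencies $\eta_l$:

$$
\sum_{t}\sum_{g \in \mathrm{RES}} w_t\, p_{g,t} \;+\; \sum_{t}\sum_{r} w_t\, p^{\mathrm{dispatch}}_{r,t} \;-\; \sum_{t}\sum_{r} w_t\, p^{\mathrm{store}}_{r,t} \;-\; \sum_{t}\sum_{l \in \mathrm{chg}} \eta_l w_t\, p_{l,t} \;+\; \sum_{t}\sum_{l \in \mathrm{dis}} \eta_l w_t\, p_{l,t} \;=\; \mathrm{res\_share} \cdot \sum_t w_t\, d_t
$$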

 def extra_functionality(n, snapshots):
     """
     Collects supplementary constraints which will be passed to
-    ``pypsa.linopf.network_lopf``.
+    ``pypsa.optimization.optimize``.

     If you want to enforce additional custom constraints, this is a good location to add them.
     The arguments ``opts`` and ``snakemake.config`` are expected to be attached to the network.
@@ -499,44 +564,50 @@ def extra_functionality(n, snapshots):
     for o in opts:
         if "RES" in o:
             res_share = float(re.findall("[0-9]*\.?[0-9]+$", o)[0])
-            add_RES_constraints(n, res_share)
+            add_RES_constraints(n, res_share, config)
     for o in opts:
         if "EQ" in o:
             add_EQ_constraints(n, o)
     add_battery_constraints(n)


-def solve_network(n, config, opts="", **kwargs):
-    solver_options = config["solving"]["solver"].copy()
-    solver_name = solver_options.pop("name")
-    cf_solving = config["solving"]["options"]
-    track_iterations = cf_solving.get("track_iterations", False)
-    min_iterations = cf_solving.get("min_iterations", 4)
-    max_iterations = cf_solving.get("max_iterations", 6)
+def solve_network(n, config, solving, **kwargs):
+    set_of_options = solving["solver"]["options"]
+    cf_solving = solving["options"]
+
+    kwargs["solver_options"] = (
+        solving["solver_options"][set_of_options] if set_of_options else {}
+    )
+    kwargs["solver_name"] = solving["solver"]["name"]
+    kwargs["extra_functionality"] = extra_functionality
+
+    skip_iterations = cf_solving.get("skip_iterations", False)
+    if not n.lines.s_nom_extendable.any():
+        skip_iterations = True
+        logger.info("No expandable lines found. Skipping iterative solving.")

     # add to network for extra_functionality
     n.config = config
     n.opts = opts

-    if cf_solving.get("skip_iterations", False):
-        network_lopf(
-            n,
-            solver_name=solver_name,
-            solver_options=solver_options,
-            extra_functionality=extra_functionality,
-            **kwargs,
-        )
+    if skip_iterations:
+        status, condition = n.optimize(**kwargs)
     else:
-        ilopf(
-            n,
-            solver_name=solver_name,
-            solver_options=solver_options,
-            track_iterations=track_iterations,
-            min_iterations=min_iterations,
-            max_iterations=max_iterations,
-            extra_functionality=extra_functionality,
-            **kwargs,
-        )
+        kwargs["track_iterations"] = cf_solving.get("track_iterations", False)
+        kwargs["min_iterations"] = cf_solving.get("min_iterations", 4)
+        kwargs["max_iterations"] = cf_solving.get("max_iterations", 6)
+        status, condition = n.optimize.optimize_transmission_expansion_iteratively(
+            **kwargs
+        )
+
+    if status != "ok":  # and not rolling_horizon:
+        logger.warning(
+            f"Solving status '{status}' with termination condition '{condition}'"
+        )
+    if "infeasible" in condition:
+        labels = n.model.compute_infeasibilities()
+        logger.info(f"Labels:\n{labels}")
+        n.model.print_infeasibilities()
+        raise RuntimeError("Solving status 'infeasible'")

     return n
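With linopy, the `extra_functionality` hook is invoked by `n.optimize` after the model is built, receiving `(n, snapshots)`, so custom constraints go through `n.model`. A self-contained sketch (the constraint, its name, and the 50 GW bound are illustrative):

```python
# Sketch: adding a custom constraint through the extra_functionality hook.
def my_extra_functionality(n, snapshots):
    p_nom = n.model["Generator-p_nom"]
    # Illustrative: cap total extendable generator capacity at 50 GW.
    n.model.add_constraints(p_nom.sum() <= 50_000, name="total_cap_limit")


status, condition = n.optimize(
    solver_name="glpk", extra_functionality=my_extra_functionality
)
```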
@@ -554,25 +625,23 @@ def solve_network(n, config, solving, **kwargs):
     )
     configure_logging(snakemake)

-    tmpdir = snakemake.params.solving.get("tmpdir")
-    if tmpdir is not None:
-        Path(tmpdir).mkdir(parents=True, exist_ok=True)
     opts = snakemake.wildcards.opts.split("-")
     solve_opts = snakemake.params.solving["options"]

     n = pypsa.Network(snakemake.input[0])
-    if snakemake.params.augmented_line_connection.get("add_to_snakefile"):
-        n.lines.loc[n.lines.index.str.contains("new"), "s_nom_min"] = (
-            snakemake.params.augmented_line_connection.get("min_expansion")
-        )
-    n = prepare_network(n, solve_opts)
-
+    # needed to get `n.model` property
+    n.optimize.create_model()
+    # TODO: double-check handling of the augmented case
+    # if snakemake.params.augmented_line_connection.get("add_to_snakefile"):
+    #     n.lines.loc[
+    #         n.lines.index.str.contains("new"), "s_nom_min"
+    #     ] = snakemake.params.augmented_line_connection.get("min_expansion")
+    n = prepare_network(n, solve_opts, config=solve_opts)
     n = solve_network(
         n,
         config=snakemake.config,
-        opts=opts,
-        solver_dir=tmpdir,
-        solver_logfile=snakemake.log.solver,
+        solving=snakemake.params.solving,
+        log_fn=snakemake.log.solver,
     )
     n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
     n.export_to_netcdf(snakemake.output[0])