From 61420b101fbcb06ed2972063a47870d992f4fc1f Mon Sep 17 00:00:00 2001
From: Florian Maurer
Date: Sun, 3 Nov 2024 17:43:29 +0100
Subject: [PATCH] fix grid clearing usage

---
 .../clearing_algorithms/nodal_pricing.py      |  77 ++--
 emarketpy/clearing_algorithms/redispatch.py   |  50 +--
 emarketpy/grid_utils.py                       | 329 ++++++++++++++++++
 emarketpy/utils.py                            |  35 ++
 4 files changed, 447 insertions(+), 44 deletions(-)
 create mode 100644 emarketpy/grid_utils.py

diff --git a/emarketpy/clearing_algorithms/nodal_pricing.py b/emarketpy/clearing_algorithms/nodal_pricing.py
index e53fca8..857a444 100644
--- a/emarketpy/clearing_algorithms/nodal_pricing.py
+++ b/emarketpy/clearing_algorithms/nodal_pricing.py
@@ -17,8 +17,9 @@
     read_pypsa_grid,
 )
 from ..market_objects import MarketConfig, Orderbook
+from ..utils import suppress_output

-log = logging.getLogger(__name__)
+logger = logging.getLogger(__name__)

 logging.getLogger("linopy").setLevel(logging.WARNING)
 logging.getLogger("pypsa").setLevel(logging.WARNING)
@@ -47,7 +48,10 @@ def __init__(self, marketconfig: MarketConfig):
         self.network = pypsa.Network()
         # set snapshots as a range over the number of market products
         self.network.snapshots = range(marketconfig.market_products[0].count)
-        assert self.grid_data
+
+        if not self.grid_data:
+            logger.error(f"Market '{marketconfig.market_id}': grid_data is missing.")
+            raise ValueError("grid_data is missing.")

         read_pypsa_grid(
             network=self.network,
@@ -68,25 +72,23 @@
             loads=self.grid_data["loads"],
         )

-        self.solver = marketconfig.param_dict.get("solver", "glpk")
-        self.env = None
-
+        self.solver = marketconfig.param_dict.get("solver", "highs")
         if self.solver == "gurobi":
-            try:
-                from gurobipy import Env
-
-                self.env = Env()
-                self.env.setParam("LogToConsole", 0)
-            except ImportError:
-                log.error("gurobi not installed - using GLPK")
-                self.solver = "glpk"
+            self.solver_options = {"LogToConsole": 0, "OutputFlag": 0}
+        elif self.solver == "highs":
+            self.solver_options = {"output_flag": False, "log_to_console": False}
+        else:
+            # fall back to the defaults of any other solver
+            self.solver_options = {}

         # set the market clearing principle
         # as pay as bid or pay as clear
         self.payment_mechanism = marketconfig.param_dict.get(
             "payment_mechanism", "pay_as_bid"
         )
-        assert self.payment_mechanism in ["pay_as_bid", "pay_as_clear"]
+
+        if self.payment_mechanism not in ["pay_as_bid", "pay_as_clear"]:
+            logger.error(
+                f"Market '{marketconfig.market_id}': Invalid payment mechanism '{self.payment_mechanism}'."
+            )
+            raise ValueError("Invalid payment mechanism.")

     def setup(self):
         super().setup()
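The option handling above can be exercised standalone. A minimal sketch, assuming only the param_dict keys the patch uses ("solver" and "payment_mechanism"); the dictionary literal stands in for marketconfig.param_dict, and the option names are the real gurobi parameter and HiGHS option names:

    param_dict = {"solver": "highs", "payment_mechanism": "pay_as_clear"}

    solver = param_dict.get("solver", "highs")
    if solver == "gurobi":
        solver_options = {"LogToConsole": 0, "OutputFlag": 0}  # gurobi parameters
    elif solver == "highs":
        solver_options = {"output_flag": False, "log_to_console": False}  # HiGHS options
    else:
        solver_options = {}  # defer to the solver defaults

    payment_mechanism = param_dict.get("payment_mechanism", "pay_as_bid")
    if payment_mechanism not in ["pay_as_bid", "pay_as_clear"]:
        raise ValueError("Invalid payment mechanism.")
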
@@ -149,17 +151,22 @@ def clear(
         # Update marginal costs for generators
         nodal_network.generators_t.marginal_cost.update(costs)

-        status, termination_condition = nodal_network.optimize(
-            solver_name=self.solver,
-            env=self.env,
-        )
+        with suppress_output():
+            status, termination_condition = nodal_network.optimize(
+                solver_name=self.solver,
+                solver_options=self.solver_options,
+            )

         if status != "ok":
-            log.error(f"Solver exited with {termination_condition}")
+            logger.error(f"Solver exited with {termination_condition}")
             raise Exception("Solver in nodal pricing market did not converge")

+        log_flows = True  # always extract line flows for the nodal market
+
         # process dispatch data
-        self.process_dispatch_data(network=nodal_network, orderbook_df=orderbook_df)
+        flows = self.process_dispatch_data(
+            network=nodal_network, orderbook_df=orderbook_df, log_flows=log_flows
+        )

         # return orderbook_df back to orderbook format as list of dicts
         accepted_orders = orderbook_df.to_dict("records")
@@ -173,9 +180,14 @@
                 calculate_network_meta(network=nodal_network, product=product, i=i)
             )

-        return accepted_orders, rejected_orders, meta, {}
+        return accepted_orders, rejected_orders, meta, flows

-    def process_dispatch_data(self, network: pypsa.Network, orderbook_df: pd.DataFrame):
+    def process_dispatch_data(
+        self,
+        network: pypsa.Network,
+        orderbook_df: pd.DataFrame,
+        log_flows: bool = False,
+    ):
         """
         This function processes the dispatch data to calculate the dispatch volumes and prices
         and update the orderbook with the accepted volumes and prices.
@@ -225,3 +237,24 @@
                 nodal_marginal_prices[unit_node],
                 0,
             )
+
+        # get line flows from the optimized pypsa network
+        flows = pd.DataFrame()
+
+        if log_flows and not network.lines_t.p0.empty:
+            # active power flow at bus0 of each line (wide format, one column per line)
+            flows = network.lines_t.p0
+
+            flows["datetime"] = orderbook_df["start_time"].unique()
+            # set datetime as index
+            flows = flows.set_index("datetime", drop=True)
+            # reshape to long format with one row per datetime and line
+            flows = flows.stack().reset_index()
+
+            # rename columns
+            flows.columns = ["datetime", "line", "flow"]
+
+        return flows
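The flow extraction at the end of process_dispatch_data reshapes the wide lines_t.p0 frame (one column per line) into a long frame with one row per datetime/line pair. A standalone sketch of that reshaping with made-up numbers, assuming two snapshots and two lines:

    import pandas as pd

    # wide format, as pypsa stores it: one column per line
    flows = pd.DataFrame(
        {"Line_0": [10.0, 12.5], "Line_1": [-3.0, 4.2]},
        index=pd.to_datetime(["2024-01-01 00:00", "2024-01-01 01:00"]),
    )
    flows.index.name = "datetime"

    # long format, as returned to the market: one row per (datetime, line)
    flows = flows.stack().reset_index()
    flows.columns = ["datetime", "line", "flow"]
    print(flows)
    #              datetime    line  flow
    # 0 2024-01-01 00:00:00  Line_0  10.0
    # 1 2024-01-01 00:00:00  Line_1  -3.0
    # 2 2024-01-01 01:00:00  Line_0  12.5
    # 3 2024-01-01 01:00:00  Line_1   4.2
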
"highs") if self.solver == "gurobi": - try: - from gurobipy import Env - - self.env = Env() - self.env.setParam("LogToConsole", 0) - except ImportError: - log.error("gurobi not installed - using GLPK") - self.solver = "glpk" + self.solver_options = {"LogToConsole": 0, "OutputFlag": 0} + elif self.solver == "highs": + self.solver_options = {"output_flag": False, "log_to_console": False} # set the market clearing principle # as pay as bid or pay as clear self.payment_mechanism = marketconfig.param_dict.get( "payment_mechanism", "pay_as_bid" ) - assert self.payment_mechanism in ["pay_as_bid", "pay_as_clear"] + + if self.payment_mechanism not in ["pay_as_bid", "pay_as_clear"]: + logger.error( + f"Market '{marketconfig.market_id}': Invalid payment mechanism '{self.payment_mechanism}'." + ) + raise ValueError("Invalid payment mechanism.") def setup(self): super().setup() @@ -183,15 +185,16 @@ def clear( # if any line is congested, perform redispatch if line_loading.max().max() > 1: - log.debug("Congestion detected") + logger.debug("Congestion detected") - status, termination_condition = redispatch_network.optimize( - solver_name=self.solver, - env=self.env, - ) + with suppress_output(): + status, termination_condition = redispatch_network.optimize( + solver_name=self.solver, + solver_options=self.solver_options, + ) if status != "ok": - log.error(f"Solver exited with {termination_condition}") + logger.error(f"Solver exited with {termination_condition}") raise Exception("Solver in redispatch market did not converge") # process dispatch data @@ -201,7 +204,7 @@ def clear( # if no congestion is detected set accepted volume and price to 0 else: - log.debug("No congestion detected") + logger.debug("No congestion detected") # return orderbook_df back to orderbook format as list of dicts accepted_orders = orderbook_df.to_dict("records") @@ -215,7 +218,10 @@ def clear( calculate_network_meta(network=redispatch_network, product=product, i=i) ) - return accepted_orders, rejected_orders, meta, {} + # write network flows here if applicable + flows = [] + + return accepted_orders, rejected_orders, meta, flows def process_dispatch_data(self, network: pypsa.Network, orderbook_df: pd.DataFrame): """ diff --git a/emarketpy/grid_utils.py b/emarketpy/grid_utils.py new file mode 100644 index 0000000..b51089d --- /dev/null +++ b/emarketpy/grid_utils.py @@ -0,0 +1,329 @@ +# SPDX-FileCopyrightText: ASSUME Developers +# +# SPDX-License-Identifier: AGPL-3.0-or-later + +from datetime import timedelta + +import numpy as np +import pandas as pd +import pypsa + +from .market_objects import MarketProduct + + +def add_generators( + network: pypsa.Network, + generators: pd.DataFrame, +) -> None: + """ + Add generators normally to the grid + + Args: + network (pypsa.Network): the pypsa network to which the generators are + generators (pandas.DataFrame): the generators dataframe + """ + p_set = pd.DataFrame( + np.zeros((len(network.snapshots), len(generators.index))), + index=network.snapshots, + columns=generators.index, + ) + + if isinstance(generators, dict): + gen_c = generators.copy() + + if "p_min_pu" not in gen_c.columns: + gen_c["p_min_pu"] = p_set + if "p_max_pu" not in gen_c.columns: + gen_c["p_max_pu"] = p_set + 1 + if "marginal_cost" not in gen_c.columns: + gen_c["marginal_cost"] = p_set + + network.madd( + "Generator", + names=generators.index, + bus=generators["node"], # bus to which the generator is connected to + p_nom=generators[ + "max_power" + ], # Nominal capacity of the powerplant/generator + **gen_c, + ) + 
diff --git a/emarketpy/grid_utils.py b/emarketpy/grid_utils.py
new file mode 100644
index 0000000..b51089d
--- /dev/null
+++ b/emarketpy/grid_utils.py
@@ -0,0 +1,329 @@
+# SPDX-FileCopyrightText: ASSUME Developers
+#
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+from datetime import timedelta
+
+import numpy as np
+import pandas as pd
+import pypsa
+
+from .market_objects import MarketProduct
+
+
+def add_generators(
+    network: pypsa.Network,
+    generators: pd.DataFrame,
+) -> None:
+    """
+    Add generators normally to the grid
+
+    Args:
+        network (pypsa.Network): the pypsa network to which the generators are added
+        generators (pandas.DataFrame): the generators dataframe
+    """
+    p_set = pd.DataFrame(
+        np.zeros((len(network.snapshots), len(generators.index))),
+        index=network.snapshots,
+        columns=generators.index,
+    )
+
+    if isinstance(generators, dict):
+        gen_c = generators.copy()
+
+        if "p_min_pu" not in gen_c.columns:
+            gen_c["p_min_pu"] = p_set
+        if "p_max_pu" not in gen_c.columns:
+            gen_c["p_max_pu"] = p_set + 1
+        if "marginal_cost" not in gen_c.columns:
+            gen_c["marginal_cost"] = p_set
+
+        network.madd(
+            "Generator",
+            names=generators.index,
+            bus=generators["node"],  # bus to which the generator is connected
+            p_nom=generators[
+                "max_power"
+            ],  # nominal capacity of the powerplant/generator
+            **gen_c,
+        )
+    else:
+        # add generators with default operational limits and zero marginal cost
+        network.madd(
+            "Generator",
+            names=generators.index,
+            bus=generators["node"],  # bus to which the generator is connected
+            p_nom=generators[
+                "max_power"
+            ],  # nominal capacity of the powerplant/generator
+            p_min_pu=p_set,
+            p_max_pu=p_set + 1,
+            marginal_cost=p_set,
+            **generators,
+        )
+
+
+def add_redispatch_generators(
+    network: pypsa.Network,
+    generators: pd.DataFrame,
+    backup_marginal_cost: float = 1e5,
+) -> None:
+    """
+    Adds the given generators for redispatch.
+    Each generator is added twice, once for upward and once for downward redispatch,
+    and backup generators are added at each node so the model can always adjust
+    the dispatch when a congestion happens.
+
+    Args:
+        network (pypsa.Network): the pypsa network to which the generators are added
+        generators (pandas.DataFrame): the generators dataframe
+        backup_marginal_cost (float, optional): the cost of dispatching the backup units in [€/MW]. Defaults to 1e5.
+    """
+    p_set = pd.DataFrame(
+        np.zeros((len(network.snapshots), len(generators.index))),
+        index=network.snapshots,
+        columns=generators.index,
+    )
+
+    # add generators and their sold capacities as loads with reversed sign to model a fixed feed-in
+    network.madd(
+        "Load",
+        names=generators.index,
+        bus=generators["node"],  # bus to which the generator is connected
+        p_set=p_set,
+        sign=1,
+    )
+
+    # add upward redispatch generators
+    network.madd(
+        "Generator",
+        names=generators.index,
+        suffix="_up",
+        bus=generators["node"],  # bus to which the generator is connected
+        p_nom=generators["max_power"],  # nominal capacity of the powerplant/generator
+        p_min_pu=p_set,
+        p_max_pu=p_set + 1,
+        marginal_cost=p_set,
+    )
+
+    # add downward redispatch generators
+    network.madd(
+        "Generator",
+        names=generators.index,
+        suffix="_down",
+        bus=generators["node"],  # bus to which the generator is connected
+        p_nom=generators["max_power"],  # nominal capacity of the powerplant/generator
+        p_min_pu=p_set,
+        p_max_pu=p_set + 1,
+        marginal_cost=p_set,
+        sign=-1,
+    )
+
+    # add upward and downward backup generators at each node
+    network.madd(
+        "Generator",
+        names=network.buses.index,
+        suffix="_backup_up",
+        bus=network.buses.index,  # bus to which the generator is connected
+        p_nom=10e4,  # very large backup capacity to keep the model feasible
+        marginal_cost=backup_marginal_cost,
+    )
+
+    network.madd(
+        "Generator",
+        names=network.buses.index,
+        suffix="_backup_down",
+        bus=network.buses.index,  # bus to which the generator is connected
+        p_nom=10e4,  # very large backup capacity to keep the model feasible
+        marginal_cost=backup_marginal_cost,
+        sign=-1,
+    )
+
+
+def add_backup_generators(
+    network: pypsa.Network,
+    backup_marginal_cost: float = 1e5,
+) -> None:
+    """
+    Add backup generators at each node of the grid
+
+    Args:
+        network (pypsa.Network): the pypsa network to which the backup generators are added
+        backup_marginal_cost (float, optional): the cost of dispatching the backup units in [€/MW]. Defaults to 1e5.
+    """
+
+    # add backup generators at each node
+    network.madd(
+        "Generator",
+        names=network.buses.index,
+        suffix="_backup",
+        bus=network.buses.index,  # bus to which the generator is connected
+        p_nom=10e4,  # very large backup capacity to keep the model feasible
+        marginal_cost=backup_marginal_cost,
+    )
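A hypothetical usage sketch for the redispatch helpers above: each unit appears once as a fixed Load (its cleared schedule) and twice as a Generator ("_up" and "_down"), plus "_backup_up"/"_backup_down" units at every bus. The two-bus network and the generator data are made up for illustration:

    import pandas as pd
    import pypsa

    from emarketpy.grid_utils import add_redispatch_generators

    network = pypsa.Network()
    network.snapshots = range(4)
    network.add("Bus", "north")
    network.add("Bus", "south")

    generators = pd.DataFrame(
        {"node": ["north", "south"], "max_power": [100.0, 50.0]},
        index=["gen_a", "gen_b"],
    )
    add_redispatch_generators(network, generators)

    print(sorted(network.generators.index))
    # ['gen_a_down', 'gen_a_up', 'gen_b_down', 'gen_b_up',
    #  'north_backup_down', 'north_backup_up', 'south_backup_down', 'south_backup_up']
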
+
+
+def add_loads(
+    network: pypsa.Network,
+    loads: pd.DataFrame,
+) -> None:
+    """
+    Add loads normally to the grid
+
+    Args:
+        network (pypsa.Network): the pypsa network to which the loads are added
+        loads (pandas.DataFrame): the loads dataframe
+    """
+
+    # add loads
+    network.madd(
+        "Load",
+        names=loads.index,
+        bus=loads["node"],  # bus to which the load is connected
+        **loads,
+    )
+
+    if "p_set" not in loads.columns:
+        network.loads_t["p_set"] = pd.DataFrame(
+            np.zeros((len(network.snapshots), len(loads.index))),
+            index=network.snapshots,
+            columns=loads.index,
+        )
+
+
+def add_redispatch_loads(
+    network: pypsa.Network,
+    loads: pd.DataFrame,
+) -> None:
+    """
+    This adds loads to the redispatch PyPSA network with the bus data to which they are connected.
+    """
+    loads_c = loads.copy()
+    if "sign" in loads_c.columns:
+        del loads_c["sign"]
+
+    # add loads with reversed sign (the default sign for loads is -1),
+    # which is needed to properly model the redispatch
+    network.madd(
+        "Load",
+        names=loads.index,
+        bus=loads["node"],  # bus to which the load is connected
+        sign=1,
+        **loads_c,
+    )
+
+    if "p_set" not in loads.columns:
+        network.loads_t["p_set"] = pd.DataFrame(
+            np.zeros((len(network.snapshots), len(loads.index))),
+            index=network.snapshots,
+            columns=loads.index,
+        )
+
+
+def add_nodal_loads(
+    network: pypsa.Network,
+    loads: pd.DataFrame,
+) -> None:
+    """
+    This adds loads to the nodal PyPSA network with the bus data to which they are connected.
+    The loads are added as generators with negative sign so that their dispatch can also be curtailed,
+    since a regular load in PyPSA represents only an inelastic demand.
+    """
+    p_set = pd.DataFrame(
+        np.zeros((len(network.snapshots), len(loads.index))),
+        index=network.snapshots,
+        columns=loads.index,
+    )
+    loads_c = loads.copy()
+
+    if "sign" in loads_c.columns:
+        del loads_c["sign"]
+
+    # add loads as negative generators
+    network.madd(
+        "Generator",
+        names=loads.index,
+        bus=loads["node"],  # bus to which the load is connected
+        p_nom=loads["max_power"],  # nominal capacity of the load
+        p_min_pu=p_set,
+        p_max_pu=p_set + 1,
+        marginal_cost=p_set,
+        sign=-1,
+        **loads_c,
+    )
+
+
+def read_pypsa_grid(
+    network: pypsa.Network,
+    grid_dict: dict[str, pd.DataFrame],
+):
+    """
+    Generates the pypsa grid from a grid dictionary.
+    Does not add the generators, as they are added in different ways, depending on whether redispatch is used.
+
+    Args:
+        network (pypsa.Network): the pypsa network to which the components will be added
+        grid_dict (dict[str, pd.DataFrame]): the dictionary containing dataframes for buses and lines
+            (generators and loads are added separately)
+    """
+
+    def add_buses(network: pypsa.Network, buses: pd.DataFrame) -> None:
+        network.import_components_from_dataframe(buses, "Bus")
+
+    def add_lines(network: pypsa.Network, lines: pd.DataFrame) -> None:
+        network.import_components_from_dataframe(lines, "Line")
+
+    # setup the network
+    add_buses(network, grid_dict["buses"])
+    add_lines(network, grid_dict["lines"])
+    network.add("Carrier", "AC")
+    return network
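A hypothetical sketch of the grid_dict that read_pypsa_grid consumes. The bus and line column names (v_nom, bus0, bus1, s_nom, x, r) are standard PyPSA component attributes; the concrete values are made up:

    import pandas as pd
    import pypsa

    from emarketpy.grid_utils import read_pypsa_grid

    buses = pd.DataFrame({"v_nom": [380.0, 380.0]}, index=["north", "south"])
    lines = pd.DataFrame(
        {"bus0": ["north"], "bus1": ["south"], "s_nom": [200.0], "x": [0.01], "r": [0.001]},
        index=["Line_0"],
    )

    network = pypsa.Network()
    network.snapshots = range(24)
    read_pypsa_grid(network, {"buses": buses, "lines": lines})
    print(network.buses.index.tolist(), network.lines.index.tolist())
    # ['north', 'south'] ['Line_0']
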
+
+
+def calculate_network_meta(network, product: MarketProduct, i: int):
+    """
+    This function calculates the meta data such as supply and demand volumes, and nodal prices.
+
+    Args:
+        network (pypsa.Network): The optimized pypsa network.
+        product (MarketProduct): The product for which clearing happens.
+        i (int): The index of the product in the market products list.
+
+    Returns:
+        list[dict]: The meta data for each bus.
+    """
+
+    meta = []
+    duration_hours = (product[1] - product[0]) / timedelta(hours=1)
+    # iterate over buses
+    for bus in network.buses.index:
+        # add backup dispatch to dispatch
+        # Step 1: identify generators connected to the specified bus
+        generators_connected_to_bus = network.generators[
+            network.generators.bus == bus
+        ].index
+
+        # Step 2: select dispatch levels for these generators from network.generators_t.p
+        dispatch_for_bus = network.generators_t.p[generators_connected_to_bus].iloc[i]
+        # multiply by network.generators.sign to get the correct sign for the dispatch
+        dispatch_for_bus = (
+            dispatch_for_bus * network.generators.sign[generators_connected_to_bus]
+        )
+
+        supply_volume = dispatch_for_bus[dispatch_for_bus > 0].sum()
+        demand_volume = -dispatch_for_bus[dispatch_for_bus < 0].sum()
+        if not network.buses_t.marginal_price.empty:
+            price = network.buses_t.marginal_price[str(bus)].iat[i]
+        else:
+            price = 0
+
+        meta.append(
+            {
+                "supply_volume": supply_volume,
+                "demand_volume": demand_volume,
+                "demand_volume_energy": demand_volume * duration_hours,
+                "supply_volume_energy": supply_volume * duration_hours,
+                "price": price,
+                "node": bus,
+                "product_start": product[0],
+                "product_end": product[1],
+                "only_hours": product[2],
+            }
+        )
+
+    return meta
diff --git a/emarketpy/utils.py b/emarketpy/utils.py
index a896d04..dda145c 100644
--- a/emarketpy/utils.py
+++ b/emarketpy/utils.py
@@ -3,8 +3,11 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later

 import calendar
+import contextlib
 import inspect
 import logging
+import os
+import sys
 from collections import defaultdict
 from datetime import datetime, timedelta, timezone
 from functools import wraps
@@ -639,3 +642,35 @@ def check_for_tensors(data):
         pass

     return data
+
+
+# context manager to suppress output written to stdout and stderr,
+# including output from non-Python code such as solver libraries
+@contextlib.contextmanager
+def suppress_output():
+    # save the original stdout and stderr file descriptors
+    original_stdout_fd = sys.stdout.fileno()
+    original_stderr_fd = sys.stderr.fileno()
+
+    # open /dev/null for redirecting output
+    devnull = os.open(os.devnull, os.O_WRONLY)
+
+    # duplicate the original stdout and stderr file descriptors to restore them later
+    saved_stdout_fd = os.dup(original_stdout_fd)
+    saved_stderr_fd = os.dup(original_stderr_fd)
+
+    try:
+        # redirect stdout and stderr to /dev/null
+        os.dup2(devnull, original_stdout_fd)
+        os.dup2(devnull, original_stderr_fd)
+
+        yield  # allow the block of code to execute
+
+    finally:
+        # restore stdout and stderr from the saved file descriptors
+        os.dup2(saved_stdout_fd, original_stdout_fd)
+        os.dup2(saved_stderr_fd, original_stderr_fd)
+
+        # close the duplicated file descriptors and /dev/null
+        os.close(saved_stdout_fd)
+        os.close(saved_stderr_fd)
+        os.close(devnull)
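The new suppress_output context manager silences writes at the file-descriptor level, so it also catches output from compiled solver libraries that bypass sys.stdout. A small usage sketch (note it requires real file descriptors, so it will not work where sys.stdout has no fileno, e.g. in some notebooks):

    from emarketpy.utils import suppress_output

    with suppress_output():
        print("this text is swallowed by /dev/null")

    print("this text is visible again")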