From 8021c56e5a22fceba261163c18f04427e195dab9 Mon Sep 17 00:00:00 2001 From: Florian Maurer Date: Fri, 12 Jan 2024 17:12:01 +0100 Subject: [PATCH 1/2] Add MASTR based OEDS loader - move scenario loaders to separate folder (#264) This adds the documented OEDS loader which can be used to create scenarios from MASTR for NUTS locations, if an open-energy-data-server is available. The scenario loaders are moved to a separate `scenario` folder and all begin with `loader_` Documentation is added accordingly, which reflects the possibilities and a small usage guide of it --- assume/__init__.py | 5 - assume/common/base.py | 2 + .../loader_amiris.py} | 3 +- .../loader_csv.py} | 0 assume/scenario/loader_oeds.py | 271 +++++ assume/scenario/oeds/infrastructure.py | 988 ++++++++++++++++++ assume/scenario/oeds/static.py | 294 ++++++ assume/units/demand.py | 2 +- assume/units/powerplant.py | 3 +- assume/units/storage.py | 5 +- cli.py | 5 +- docs/source/assume.common.rst | 16 +- docs/source/index.rst | 2 + docs/source/scenario_loader.rst | 116 ++ examples/examples.py | 3 +- pyproject.toml | 4 + tests/test_dmas_powerplant.py | 4 +- tests/test_flexable_strategies.py | 2 +- tests/test_powerplant.py | 8 +- tests/test_utils.py | 2 +- tests/test_world.py | 3 +- 21 files changed, 1706 insertions(+), 32 deletions(-) rename assume/{common/scenario_loader_amiris.py => scenario/loader_amiris.py} (99%) rename assume/{common/scenario_loader.py => scenario/loader_csv.py} (100%) create mode 100644 assume/scenario/loader_oeds.py create mode 100644 assume/scenario/oeds/infrastructure.py create mode 100644 assume/scenario/oeds/static.py create mode 100644 docs/source/scenario_loader.rst diff --git a/assume/__init__.py b/assume/__init__.py index 7bcd6545..9116adac 100644 --- a/assume/__init__.py +++ b/assume/__init__.py @@ -3,11 +3,6 @@ # SPDX-License-Identifier: AGPL-3.0-or-later from assume.common import MarketConfig, MarketProduct -from assume.common.scenario_loader import ( - load_custom_units, - 
load_scenario_folder, - run_learning, -) from assume.world import World __version__ = "0.0.1" diff --git a/assume/common/base.py b/assume/common/base.py index 9169b304..b28aa968 100644 --- a/assume/common/base.py +++ b/assume/common/base.py @@ -41,12 +41,14 @@ def __init__( index: pd.DatetimeIndex, node: str = "", forecaster: Forecaster = None, + location: tuple[float, float] = (0.0, 0.0), **kwargs, ): self.id = id self.unit_operator = unit_operator self.technology = technology self.node = node + self.location = location self.bidding_strategies: dict[str, BaseStrategy] = bidding_strategies self.index = index self.outputs = defaultdict(lambda: pd.Series(0.0, index=self.index)) diff --git a/assume/common/scenario_loader_amiris.py b/assume/scenario/loader_amiris.py similarity index 99% rename from assume/common/scenario_loader_amiris.py rename to assume/scenario/loader_amiris.py index e9173988..1b139aa3 100644 --- a/assume/common/scenario_loader_amiris.py +++ b/assume/scenario/loader_amiris.py @@ -10,6 +10,7 @@ import pandas as pd import yaml from tqdm import tqdm +from yamlinclude import YamlIncludeConstructor from assume.common.base import LearningConfig from assume.common.forecasts import CsvForecaster, Forecaster, NaiveForecast @@ -17,8 +18,6 @@ from assume.world import World logger = logging.getLogger(__name__) -import yaml -from yamlinclude import YamlIncludeConstructor translate_clearing = { "SAME_SHARES": "pay_as_clear", diff --git a/assume/common/scenario_loader.py b/assume/scenario/loader_csv.py similarity index 100% rename from assume/common/scenario_loader.py rename to assume/scenario/loader_csv.py diff --git a/assume/scenario/loader_oeds.py b/assume/scenario/loader_oeds.py new file mode 100644 index 00000000..9ef25856 --- /dev/null +++ b/assume/scenario/loader_oeds.py @@ -0,0 +1,271 @@ +# SPDX-FileCopyrightText: ASSUME Developers +# +# SPDX-License-Identifier: AGPL-3.0-or-later +import logging +import os +import shutil +import tempfile +from datetime 
async def load_oeds_async(
    world: World,
    scenario: str,
    study_case: str,
    infra_uri: str,
    marketdesign: list[MarketConfig],
    nuts_config: list[str] | None = None,
    year: int = 2019,
):
    """
    Initialize a scenario using the open-energy-data-server
    https://github.com/NOWUM/open-energy-data-server/

    Scenarios can use any NUTS area in Germany and any year with
    appropriate weather available.

    :param world: the world to add markets and units to
    :param scenario: name of the scenario (first part of the simulation id)
    :param study_case: name of the study case (second part of the simulation id)
    :param infra_uri: database URI of the open-energy-data-server
    :param marketdesign: market configs registered at the market operator
    :param nuts_config: NUTS3 areas to load; all known areas when empty/None
    :param year: simulated year (previously hard-coded to 2019)
    """
    start = datetime(year, 1, 1)
    end = datetime(year + 1, 1, 1) - timedelta(hours=1)
    index = pd.date_range(
        start=start,
        end=end,
        freq="H",
    )
    sim_id = f"{scenario}_{study_case}"
    print(f"loading scenario {sim_id}")
    infra_interface = InfrastructureInterface("test", infra_uri)

    # None sentinel instead of a mutable default argument;
    # an empty selection means "all NUTS3 areas known to the server"
    if not nuts_config:
        nuts_config = list(infra_interface.plz_nuts["nuts3"].unique())

    await world.setup(
        start=start,
        end=end,
        save_frequency_hours=48,
        simulation_id=sim_id,
        index=index,
    )

    # setup eom market
    mo_id = "market_operator"
    world.add_market_operator(id=mo_id)
    for market_config in marketdesign:
        world.add_market(mo_id, market_config)

    co2_price = 1
    # fuel prices in currency per MWh thermal
    fuel_prices = {
        "hard coal": 8.6,
        "lignite": 1.8,
        "oil": 22,
        "gas": 26,
        "biomass": 20,
        "nuclear": 1,
        "co2": 20,
    }

    default_strategy = {"energy": "naive"}

    # register the DMAS powerplant strategy so fuels can be switched to
    # {"energy": "dmas_pwp"} below if desired
    world.bidding_strategies["dmas_pwp"] = DmasPowerplantStrategy
    bidding_strategies = {
        "hard coal": default_strategy,
        "lignite": default_strategy,
        "oil": default_strategy,
        "gas": default_strategy,
        "biomass": default_strategy,
        "nuclear": default_strategy,
        "wind": default_strategy,
        "solar": default_strategy,
        "demand": default_strategy,
    }

    # for each area - add demand and generation
    for area in nuts_config:
        print(f"loading config {area} for {year}")
        # time series are cached on disk per area and year
        config_path = Path.home() / ".assume" / f"{area}_{year}"
        if not config_path.is_dir():
            print("query database time series")
            demand = infra_interface.get_demand_series_in_area(area, year)
            # demand in MW
            demand = demand.resample("H").mean()
            solar, wind = infra_interface.get_renewables_series_in_area(
                area,
                start,
                end,
            )
            try:
                config_path.mkdir(parents=True, exist_ok=True)
                demand.to_csv(config_path / "demand.csv")
                solar.to_csv(config_path / "solar.csv")
                if isinstance(wind, float):
                    # a scalar means the query returned no series; log the
                    # anomaly - to_csv below then raises and the cache
                    # directory is removed again
                    print(wind, area, year)
                wind.to_csv(config_path / "wind.csv")
            except Exception:
                # never keep a half-written cache directory
                shutil.rmtree(config_path, ignore_errors=True)
        else:
            print("use existing local time series")
            demand = pd.read_csv(config_path / "demand.csv", index_col=0).squeeze()
            solar = pd.read_csv(config_path / "solar.csv", index_col=0).squeeze()
            wind = pd.read_csv(config_path / "wind.csv", index_col=0).squeeze()

        lat, lon = infra_interface.get_lat_lon_area(area)

        # aggregate the sector columns to one total demand series
        sum_demand = demand.sum(axis=1)

        world.add_unit_operator(f"demand{area}")
        world.add_unit(
            f"demand{area}1",
            "demand",
            f"demand{area}",
            # the unit_params have no hints
            {
                "min_power": 0,
                "max_power": sum_demand.max(),
                "bidding_strategies": bidding_strategies["demand"],
                "technology": "demand",
                "location": (lat, lon),
                "node": area,
            },
            NaiveForecast(index, demand=sum_demand),
        )

        world.add_unit_operator(f"renewables{area}")
        world.add_unit(
            f"renewables{area}_solar",
            "power_plant",
            f"renewables{area}",
            # the unit_params have no hints
            {
                "min_power": 0,
                "max_power": solar.max(),
                "bidding_strategies": bidding_strategies["solar"],
                "technology": "solar",
                "location": (lat, lon),
                "node": area,
            },
            NaiveForecast(
                index, availability=solar / solar.max(), fuel_price=0.1, co2_price=0
            ),
        )
        world.add_unit(
            f"renewables{area}_wind",
            "power_plant",
            f"renewables{area}",
            # the unit_params have no hints
            {
                "min_power": 0,
                "max_power": wind.max(),
                "bidding_strategies": bidding_strategies["wind"],
                "technology": "wind",
                "location": (lat, lon),
                "node": area,
            },
            NaiveForecast(
                index, availability=wind / wind.max(), fuel_price=0.2, co2_price=0
            ),
        )

        # TODO add biomass, run_hydro and storages

        world.add_unit_operator(f"conventional{area}")

        for fuel_type in ["nuclear", "lignite", "hard coal", "oil", "gas"]:
            plants = infra_interface.get_power_plant_in_area(area, fuel_type)
            plants = list(plants.T.to_dict().values())
            for i, plant in enumerate(plants, start=1):
                world.add_unit(
                    f"conventional{area}_{fuel_type}_{i}",
                    "power_plant",
                    f"conventional{area}",
                    # the unit_params have no hints
                    {
                        "min_power": plant["minPower"] / 1e3,  # kW -> MW
                        "max_power": plant["maxPower"] / 1e3,  # kW -> MW
                        "bidding_strategies": bidding_strategies[fuel_type],
                        "emission_factor": plant["chi"],  # [t/MWh therm]
                        "efficiency": plant["eta"],
                        "technology": fuel_type,
                        "cold_start_cost": plant["start_cost"],
                        "ramp_up": plant["ramp_up"],
                        "ramp_down": plant["ramp_down"],
                        "location": (lat, lon),
                        "node": area,
                    },
                    NaiveForecast(
                        index,
                        availability=1,
                        fuel_price=fuel_prices[fuel_type],
                        co2_price=co2_price,
                    ),
                )


def load_oeds(
    world: World,
    scenario: str,
    study_case: str,
    infra_uri: str,
    marketdesign: list[MarketConfig],
    nuts_config: list[str] | None = None,
    year: int = 2019,
):
    """
    Load an OEDS scenario into the given world (blocking wrapper around
    :func:`load_oeds_async`).

    :param world: the world to populate
    :param scenario: name of the scenario
    :param study_case: name of the study case
    :param infra_uri: database URI of the open-energy-data-server
    :param marketdesign: market configs registered at the market operator
    :param nuts_config: NUTS3 areas to load; all known areas when empty/None
    :param year: simulated year
    """
    world.loop.run_until_complete(
        load_oeds_async(
            world=world,
            scenario=scenario,
            study_case=study_case,
            infra_uri=infra_uri,
            marketdesign=marketdesign,
            nuts_config=nuts_config,
            year=year,
        )
    )


if __name__ == "__main__":
    db_uri = "postgresql://assume:assume@localhost:5432/assume"
    world = World(database_uri=db_uri)
    scenario = "world_mastr"
    study_case = "study_case"
    # FH Aachen internal server
    infra_uri = os.getenv(
        "INFRASTRUCTURE_URI",
        "postgresql://readonly:readonly@timescale.nowum.fh-aachen.de:5432",
    )

    nuts_config = ["DE1", "DEA", "DEB", "DEC", "DED", "DEE", "DEF"]
    # bugfix: start/end were previously undefined in this scope (NameError) -
    # they only existed inside load_oeds_async. Define the simulated horizon
    # explicitly, consistent with the loader's default year.
    year = 2019
    start = datetime(year, 1, 1)
    end = datetime(year + 1, 1, 1) - timedelta(hours=1)
    marketdesign = [
        MarketConfig(
            "EOM",
            rr.rrule(rr.HOURLY, interval=24, dtstart=start, until=end),
            timedelta(hours=1),
            "pay_as_clear",
            [MarketProduct(timedelta(hours=1), 24, timedelta(hours=1))],
            additional_fields=["block_id", "link", "exclusive_id"],
            maximum_bid_volume=1e9,
            maximum_bid_price=1e9,
        )
    ]
    load_oeds(world, scenario, study_case, infra_uri, marketdesign, nuts_config, year)
    world.run()
"ghi", + "wind_speed", +] + + +class InfrastructureInterface: + def __init__( + self, + name, + db_server_uri, + structure_databases=("mastr", "oep", "windmodel", "nuts", "scigrid", "weather"), + ): + self.databases = {} + for db in structure_databases: + self.databases[db] = create_engine( + f"{db_server_uri}/{db}", + connect_args={"application_name": name}, + ) + self.setup() + + def setup(self): + with self.databases["nuts"].connect() as conn: + self.plz_nuts = pd.read_sql_query( + "select code, nuts3, longitude, latitude from plz", + conn, + index_col="code", + ) + + query = 'select kw."Id", "Wert", "Name" from "Katalogwerte" kw join "Katalogkategorien" kk on kw."KatalogKategorieId"=kk."Id"' + with self.databases["mastr"].connect() as conn: + katalogwerte = pd.read_sql_query(query, conn, index_col="Id") + + energietraeger = katalogwerte[ + katalogwerte["Name"].str.contains("Energieträger") + ] + del energietraeger["Name"] + energietraeger = energietraeger.to_dict()["Wert"] + + self.energietraeger_translated = { + fuel_translation.get(y, "unknown"): x for x, y in energietraeger.items() + } + + verbrennungsanlagen = katalogwerte[ + katalogwerte["Name"] == "TechnologieVerbrennungsanlagen" + ] + kernkraft = katalogwerte[katalogwerte["Name"] == "TechnologieKernkraft"] + verbrennungsanlagen = pd.concat([verbrennungsanlagen, kernkraft]) + del verbrennungsanlagen["Name"] + self.mastr_generation_codes = verbrennungsanlagen.to_dict()["Wert"] + # solarlage = katalogwerte[katalogwerte["Name"] == "SolarLage"] + + windlage = katalogwerte[katalogwerte["Name"] == "WindLage"] + del windlage["Name"] + windlage = windlage.to_dict()["Wert"] + windlage_translation = { + "Windkraft an Land": "on_shore", + "Windkraft auf See": "off_shore", + } + + self.mastr_wind_type = { + windlage_translation.get(y, "unknown"): x for x, y in windlage.items() + } + + windhersteller = katalogwerte[katalogwerte["Name"] == "WindHersteller"] + del windhersteller["Name"] + self.windhersteller = 
windhersteller.to_dict()["Wert"] + + def get_lat_lon(self, plz): + if not isinstance(plz, int): + raise ValueError(f"invalid plz {plz}") + latitude, longitude = self.plz_nuts.loc[plz, ["latitude", "longitude"]] + return latitude, longitude + + def get_lat_lon_area(self, area): + if not area.startswith("DE"): + return self.get_lat_lon(area) + plz_codes = self.get_plz_codes(area) + lat_lons = [self.get_lat_lon(plz) for plz in plz_codes] + lat, lon = np.array(lat_lons).mean(axis=0) + return lat, lon + + def get_plz_codes(self, area): + plzs = self.plz_nuts["nuts3"].str.startswith(area) + return list(self.plz_nuts.loc[plzs].index) + + def aggregate_cchps(self, df): + # CCHP Power Plant with Combination + cchps = df[df["combination"] == 1] + new_cchps = [] + # aggregate with generatorID + for genID in cchps["generatorID"].unique(): + if genID != 0: + cchp = cchps[cchps["generatorID"] == genID] + cchp.index = range(len(cchp)) + cchp.at[0, "maxPower"] = sum(cchp["maxPower"]) + cchp.at[0, "kwkPowerTherm"] = sum(cchp["kwkPowerTherm"]) + cchp.at[0, "kwkPowerElec"] = sum(cchp["kwkPowerElec"]) + cchp.at[0, "turbineTyp"] = "Closed Cycle Heat Power" + cchp.at[0, "fuel"] = "gas_combined" + new_cchps.append( + cchp.loc[0, cchp.columns] + ) # only append the aggregated row! 
+ else: + cchp = cchps[cchps["generatorID"] == 0] + cchp.at[0, "turbineTyp"] = "Closed Cycle Heat Power" + cchp.at[0, "fuel"] = "gas_combined" + for line in range(len(cchp)): + new_cchps.append(cchp.iloc[line]) # append all rows + + # combine the gas turbines without combination flag with the new created + df = pd.concat([df[df["combination"] == 0], pd.DataFrame(new_cchps)]) + df.index = range(len(df)) + + # check the gas turbines with non set combination flag but turbine = typ Closed Cycle Heat Power + for line, row in df.iterrows(): + if all( + [ + row["combination"] == 0, + row["turbineTyp"] == "Closed Cycle Heat Power", + row["fuel"] == "gas", + ] + ): + df.at[line, "fuel"] = "gas_combined" + + return df + + def set_default_params(self, df: pd.DataFrame): + # df["maxPower"] is Rated Power [kW] + df["minPower"] = df["maxPower"] * 0.5 # MinPower = 1/2 MaxPower + df["P0"] = df["minPower"] + 0.1 + df["ramp_up"] = 0.1 * df["maxPower"] # 10% Change per hour + df["ramp_down"] = 0.1 * df["maxPower"] # 10% Change per hour + df["min_down_time"] = 5 # default min stop time 5h + df["min_operating_time"] = 5 # default min run time 5h + df["on"] = 1 # on counter --> Plant is on till 1 hour + df["off"] = 0 # off counter --> Plant is on NOT off + df["eta"] = 0.3 # efficiency + df["chi"] = 1.0 # emission factor [t/MWh therm] + df["start_cost"] = 100 * df["maxPower"] # starting cost [€/kW Rated] + + df["turbineTyp"] = df["turbineTyp"].replace(self.mastr_generation_codes) + + df["startDate"] = df["startDate"].fillna(pd.to_datetime("2005-05-05")) + df["startDate"] = pd.to_datetime(df["startDate"]) + if "combination" in df.columns: # if no combination flag is set, set it to 0 + # 0 if None, else 1 + df["combination"] = df["combination"].notna().astype(int) + else: # Add column for nuclear power plants + df["combination"] = 0 + + type_years = np.asarray([0, 2000, 2024]) # technical setting typ + df["type"] = [type_years[type_years < x.year][-1] for x in df["startDate"]] + 
df["generatorID"] = df["generatorID"].fillna(0) + if "kwkPowerTherm" in df.columns: + df["kwkPowerTherm"] = df["kwkPowerTherm"].fillna(0) + else: + df["kwkPowerTherm"] = 0 + + if "kwkPowerElec" in df.columns: + df["kwkPowerElec"] = df["kwkPowerElec"].fillna(0) + else: + df["kwkPowerElec"] = 0 + return df + + def get_power_plant_in_area(self, area=52353, fuel_type="lignite"): + if isinstance(area, str) and area.startswith("DE"): + plz_codes = self.get_plz_codes(area) + if not plz_codes: + raise Exception("invalid areas") + else: + plz_codes = [area] + + for plz in plz_codes: + if plz not in self.plz_nuts.index: + raise Exception("invalid plz code") + + longitude, latitude = self.get_lat_lon_area(area) + + query = f""" + SELECT ev."EinheitMastrNummer" as "unitID", + ev."Energietraeger" as "fuel", + COALESCE(ev."Laengengrad", {longitude}) as "lon", + COALESCE(ev."Breitengrad", {latitude}) as "lat", + COALESCE(ev."Inbetriebnahmedatum", '2010-01-01') as "startDate", + ev."Nettonennleistung" as "maxPower", + COALESCE(ev."Technologie", 839) as "turbineTyp", + ev."GenMastrNummer" as "generatorID" + """ + plz_codes_str = ", ".join([str(x) for x in plz_codes]) + plz_codes_str = f"({plz_codes_str})" + if fuel_type != "nuclear": + query += f""" + , + kwk."ThermischeNutzleistung" as "kwkPowerTherm", + kwk."ElektrischeKwkLeistung" as "kwkPowerElec", + ev."AnlageIstImKombibetrieb" as "combination" + FROM "EinheitenVerbrennung" ev + LEFT JOIN "AnlagenKwk" kwk ON kwk."KwkMastrNummer" = ev."KwkMastrNummer" + WHERE ev."Postleitzahl" in {plz_codes_str} + AND ev."Energietraeger" = {self.energietraeger_translated[fuel_type]} + AND ev."Nettonennleistung" > 5000 AND ev."EinheitBetriebsstatus" = 35 + AND ev."ArtDerStilllegung" isnull; + """ + else: + query += f""" + FROM "EinheitenKernkraft" ev + WHERE ev."Postleitzahl" in {plz_codes_str} + """ + + with self.databases["mastr"].connect() as conn: + df = pd.read_sql(query, conn) + + if df.empty: + return df + + df["fuel"] = fuel_type # 
current fuel typ + df = self.set_default_params(df) + + # for all gas turbines check if they are used in a combination of gas and steam turbine + if fuel_type == "gas": + df = self.aggregate_cchps(df) + + # TODO this should not happen + df = df[~df.type.isna()] + + for line, row in df.iterrows(): + type_year = row["type"] + if fuel_type in technical_parameter: + tech_params = technical_parameter[fuel_type][type_year] + else: + tech_params = technical_parameter["gas_combined"][0] + + df.at[line, "minPower"] = ( + df.at[line, "maxPower"] * tech_params.get("minPower", 0) / 100 + ) + df.at[line, "P0"] = df.at[line, "minPower"] + df.at[line, "ramp_up"] = np.round( + df.at[line, "maxPower"] * tech_params["ramp_up"] * 60 / 100, + 2, + ) + df.at[line, "ramp_down"] = np.round( + df.at[line, "maxPower"] * tech_params["ramp_down"] * 60 / 100, + 2, + ) + df.at[line, "eta"] = tech_params["eta"] / 100 # convert to percentage + df.at[line, "chi"] = ( + tech_params["chi"] / 1e3 + ) # [t CO2/MWh therm.] -> [t CO2/kWh therm.] 
+ df.at[line, "min_down_time"] = tech_params["min_down_time"] + df.at[line, "min_operating_time"] = tech_params["min_operating_time"] + + start_cost = tech_params["start_cost"] / 1e3 # [€/MW] -> [€/kW] + df.at[line, "start_cost"] = ( + df.at[line, "maxPower"] * start_cost * 2 + ) # multiply by 2 to respect heat demand + + return df + + def get_solar_systems_in_area(self, area=520, solar_type="roof_top"): + if isinstance(area, str) and area.startswith("DE"): + plz_codes = self.get_plz_codes(area) + if not plz_codes: + raise Exception("invalid areas") + else: + plz_codes = [area] + + for plz in plz_codes: + if plz not in self.plz_nuts.index: + raise Exception("invalid plz code") + + longitude, latitude = self.get_lat_lon_area(area) + plz_codes_str = ", ".join([str(x) for x in plz_codes]) + plz_codes_str = f"({plz_codes_str})" + + query = ( + f'SELECT "EinheitMastrNummer" as "unitID", ' + f'"Nettonennleistung" as "maxPower", ' + f'COALESCE("Laengengrad", {longitude}) as "lon", ' + f'COALESCE("Breitengrad", {latitude}) as "lat", ' + f'COALESCE("Hauptausrichtung", 699) as "azimuthCode", ' + f'"Leistungsbegrenzung" as "limited", ' + f'"Einspeisungsart" as "ownConsumption", ' + f'COALESCE("HauptausrichtungNeigungswinkel", 809) as "tiltCode", ' + f'COALESCE("Inbetriebnahmedatum", \'2018-01-01\') as "startDate",' + f'"InanspruchnahmeZahlungNachEeg" as "eeg" ' + f'FROM "EinheitenSolar" ' + f'INNER JOIN "AnlagenEegSolar" ON "EinheitMastrNummer" = "VerknuepfteEinheitenMastrNummern" ' + f'WHERE "Postleitzahl" in {plz_codes_str} ' + f'AND "Lage" = {mastr_solar_codes[solar_type]} ' + f'AND "EinheitBetriebsstatus" = 35;' + ) + + # Get Data from Postgres + with self.databases["mastr"].connect() as conn: + df = pd.read_sql(query, conn) + # If the response Dataframe is not empty set technical parameter + if df.empty: + return df + + # all PVs with are implemented in 2018 + df["startDate"] = pd.to_datetime(df["startDate"]) + # all PVs with nan are south oriented assets + df["azimuth"] 
= [ + mastr_solar_azimuth[str(code)] for code in df["azimuthCode"].to_numpy(int) + ] + del df["azimuthCode"] + # all PVs with nan have a tilt angle of 30° + df["tilt"] = [ + mastr_solar_azimuth[str(code)] for code in df["tiltCode"].to_numpy(int) + ] + del df["tiltCode"] + if solar_type == "roof_top": + # all PVs with nan and startDate > 2013 have ownConsumption + missing_values = df["ownConsumption"].isna() + deadline = [date.year > 2013 for date in df["startDate"]] + own_consumption = [ + all([missing_values[i], deadline[i]]) + for i in range(len(missing_values)) + ] + df.loc[own_consumption, "ownConsumption"] = 1 + grid_use = [ + all([missing_values[i], not deadline[i]]) + for i in range(len(missing_values)) + ] + df.loc[grid_use, "ownConsumption"] = 0 + df["ownConsumption"] = df["ownConsumption"].replace(689, 1) + df["ownConsumption"] = df["ownConsumption"].replace(688, 0) + # assumption "regenerative Energiesysteme": + # a year has 1000 hours peak + df["demandP"] = df["maxPower"] * 1e3 + elif solar_type == "free_area" or solar_type == "other": + # set own consumption for solar power plant mounted PVs to 0, because the demand is unknown + df["ownConsumption"] = 0 + if solar_type == "roof_top": + # all PVs with nan and startDate > 2012 and maxPower > 30 kWp are limited to 70% + missing_values = df["limited"].isna() + power_cap = df["maxPower"] > 30 + deadline = [date.year > 2012 for date in df["startDate"]] + limited = [ + all([missing_values[i], deadline[i], power_cap[i]]) + for i in range(len(missing_values)) + ] + df.loc[limited, "limited"] = 803 + # rest nans have no limitation + df["limited"] = df["limited"].fillna(802) + df["limited"] = [ + mastr_solar_azimuth[str(code)] for code in df["limited"].to_numpy(int) + ] + if solar_type == "free_area" or solar_type == "other": + # TODO: Check restrictions for solar power plant + # nans have no limitation + df["limited"] = df["limited"].fillna(802) + df["limited"] = [ + mastr_solar_azimuth[str(code)] for code in 
df["limited"].to_numpy(int) + ] + # all PVs with nan and startDate > 2016 and maxPower > 100 kWp have direct marketing + missing_values = df["eeg"].isna() + power_cap = df["maxPower"] > 100 + deadline = [date.year > 2016 for date in df["startDate"]] + eeg = [ + all([missing_values[i], deadline[i], power_cap[i]]) + for i in range(len(missing_values)) + ] + df.loc[eeg, "eeg"] = 0 + # rest nans are eeg assets and are managed by the tso + df["eeg"] = df["eeg"].replace(np.nan, 0) + return df + + def get_wind_turbines_in_area(self, area=520, wind_type="on_shore"): + if isinstance(area, str) and area.startswith("DE"): + plz_codes = self.get_plz_codes(area) + if not plz_codes: + raise Exception("invalid areas") + else: + plz_codes = [area] + + for plz in plz_codes: + if plz not in self.plz_nuts.index: + raise Exception("invalid plz code") + + longitude, latitude = self.get_lat_lon_area(area) + plz_codes_str = ", ".join([str(x) for x in plz_codes]) + plz_codes_str = f"({plz_codes_str})" + + query = ( + f'SELECT "EinheitMastrNummer" as "unitID", ' + f'"Nettonennleistung" as "maxPower", ' + f'COALESCE("Laengengrad", {longitude}) as "lon", ' + f'COALESCE("Breitengrad", {latitude}) as "lat", ' + f'"Typenbezeichnung" as "typ", ' + f'COALESCE("Hersteller", -1) as "manufacturer", ' + f'"Nabenhoehe" as "height", ' + f'"Rotordurchmesser" as "diameter", ' + f'"ClusterNordsee" as "nordicSea", ' + f'"ClusterOstsee" as "balticSea", ' + f'"GenMastrNummer" as "generatorID", ' + f'COALESCE("Inbetriebnahmedatum", \'2018-01-01\') as "startDate" ' + f'FROM "EinheitenWind" ' + f'WHERE "EinheitBetriebsstatus" = 35 ' + f'AND "Lage" = {self.mastr_wind_type[wind_type]}' + ) + if wind_type == "on_shore": + query += f' AND "Postleitzahl" in {plz_codes_str};' + + # Get Data from Postgres + with self.databases["mastr"].connect() as conn: + df = pd.read_sql(query, conn) + # If the response Dataframe is not empty set technical parameter + if df.empty: + return df + # all WEA with nan set hight to mean 
value + df["height"] = df["height"].fillna(df["height"].mean()) + # all WEA with nan set hight to mean diameter + df["diameter"] = df["diameter"].fillna(df["diameter"].mean()) + # all WEA with na are on shore and not allocated to a sea cluster + df["nordicSea"] = df["nordicSea"].fillna(0) + df["balticSea"] = df["balticSea"].fillna(0) + # get name of manufacturer + df["manufacturer"] = df["manufacturer"].replace(self.windhersteller) + # try to find the correct type TODO: Check Pattern of new turbines + # df['typ'] = [str(typ).replace(' ', '').replace('-', '').upper() for typ in df['typ']] + # df['typ'] = [None if re.search(self.pattern_wind, typ) is None else re.search(self.pattern_wind, typ).group() + # for typ in df['typ']] + # df['typ'] = df['typ'].replace('', 'default') + # set tag for wind farms + wind_farm_prefix = f"{area}0F" + df["windFarm"] = "x" + counter = 0 + for genId in df["generatorID"].unique(): + if genId is not None and len(df[df["generatorID"] == genId]) > 1: + windFarm = df[df["generatorID"] == genId] + for line, row in windFarm.iterrows(): + df.at[line, "windFarm"] = f"{wind_farm_prefix}{counter}" + counter += 1 + return df + + def get_biomass_systems_in_area(self, area=520): + if isinstance(area, str) and area.startswith("DE"): + plz_codes = self.get_plz_codes(area) + if not plz_codes: + raise Exception("invalid areas") + else: + plz_codes = [area] + + for plz in plz_codes: + if plz not in self.plz_nuts.index: + raise Exception("invalid plz code") + + longitude, latitude = self.get_lat_lon_area(area) + plz_codes_str = ", ".join([str(x) for x in plz_codes]) + plz_codes_str = f"({plz_codes_str})" + + # TODO: Add more Parameters, if the model get more complex + query = ( + f'SELECT "EinheitMastrNummer" as "unitID", ' + f'COALESCE("Inbetriebnahmedatum", \'2018-01-01\') as "startDate", ' + f'"Nettonennleistung" as "maxPower", ' + f'COALESCE("Laengengrad", {longitude}) as "lon", ' + f'COALESCE("Breitengrad", {latitude}) as "lat" ' + f'FROM 
"EinheitenBiomasse"' + f'WHERE "Postleitzahl" in {plz_codes_str} AND' + f'"EinheitBetriebsstatus" = 35 ;' + ) + + # Get Data from Postgres + with self.databases["mastr"].connect() as conn: + df = pd.read_sql(query, conn) + # If the response Dataframe is not empty set technical parameter + return df + + def get_run_river_systems_in_area(self, area=520): + if isinstance(area, str) and area.startswith("DE"): + plz_codes = self.get_plz_codes(area) + if not plz_codes: + raise Exception("invalid areas") + else: + plz_codes = [area] + + for plz in plz_codes: + if plz not in self.plz_nuts.index: + raise Exception("invalid plz code") + + longitude, latitude = self.get_lat_lon_area(area) + plz_codes_str = ", ".join([str(x) for x in plz_codes]) + plz_codes_str = f"({plz_codes_str})" + + query = ( + f'SELECT "EinheitMastrNummer" as "unitID", ' + f'COALESCE("Inbetriebnahmedatum", \'2018-01-01\') as "startDate", ' + f'"Nettonennleistung" as "maxPower", ' + f'COALESCE("Laengengrad", {longitude}) as "lon", ' + f'COALESCE("Breitengrad", {latitude}) as "lat" ' + f'FROM "EinheitenWasser" ' + f'WHERE "Postleitzahl"::int in {plz_codes_str} AND ' + f'"EinheitBetriebsstatus" = 35 AND "ArtDerWasserkraftanlage" = 890' + ) + + # Get Data from Postgres + with self.databases["mastr"].connect() as conn: + df = pd.read_sql(query, conn) + + return df + + def get_water_storage_systems(self, area=800): + if isinstance(area, str) and area.startswith("DE"): + plz_codes = self.get_plz_codes(area) + if not plz_codes: + raise Exception("invalid areas") + else: + plz_codes = [area] + + for plz in plz_codes: + if plz not in self.plz_nuts.index: + raise Exception("invalid plz code") + + longitude, latitude = self.get_lat_lon_area(area) + plz_codes_str = ", ".join([str(x) for x in plz_codes]) + plz_codes_str = f"({plz_codes_str})" + + query = ( + f'SELECT "EinheitMastrNummer" as "unitID", ' + f'"LokationMastrNummer" as "locationID", ' + f'"SpeMastrNummer" as "storageID", ' + f'"NameStromerzeugungseinheit" 
as "name", ' + f'COALESCE("Inbetriebnahmedatum", \'2018-01-01\') as "startDate", ' + f'"Nettonennleistung" as "PMinus_max", ' + f'"NutzbareSpeicherkapazitaet" as "VMax", ' + f'"PumpbetriebLeistungsaufnahme" as "PPlus_max", ' + f'COALESCE("Laengengrad", {longitude}) as "lon", ' + f'COALESCE("Breitengrad", {latitude}) as "lat" ' + f'FROM "EinheitenStromSpeicher"' + f'LEFT JOIN "AnlagenStromSpeicher" ON "EinheitMastrNummer" = "VerknuepfteEinheitenMastrNummern" ' + f'WHERE "Postleitzahl"::int in {plz_codes_str} AND ' + f'"EinheitBetriebsstatus" = 35 AND "Technologie" = 1537 AND "EinheitSystemstatus"=472 AND "Land"=84 ' + f'AND "Nettonennleistung" > 500' + ) + # print(query) + # Get Data from Postgres + with self.databases["mastr"].connect() as conn: + df = pd.read_sql(query, conn) + + # If the response Dataframe is not empty set technical parameter + if df.empty: + return df + + print(df["name"]) + # set charge and discharge power + df["PPlus_max"] = df["PPlus_max"].fillna( + df["PMinus_max"] + ) # fill na with Rated Power + # df['PMinus_max'] = 0 # set min to zero + # df['PPlus_max'] = 0 # set min to zero + + # fill nan values with default from wiki + df["VMax"] = df["VMax"].fillna(0) + df["VMax"] = df["VMax"] + for index, row in df[df["VMax"] == 0].iterrows(): + # storage_volumes is in [MWh] + df.at[index, "VMax"] = mastr_storage.get(row["name"], 0) * 1e3 + + storages = [] + for id_ in df["storageID"].unique(): + data = df[df["storageID"] == id_] + storage = { + "unitID": id_, + "startDate": pd.to_datetime(data["startDate"].to_numpy()[0]), + "PMinus_max": data["PMinus_max"].sum(), + "PPlus_max": data["PPlus_max"].sum(), + "VMax": data["VMax"].to_numpy()[0], + "VMin": 0, + "V0": data["VMax"].to_numpy()[0] / 2, + "lat": data["lat"].to_numpy()[0], + "lon": data["lon"].to_numpy()[0], + "eta_plus": 0.88, + "eta_minus": 0.92, + } + # https://energie.ch/pumpspeicherkraftwerk/ + if storage["VMax"] > 0: + storages.append(storage) + return df + + def get_demand_in_area(self, 
area): + if area == "DE91C": + # nuts areas changed: https://en.wikipedia.org/wiki/NUTS_statistical_regions_of_Germany#Older_Version + # upstream issue: https://github.com/openego/data_processing/issues/379 + DE915 = self.get_demand_in_area("DE915") + DE919 = self.get_demand_in_area("DE919") + return DE915 + DE919 + elif area == "DEB1C": + return self.get_demand_in_area("DEB16") + elif area == "DEB1D": + return self.get_demand_in_area("DEB19") + query = f"""select sum(sector_consumption_residential) as household, sum(sector_consumption_retail) as business, + sum(sector_consumption_industrial) as industry, sum(sector_consumption_agricultural) as agriculture + from demand where version='v0.4.5' and nuts LIKE '{area}%%' + """ + with self.databases["oep"].connect() as conn: + df = pd.read_sql(query, conn) + # returned in GWh + return df * 1e3 # convert to MWh + + def get_solar_storage_systems_in_area(self, area): + if isinstance(area, str) and area.startswith("DE"): + plz_codes = self.get_plz_codes(area) + if not plz_codes: + raise Exception("invalid areas") + else: + plz_codes = [area] + + for plz in plz_codes: + if plz not in self.plz_nuts.index: + raise Exception("invalid plz code") + + longitude, latitude = self.get_lat_lon_area(area) + plz_codes_str = ", ".join([str(x) for x in plz_codes]) + plz_codes_str = f"({plz_codes_str})" + + query = ( + f'SELECT spe."LokationMastrNummer" as "unitID", ' + f'so."Nettonennleistung" as "maxPower", ' + f'spe."Nettonennleistung" as "batPower", ' + f'COALESCE(so."Laengengrad", {longitude}) as "lon", ' + f'COALESCE(so."Breitengrad", {latitude}) as "lat", ' + f'COALESCE(so."Hauptausrichtung", 699) as "azimuthCode", ' + f'COALESCE(so."Leistungsbegrenzung", 802) as "limited", ' + f'COALESCE(so."Einspeisungsart", 689) as "ownConsumption", ' + f'COALESCE(so."HauptausrichtungNeigungswinkel", 809) as "tiltCode", ' + f'COALESCE(so."Inbetriebnahmedatum", \'2018-01-01\') as "startDate", ' + f'an."NutzbareSpeicherkapazitaet" as "VMax" ' + 
f'FROM "EinheitenStromSpeicher" spe ' + f'INNER JOIN "EinheitenSolar" so ON spe."LokationMastrNummer" = so."LokationMastrNummer" ' + f'INNER JOIN "AnlagenStromSpeicher" an ON spe."SpeMastrNummer" = an."MastrNummer"' + f'WHERE so."Postleitzahl" in {plz_codes_str} ' + f'AND so."EinheitBetriebsstatus" = 35;' + ) + + # Get Data from Postgres + with self.databases["mastr"].connect() as conn: + df = pd.read_sql(query, conn) + + # If the response Dataframe is not empty set technical parameter + if df.empty: + return df + + df["VMax"] = df["VMax"].fillna(10) + df["ownConsumption"] = df["ownConsumption"].replace(689, 1) + df["ownConsumption"] = df["ownConsumption"].replace(688, 0) + df["limited"] = [ + mastr_solar_azimuth[str(code)] for code in df["limited"].to_numpy(int) + ] + + # all PVs with nan are south oriented assets + df["azimuth"] = [ + mastr_solar_azimuth[str(code)] for code in df["azimuthCode"].to_numpy(int) + ] + del df["azimuthCode"] + # all PVs with nan have a tilt angle of 30° + df["tilt"] = [ + mastr_solar_azimuth[str(code)] for code in df["tiltCode"].to_numpy(int) + ] + del df["tiltCode"] + # assumption "regenerative Energiesysteme": + # a year has 1000 hours peak + df["demandP"] = df["maxPower"] * 1e3 + + df["eta"] = 0.96 + df["V0"] = 0 + return df + + def get_demand_series_in_area(self, area, year=2019): + demand = self.get_demand_in_area(area=area) + demand = demand.T.to_dict()[0] + ann_el_demand_per_sector = { + "g0": demand["business"], + # "h0": demand["household"], + "h0_dyn": demand["household"], + "l0": demand["agriculture"], + "g3": demand["industry"], + } + # d = holidays.DE(subdiv='NW', years=year) + holi = holidays.DE(years=year) + e_slp = ElecSlp(year, holidays=holi) + return e_slp.get_profile(ann_el_demand_per_sector) + + def get_weather_param( + self, + params: str | list[str], + start: datetime, + end: datetime, + area: str = "", + ): + if isinstance(params, str): + params = [params] + params = [f"avg({p}) as {p}" for p in params] + 
selection = ", ".join(params) + query = f"SELECT time, {selection} FROM ecmwf_eu WHERE time BETWEEN '{start.isoformat()}' AND '{end.isoformat()}'" + if area is not None: + query += f" AND nuts_id LIKE '{area.upper()}%%'" + query += "group by time" + with self.databases["weather"].connect() as connection: + return pd.read_sql_query(query, connection, index_col="time") + + def get_renewables_series_in_area( + self, area: str | int, start: datetime, end: datetime + ): + # prepare weather + longitude, latitude = self.get_lat_lon_area(area) + location = Location(latitude, longitude, tz="Europe/Berlin") + date_range = pd.date_range(start=start, end=end, freq="h") + sun_position = location.get_solarposition(date_range) + weather_df = self.get_weather_param( + WEATHER_PARAMS_ECMWF, + start, + end, + area, + ) + # convert from J/m^2 to Wh/m^2 + weather_df["ghi"] /= 3600 + weather_df["zenith"] = sun_position["zenith"] + weather_df["azimuth"] = sun_position["azimuth"] + # calculate ghi and zenith + calculated = erbs( + np.array(weather_df["ghi"]), + np.array(weather_df["zenith"]), + date_range, + ) + weather_df["dni"] = calculated["dni"] + weather_df["dhi"] = calculated["dhi"] + + # load systems from mastr + solar = self.get_solar_storage_systems_in_area(area) + solar_sys = self.get_solar_systems_in_area(area) + wind = self.get_wind_turbines_in_area(area) + solar_series, bat_power = get_solar_series(solar_sys, weather_df) + solar_series_sys, bat_power_sys = get_solar_series(solar, weather_df) + wind_power = get_wind_series(wind, weather_df) + solar_power = solar_series + solar_series_sys + # conversion kW -> MW + return solar_power / 1e3, wind_power / 1e3 + + def get_grid_nodes(self): + # get scigrid + return {} + + def get_grid_edges(self): + return {} + + +def get_wind_series(wind_systems: pd.DataFrame, weather_df: pd.DataFrame): + data = [ + 0.2 * np.ones(len(weather_df.index)), + weather_df["temp_air"], + weather_df["wind_speed"], + ] + columns = [["roughness_length", 
"temperature", "wind_speed"], [0, 2, 10]] + ww = pd.DataFrame( + np.asarray(data).T, + index=weather_df.index, + columns=columns, + ) + + wt = WindTurbine(82, turbine_type="E-82/2300") + # todo get wind turbine types from database + wind_power = pd.Series() + std_curve = wt.power_curve + std_curve["value"] = std_curve["value"] / wt.power_curve["value"].max() + for line, row in tqdm(wind_systems.iterrows(), total=len(wind_systems)): + max_power = row["maxPower"] * 1e3 + diameter = float(row["diameter"]) + height = float(row["height"]) + if height <= 0: + # weird fix + height = max_power / 20 + + # small wind systems have no diameter set + if diameter <= 0: + diameter = height + if diameter / 2 > height: + diameter = height + p_curve = std_curve.copy() + p_curve["value"] = std_curve["value"] * max_power + wt = WindTurbine( + hub_height=height, + rotor_diameter=diameter, + nominal_power=max_power, + power_curve=p_curve, + ) + mc = ModelChain(wt).run_model(ww) + wpower = mc.power_output / 1e3 # [W] -> [kW] + wind_power += wpower + return wind_power + + +def get_solar_series(solar_systems: pd.DataFrame, weather_df: pd.DataFrame): + systems = [] + solar_power = pd.Series() + battery_power = pd.Series() + if solar_systems.empty: + return solar_power, battery_power + for info, group in tqdm(solar_systems.groupby(["azimuth", "tilt"])): + azimuth = int(info[0]) + tilt = int(info[1]) + maxPower = group["maxPower"].sum() # in kW + + if "batPower" in group.columns: + battery_power += group["batPower"].sum() + system = PVSystem( + surface_tilt=tilt, + surface_azimuth=azimuth, + module_parameters={"pdc0": maxPower}, + ) + systems.append(system) + + ir = system.get_irradiance( + solar_zenith=weather_df["zenith"], + solar_azimuth=weather_df["azimuth"], + dni=weather_df["dni"], + ghi=weather_df["ghi"], + dhi=weather_df["dhi"], + ) + solar_power += ir["poa_global"] * maxPower + solar_power /= 1e3 # W -> kW + return solar_power, battery_power + + +def get_pwp_agents(interface, areas): 
+ pwp_agents = [] + for area in areas: + print(area) + plants = False + for fuel in ["lignite", "gas", "oil", "hard coal", "nuclear"]: + df = interface.get_power_plant_in_area(area=area, fuel_type=fuel) + if not df.empty: + plants = True + break + if plants: + pwp_agents.append(area) + return pwp_agents + + +def get_res_agents(interface, areas): + res_agents = [] + for area in areas: + print(area) + wind = interface.get_wind_turbines_in_area(area=area) + solar = interface.get_solar_storage_systems_in_area(area=area) + bio = interface.get_biomass_systems_in_area(area=area) + water = interface.get_run_river_systems_in_area(area=area) + if any([not wind.empty, not solar.empty, not bio.empty, not water.empty]): + res_agents.append(area) + return res_agents + + +def get_storage_agents(interface, areas): + str_agents = [] + for area in areas: + print(area) + str = interface.get_water_storage_systems(area) + if str.empty: + continue + # print(str['name']) + if any(str["PMinus_max"] > 1) and any(str["VMax"] > 1): + print(f"add {area}") + str_agents.append(area) + return str_agents + + +def get_dem_agents(areas): + dem_agents = [] + for area in areas: + dem_agents.append(area) + return dem_agents + + +if __name__ == "__main__": + import json + import os + + x = os.getenv("INFRASTRUCTURE_SOURCE", "timescale.nowum.fh-aachen.de:5432") + y = os.getenv("INFRASTRUCTURE_LOGIN", "readonly:readonly") + uri = f"postgresql://{y}@{x}" + interface = InfrastructureInterface("test", uri) + interface.get_plz_codes("DEF") + interface.get_lat_lon(52379) + # x = interface.get_power_plant_in_area(area='DEA2D', fuel_type='gas') + # y = interface.get_water_storage_systems(area=415) + # z = interface.get_solar_storage_systems_in_area(area=415) + # a = interface.get_run_river_systems_in_area(area='DE111') + areas = interface.plz_nuts["nuts3"].unique() + + create_agents = True + if create_agents: + agents = {} + agents["dem"] = get_dem_agents(areas) + agents["res"] = get_res_agents(interface, 
areas) + agents["str"] = get_storage_agents(interface, areas) + agents["pwp"] = get_pwp_agents(interface, areas) + with open("../agents.json", "w") as f: + json.dump(agents, f, indent=2) + else: + with open("../agents.json", "r") as f: + agents = json.load(f) + + dem = interface.get_demand_in_area(area="DE91C") + solar = interface.get_solar_storage_systems_in_area("DE7") + lignite = interface.get_power_plant_in_area("DE", fuel_type="lignite") + + ## test DEM from NUTS2 vs NUTS3: + level_3 = agents["dem"] + level_2 = list({a[0 : 2 + 2] for a in level_3}) + level_1 = list({a[0 : 2 + 1] for a in level_3}) + + summe = 0 + for a in level_1: + print(a) + dem_a = interface.get_demand_in_area(area=a) + summe += dem_a + print(summe) + + for a in level_2: + print(a) + l3_a = [ag for ag in level_3 if ag.startswith(a)] + + dem_a = interface.get_demand_in_area(area=a) + print(dem_a) + + for area in l3_a: + df = interface.get_demand_in_area(area=area) + print(area, df) + dem_a -= df.fillna(0) + + print(dem_a) + assert (dem_a < 1e-10).all().all() + + ## infra tests + year = 2020 + start = datetime(year, 1, 1) + end = datetime(year, 12, 31) + index = pd.date_range( + start=start, + end=end + timedelta(hours=24), + freq="H", + ) + database = os.getenv("INFRASTRUCTURE_SOURCE", "timescale.nowum.fh-aachen.de:5432") + login = os.getenv("INFRASTRUCTURE_LOGIN", "readonly:readonly") + infra_uri = f"postgresql://{login}@{database}" + infra_interface = InfrastructureInterface("test", infra_uri) + + solar = infra_interface.get_solar_storage_systems_in_area("DE123") + solar_sys = infra_interface.get_solar_systems_in_area("DE127") + wind = infra_interface.get_wind_turbines_in_area("DE7") + weather_df = infra_interface.get_weather_param( + WEATHER_PARAMS_ECMWF, start, end, "DEA" + ) + weather_df_de = infra_interface.get_weather_param( + WEATHER_PARAMS_ECMWF, start, end, "DE" + ) + demand = infra_interface.get_demand_series_in_area("DEA", year) + solar, wind = 
infra_interface.get_renewables_series_in_area("DE12", start, end) diff --git a/assume/scenario/oeds/static.py b/assume/scenario/oeds/static.py new file mode 100644 index 00000000..7f400129 --- /dev/null +++ b/assume/scenario/oeds/static.py @@ -0,0 +1,294 @@ +# SPDX-FileCopyrightText: ASSUME Developers +# +# SPDX-License-Identifier: AGPL-3.0-or-later + +fuel_translation = { + "Wind": "wind", + "Wasser": "hydro", + "Solar": "solar", + "Braunkohle": "lignite", + "Biomasse": "biomass", + "Geothermie": "geothermal", + "Grubengas": "landfill gas", + "andere Gase": "landfill gas", + "nicht biogener Abfall": "waste", + "Steinkohle": "hard coal", + "Erdgas": "gas", + "Mineralölprodukte": "oil", + "Kernenergie": "nuclear", + "Speicher": "storage", +} + + +# SolarLage +mastr_solar_codes = { + "free_area": "852", + "roof_top": "853", + "other": "2484", + "balcony": "2961", + "water": "3002", + "parking_lot": "3058", +} + +mastr_solar_azimuth = { + "695": "0", + "696": "45", + "697": "90", + "698": "135", + "699": "180", + "700": "225", + "701": "270", + "702": "315", + "703": "360", + "704": "-1", + "806": "90", + "807": "75", + "808": "50", + "809": "30", + "810": "10", + "811": "0", + "802": "100", + "803": "70", + "804": "60", + "805": "50", + "1535": "100", +} + +mastr_storage = { + "Niederwartha": "591", + "Bleiloch": "753", + "Hohenwarte 1": "795", + "Hohenwarte 2": "2087", + "Wendefurth": "532", + "Markersbach": "4018", + "Geesthacht": "600", + "Waldeck 1": "478", + "Waldeck 2": "3428", + "Bringhausen": "0", + "Hemfurth": "0", + "illwerke": "0", + "Glems": "560", + "Schwarzenbach": "198", + "Witznau": "220", + "Säckingen": "2064", + "Häusern": "463", + "Waldshut": "402", + "Wehr": "6073", + "Walchenseekraftwerk": "0", + "Happurg": "900", + "Schnitzel": "0", + "Langenprozelten": "950", + "E2307101": "0", + "Goldisthal": "8480", +} + +# start cost given in [€/MW] +# chi in [t CO2/MWh therm.] 
+# Set technical parameter corresponding to the type (0, 2000, 2024) +technical_parameter = { + "hard coal": { + 0: { + "maxPower": 100, + "minPower": 40, + "ramp_up": 1, + "ramp_down": 1, + "eta": 36, + "chi": 0.355, + "min_down_time": 9, + "min_operating_time": 8, + "start_cost": 60, + "on": 1, + "off": 0, + }, + 2000: { + "maxPower": 100, + "minPower": 33, + "ramp_up": 4, + "ramp_down": 4, + "eta": 40, + "chi": 0.355, + "min_down_time": 7, + "min_operating_time": 6, + "start_cost": 60, + "on": 1, + "off": 0, + }, + 2024: { + "maxPower": 100, + "minPower": 25, + "ramp_up": 6, + "ramp_down": 6, + "eta": 45, + "chi": 0.355, + "min_down_time": 5, + "min_operating_time": 4, + "start_cost": 60, + "on": 1, + "off": 0, + }, + }, + "gas_combined": { + 0: { + "maxPower": 100, + "minPower": 40, + "ramp_up": 2, + "ramp_down": 2, + "eta": 45, + "chi": 0.202, + "min_down_time": 4, + "min_operating_time": 4, + "start_cost": 60, + "on": 1, + "off": 0, + }, + 2000: { + "maxPower": 100, + "minPower": 40, + "ramp_up": 4, + "ramp_down": 4, + "eta": 55, + "chi": 0.202, + "min_down_time": 3, + "min_operating_time": 3, + "start_cost": 60, + "on": 1, + "off": 0, + }, + 2024: { + "maxPower": 100, + "minPower": 40, + "ramp_up": 8, + "ramp_down": 8, + "eta": 65, + "chi": 0.202, + "min_down_time": 2, + "min_operating_time": 2, + "start_cost": 60, + "on": 1, + "off": 0, + }, + }, + "gas": { + 0: { + "maxPower": 100, + "minPower": 40, + "ramp_up": 8, + "ramp_down": 8, + "eta": 40, + "chi": 0.202, + "min_down_time": 4, + "min_operating_time": 4, + "start_cost": 20, + "on": 1, + "off": 0, + }, + 2000: { + "maxPower": 100, + "minPower": 40, + "ramp_up": 12, + "ramp_down": 12, + "eta": 45, + "chi": 0.202, + "min_down_time": 3, + "min_operating_time": 3, + "start_cost": 20, + "on": 1, + "off": 0, + }, + 2024: { + "maxPower": 100, + "minPower": 40, + "ramp_up": 15, + "ramp_down": 15, + "eta": 50, + "chi": 0.202, + "min_down_time": 2, + "min_operating_time": 2, + "start_cost": 20, + "on": 1, + 
"off": 0, + }, + }, + "lignite": { + 0: { + "maxPower": 100, + "minPower": 60, + "ramp_up": 1.0, + "ramp_down": 1.0, + "eta": 34, + "chi": 0.407, + "min_down_time": 9, + "min_operating_time": 8, + "start_cost": 60, + "on": 1, + "off": 0, + }, + 2000: { + "maxPower": 100, + "minPower": 50, + "ramp_up": 2.5, + "ramp_down": 2.5, + "eta": 40, + "chi": 0.407, + "min_down_time": 7, + "min_operating_time": 6, + "start_cost": 60, + "on": 1, + "off": 0, + }, + 2024: { + "maxPower": 100, + "minPower": 40, + "ramp_up": 4.0, + "ramp_down": 4.0, + "eta": 45, + "chi": 0.407, + "min_down_time": 5, + "min_operating_time": 4, + "start_cost": 60, + "on": 1, + "off": 0, + }, + }, + "nuclear": { + 0: { + "maxPower": 100, + "minPower": 50, + "ramp_up": 0.5, + "ramp_down": 0.5, + "eta": 33, + "chi": 0, + "min_down_time": 22, + "min_operating_time": 20, + "start_cost": 250, + "on": 1, + "off": 0, + }, + 2000: { + "maxPower": 100, + "minPower": 45, + "ramp_up": 0.5, + "ramp_down": 0.5, + "eta": 35, + "chi": 0, + "min_down_time": 22, + "min_operating_time": 15, + "start_cost": 250, + "on": 1, + "off": 0, + }, + 2024: { + "maxPower": 100, + "minPower": 40, + "ramp_up": 0.5, + "ramp_down": 0.5, + "eta": 38, + "chi": 0, + "min_down_time": 22, + "min_operating_time": 10, + "start_cost": 250, + "on": 1, + "off": 0, + }, + }, +} diff --git a/assume/units/demand.py b/assume/units/demand.py index b6379704..f6091f0b 100644 --- a/assume/units/demand.py +++ b/assume/units/demand.py @@ -61,6 +61,7 @@ def __init__( bidding_strategies=bidding_strategies, index=index, node=node, + location=location, **kwargs, ) """Create a demand unit.""" @@ -76,7 +77,6 @@ def __init__( if isinstance(price, numbers.Real): price = pd.Series(price, index=self.index) self.price = price - self.location = location def execute_current_dispatch( self, diff --git a/assume/units/powerplant.py b/assume/units/powerplant.py index 60ecad06..42689960 100644 --- a/assume/units/powerplant.py +++ b/assume/units/powerplant.py @@ -106,6 
+106,7 @@ def __init__( bidding_strategies=bidding_strategies, index=index, node=node, + location=location, **kwargs, ) @@ -136,8 +137,6 @@ def __init__( self.heat_extraction = heat_extraction self.max_heat_extraction = max_heat_extraction - self.location = location - self.init_marginal_cost() def init_marginal_cost(self): diff --git a/assume/units/storage.py b/assume/units/storage.py index a510e7c1..685c93c0 100644 --- a/assume/units/storage.py +++ b/assume/units/storage.py @@ -111,7 +111,7 @@ def __init__( downtime_hot_start: int = 8, # hours downtime_warm_start: int = 48, # hours index: pd.DatetimeIndex = None, - location: tuple[float, float] = None, + location: tuple[float, float] = (0, 0), node: str = None, **kwargs, ): @@ -120,6 +120,7 @@ def __init__( id=id, technology=technology, node=node, + location=location, bidding_strategies=bidding_strategies, index=index, unit_operator=unit_operator, @@ -190,8 +191,6 @@ def __init__( self.warm_start_cost = warm_start_cost * max_power_discharge self.cold_start_cost = cold_start_cost * max_power_discharge - self.location = location - def execute_current_dispatch(self, start: pd.Timestamp, end: pd.Timestamp): """ Execute the current dispatch of the storage unit. 
diff --git a/cli.py b/cli.py index 1d492b62..ac1d9f37 100644 --- a/cli.py +++ b/cli.py @@ -115,7 +115,10 @@ def cli(args=None): db_uri = f"sqlite:///./examples/local_db/{name}.db" try: - from assume import World, load_scenario_folder, run_learning + # import package after argcomplete.autocomplete + # to improve autocompletion speed + from assume import World + from assume.scenario.loader_csv import load_scenario_folder, run_learning world = World( database_uri=db_uri, diff --git a/docs/source/assume.common.rst b/docs/source/assume.common.rst index f994ba8b..84ce5db1 100644 --- a/docs/source/assume.common.rst +++ b/docs/source/assume.common.rst @@ -56,14 +56,6 @@ assume.common.outputs module :undoc-members: :show-inheritance: -assume.common.scenario\_loader module -------------------------------------- - -.. automodule:: assume.common.scenario_loader - :members: - :undoc-members: - :show-inheritance: - assume.common.units\_operator module ------------------------------------ @@ -80,6 +72,14 @@ assume.common.utils module :undoc-members: :show-inheritance: +assume.scenario.loader\_csv module +------------------------------------- + +.. automodule:: assume.scenario.loader_csv + :members: + :undoc-members: + :show-inheritance: + Module contents --------------- diff --git a/docs/source/index.rst b/docs/source/index.rst index 5335807d..a6e8ed07 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -74,6 +74,7 @@ Documentation * :doc:`market_config` * :doc:`market_mechanism` +* :doc:`scenario_loader` * :doc:`bidding_agents` * :doc:`learning` * :doc:`assume` @@ -85,6 +86,7 @@ Documentation market_config market_mechanism + scenario_loader bidding_agents learning assume diff --git a/docs/source/scenario_loader.rst b/docs/source/scenario_loader.rst new file mode 100644 index 00000000..dd79868b --- /dev/null +++ b/docs/source/scenario_loader.rst @@ -0,0 +1,116 @@ +.. SPDX-FileCopyrightText: ASSUME Developers +.. +.. 
SPDX-License-Identifier: AGPL-3.0-or-later + +Scenario Loader +=============== + +For compatibility with other simulation tools, ASSUME provides a variety of scenario loaders. These are: + +- :ref:`csv` - File based scenarios (most flexible) +- :ref:`amiris` - used to create comparative studies +- :ref:`oeds` - create scenarios with the Open Energy Data Server + + +The possibilities and a short usage guide of the different scenario loaders are explained below: + + +.. _csv: +CSV +--- + +The CSV loader is the default scenario loader for ASSUME. Everything is configured through a ``config.yaml`` file, which describes a market design and references the input series of the agents, as well as the bidding strategies used. + +It is introduced in :doc:`this example `, where a small simulation is created from scratch. + +If you already have an existing csv scenario, you can load it using the ASSUME CLI like: + +``assume -c tiny -s example_01a --db-uri postgresql://assume:assume@localhost:5432/assume`` + +.. _amiris: +AMIRIS +------ + +The AMIRIS loader can be used to run examples configured for usage with the energy market simulation tool `AMIRIS by the DLR `_. + +.. 
code-block:: python + + from assume import World + from assume.scenario import load_amiris_async + + # To download some amiris examples run: + # git clone https://gitlab.com/dlr-ve/esy/amiris/examples.git amiris-examples + # next to the assume folder + base_path = f"../amiris-examples/Germany2019" + + # Read the scenario from this base path + amiris_scenario = read_amiris_yaml(base_path) + + # Configure where to write the output + db_uri = "postgresql://assume:assume@localhost:5432/assume" + + # Create a simulation world + world = World(database_uri=db_uri) + + # Let the loader add everything to the world + world.loop.run_until_complete( + load_amiris_async( + world, + "amiris", + scenario, + base_path, + ) + ) + + # Run the scenario + world.run() + +This makes it possible to compare or validate results from AMIRIS. +If you want to adjust the scenario or change bidding strategies, you currently have to adjust the amiris loader accordingly, +as it currently does not use reinforcement learning or different bidding strategies at all. +It tries to resemble the behavior of AMIRIS in the best way possible. +As AMIRIS currently only supports a single market design (with different support mechanisms), the market design can not be adjusted. + +.. _oeds: + +OEDS +---- + +`The Open-Energy-Data-Server `_ is a tool that facilitates the aggregation of open research data in a way that allows for easy reuse and structured work. It includes data from the `Marktstammdatenregister of Germany `_, `ENTSO-E `_, and weather datasets, making it versatile for modeling different localized scenarios. + +Once you have an Open-Energy-Data-Server running, you can query data for various scenarios and interactively compare your simulation results with the actual data recorded by ENTSO-E using Grafana. + +The main configuration required for the Open-Energy-Data-Server involves specifying the `NUTS areas `_ that should be simulated, as well as a marketdesign. 
+An example configuration of how this can be used is shown here: + +.. code-block:: python + + # where to write the simulation output to - can also be the oeds + db_uri = "postgresql://assume:assume@localhost:5432/assume" + world = World(database_uri=db_uri) + # adjust to your institute's database server + infra_uri = "postgresql://readonly:readonly@myoeds-server:5432" + + # you can also just use ["DE"] for a simulation of germany with single agents per generation technology + nuts_config = ["DE1", "DEA", "DEB", "DEC", "DED", "DEE", "DEF"] + + # define a marketdesign which can be used for the simulation + marketdesign = [ + MarketConfig( + "EOM", + rr.rrule(rr.HOURLY, interval=24, dtstart=start, until=end), + timedelta(hours=1), + "pay_as_clear", + [MarketProduct(timedelta(hours=1), 24, timedelta(hours=1))], + additional_fields=["block_id", "link", "exclusive_id"], + maximum_bid_volume=1e9, + maximum_bid_price=1e9, + ) + ] + # load the dataset from the database + load_oeds(world, "oeds_mastr_simulation", "my_studycase", infra_uri, marketdesign, nuts_config) + + # Run the scenario + world.run() + +If there are different diff --git a/examples/examples.py b/examples/examples.py index 412124e3..1a5f7658 100644 --- a/examples/examples.py +++ b/examples/examples.py @@ -6,7 +6,8 @@ import logging import os -from assume import World, load_scenario_folder, run_learning +from assume import World +from assume.scenario.loader_csv import load_scenario_folder, run_learning log = logging.getLogger(__name__) diff --git a/pyproject.toml b/pyproject.toml index 7514104f..450c4664 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,6 +51,10 @@ pytest-cov = {version = "^4.1.0", optional = true} pytest-asyncio = {version = "^0.21.1", optional = true} torch = {version = "^2.0.1", optional = true} glpk = {version = "^0.4.7", optional = true} +windpowerlib = {git = "https://github.com/maurerle/windpowerlib", rev = "maurerle"} +pvlib = {version = "^0.10.2", optional = true} +holidays = 
{version = "^0.37", optional = true} +demandlib = {version = "^0.1.9", optional = true} [tool.poetry.group.dev.dependencies] black = "^23.3.0" diff --git a/tests/test_dmas_powerplant.py b/tests/test_dmas_powerplant.py index 09596597..5687ed9b 100644 --- a/tests/test_dmas_powerplant.py +++ b/tests/test_dmas_powerplant.py @@ -31,7 +31,7 @@ def power_plant_1() -> PowerPlant: return PowerPlant( id="test_pp", unit_operator="test_operator", - technology="coal", + technology="hard coal", bidding_strategies={"energy": DmasPowerplantStrategy()}, index=index, max_power=1000, @@ -61,7 +61,7 @@ def power_plant_day(fuel_type="lignite") -> PowerPlant: return PowerPlant( id="test_pp", unit_operator="test_operator", - technology="coal", + technology="hard coal", bidding_strategies={"energy": DmasPowerplantStrategy()}, index=index, max_power=1000, diff --git a/tests/test_flexable_strategies.py b/tests/test_flexable_strategies.py index df1df450..68a4b2f1 100644 --- a/tests/test_flexable_strategies.py +++ b/tests/test_flexable_strategies.py @@ -24,7 +24,7 @@ def power_plant() -> PowerPlant: return PowerPlant( id="test_pp", unit_operator="test_operator", - technology="coal", + technology="hard coal", index=index, max_power=1000, min_power=200, diff --git a/tests/test_powerplant.py b/tests/test_powerplant.py index f8925eca..e520b55b 100644 --- a/tests/test_powerplant.py +++ b/tests/test_powerplant.py @@ -22,7 +22,7 @@ def power_plant_1() -> PowerPlant: return PowerPlant( id="test_pp", unit_operator="test_operator", - technology="coal", + technology="hard coal", bidding_strategies={"energy": NaiveStrategy()}, index=index, max_power=1000, @@ -43,7 +43,7 @@ def power_plant_2() -> PowerPlant: return PowerPlant( id="test_pp", unit_operator="test_operator", - technology="coal", + technology="hard coal", bidding_strategies={"energy": NaiveStrategy()}, index=index, max_power=1000, @@ -64,7 +64,7 @@ def power_plant_3() -> PowerPlant: return PowerPlant( id="test_pp", 
unit_operator="test_operator", - technology="coal", + technology="hard coal", bidding_strategies={"energy": NaiveStrategy()}, index=index, max_power=1000, @@ -81,7 +81,7 @@ def power_plant_3() -> PowerPlant: def test_init_function(power_plant_1, power_plant_2, power_plant_3): assert power_plant_1.id == "test_pp" assert power_plant_1.unit_operator == "test_operator" - assert power_plant_1.technology == "coal" + assert power_plant_1.technology == "hard coal" assert power_plant_1.max_power == 1000 assert power_plant_1.min_power == 200 assert power_plant_1.efficiency == 0.5 diff --git a/tests/test_utils.py b/tests/test_utils.py index 774fde6a..02cacdf5 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -10,7 +10,6 @@ from dateutil import rrule as rr from assume.common.market_objects import MarketConfig, MarketProduct -from assume.common.scenario_loader import convert_to_rrule_freq, make_market_config from assume.common.utils import ( aggregate_step_amount, get_available_products, @@ -19,6 +18,7 @@ separate_orders, visualize_orderbook, ) +from assume.scenario.loader_csv import convert_to_rrule_freq, make_market_config from .utils import create_orderbook diff --git a/tests/test_world.py b/tests/test_world.py index b8ac6ad2..f441b1e6 100644 --- a/tests/test_world.py +++ b/tests/test_world.py @@ -4,7 +4,8 @@ import asyncio -from assume import World, load_scenario_folder +from assume import World +from assume.scenario.loader_csv import load_scenario_folder def test_world(): From cff691d8d9b3474826fee890b9d1e7c1e1cbee2c Mon Sep 17 00:00:00 2001 From: Florian Maurer Date: Mon, 15 Jan 2024 13:24:54 +0100 Subject: [PATCH 2/2] use __version__ from pyproject.toml (#267) This fixes using the version as noted here: https://github.com/assume-framework/assume/pull/264#discussion_r1450652770 --- assume/__init__.py | 4 +++- assume/scenario/loader_oeds.py | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/assume/__init__.py b/assume/__init__.py index 
9116adac..22583ff5 100644 --- a/assume/__init__.py +++ b/assume/__init__.py @@ -2,10 +2,12 @@ # # SPDX-License-Identifier: AGPL-3.0-or-later +from importlib.metadata import version + from assume.common import MarketConfig, MarketProduct from assume.world import World -__version__ = "0.0.1" +__version__ = version("assume-framework") __author__ = "ASSUME Developers: Nick Harder, Kim Miskiw, Florian Maurer, Manish Khanra" __copyright__ = "AGPL-3.0 License" diff --git a/assume/scenario/loader_oeds.py b/assume/scenario/loader_oeds.py index 9ef25856..3954a2d6 100644 --- a/assume/scenario/loader_oeds.py +++ b/assume/scenario/loader_oeds.py @@ -1,6 +1,7 @@ # SPDX-FileCopyrightText: ASSUME Developers # # SPDX-License-Identifier: AGPL-3.0-or-later + import logging import os import shutil