From c79c749bdab2c76b58187b38f77afbfdf158dcd1 Mon Sep 17 00:00:00 2001 From: Florian Maurer Date: Fri, 15 Mar 2024 17:27:34 +0100 Subject: [PATCH 1/8] fixes to grid presentation and pypsa --- assume/common/grid_utils.py | 12 +- assume/common/outputs.py | 15 +- assume/markets/base_market.py | 4 +- .../clearing_algorithms/nodal_pricing.py | 2 +- .../markets/clearing_algorithms/redispatch.py | 2 +- assume/scenario/loader_pypsa.py | 19 +- .../dashboard-definitions/ASSUME_nodal.json | 216 ++++-------------- examples/inputs/example_01d/config.yaml | 4 + examples/inputs/example_01d/lines.csv | 2 +- 9 files changed, 84 insertions(+), 192 deletions(-) diff --git a/assume/common/grid_utils.py b/assume/common/grid_utils.py index 7c178693..ea0a6972 100644 --- a/assume/common/grid_utils.py +++ b/assume/common/grid_utils.py @@ -235,18 +235,10 @@ def read_pypsa_grid( """ def add_buses(network: pypsa.Network, buses: pd.DataFrame) -> None: - network.madd( - "Bus", - names=buses.index, - **buses, - ) + network.import_components_from_dataframe(buses, "Bus") def add_lines(network: pypsa.Network, lines: pd.DataFrame) -> None: - network.madd( - "Line", - names=lines.index, - **lines, - ) + network.import_components_from_dataframe(lines, "Line") # setup the network add_buses(network, grid_dict["buses"]) diff --git a/assume/common/outputs.py b/assume/common/outputs.py index 7b833388..98ddd917 100644 --- a/assume/common/outputs.py +++ b/assume/common/outputs.py @@ -12,7 +12,7 @@ import pandas as pd from dateutil import rrule as rr from mango import Role -from pandas.api.types import is_numeric_dtype +from pandas.api.types import is_numeric_dtype, is_bool_dtype from psycopg2.errors import UndefinedColumn from sqlalchemy import inspect, text from sqlalchemy.exc import DataError, OperationalError, ProgrammingError @@ -352,7 +352,12 @@ def check_columns(self, table: str, df: pd.DataFrame, index: bool = True): if column.lower() not in db_columns: try: # TODO this only works for float and text - column_type = "float" if is_numeric_dtype(df[column]) else "text" + if is_bool_dtype(df[column]): + column_type = "boolean" + elif is_numeric_dtype(df[column]): + column_type = "float" + else: + column_type = "text" query = f"ALTER TABLE {table} ADD COLUMN {column} {column_type}" with self.db.begin() as db: db.execute(text(query)) @@ -411,6 +416,12 @@ def write_market_orders(self, market_orders: any, market_id: str): del df["only_hours"] del df["agent_id"] + if "bid_type" not in df.columns: + df["bid_type"] = None + + if "node" not in df.columns: + df["node"] = None + df["simulation"] = self.simulation_id df["market_id"] = market_id diff --git a/assume/markets/base_market.py b/assume/markets/base_market.py index 3e4033e8..03ac6fa4 100644 --- a/assume/markets/base_market.py +++ b/assume/markets/base_market.py @@ -208,7 +208,9 @@ def setup(self): self.marketconfig.aid = self.context.aid for field in self.required_fields: - assert field in self.marketconfig.additional_fields, "missing field" + assert ( + field in self.marketconfig.additional_fields + ), f"{field} missing from additional_fiels" def accept_orderbook(content: OrderBookMessage, meta: MetaDict): if not isinstance(content, dict): diff --git a/assume/markets/clearing_algorithms/nodal_pricing.py b/assume/markets/clearing_algorithms/nodal_pricing.py index f71ea6dd..7e626b35 100644 --- a/assume/markets/clearing_algorithms/nodal_pricing.py +++ b/assume/markets/clearing_algorithms/nodal_pricing.py @@ -39,7 +39,7 @@ class NodalMarketRole(MarketRole): """ - required_fields = 
["node"] + required_fields = ["node", "max_power", "min_power"] def __init__(self, marketconfig: MarketConfig): super().__init__(marketconfig) diff --git a/assume/markets/clearing_algorithms/redispatch.py b/assume/markets/clearing_algorithms/redispatch.py index 5e385587..363291da 100644 --- a/assume/markets/clearing_algorithms/redispatch.py +++ b/assume/markets/clearing_algorithms/redispatch.py @@ -41,7 +41,7 @@ class RedispatchMarketRole(MarketRole): """ - required_fields = ["node"] + required_fields = ["node", "max_power", "min_power"] def __init__(self, marketconfig: MarketConfig): super().__init__(marketconfig) diff --git a/assume/scenario/loader_pypsa.py b/assume/scenario/loader_pypsa.py index bde3d42c..bb9336d9 100644 --- a/assume/scenario/loader_pypsa.py +++ b/assume/scenario/loader_pypsa.py @@ -55,12 +55,19 @@ async def load_pypsa_async( mo_id = "market_operator" world.add_market_operator(id=mo_id) + + network.generators.rename( + columns={"bus": "node", "p_nom": "max_power"}, inplace=True + ) + + network.loads.rename(columns={"bus": "node", "p_nom": "max_power"}, inplace=True) grid_data = { "buses": network.buses, "lines": network.lines, "generators": network.generators, "loads": network.loads, } + for market_config in marketdesign: market_config.param_dict["grid_data"] = grid_data world.add_market(mo_id, market_config) @@ -74,7 +81,7 @@ async def load_pypsa_async( unit_type = "power_plant" - max_power = generator.p_nom or 1000 + max_power = generator.max_power or 1000 # if p_nom is not set, generator.p_nom_extendable must be ramp_up = generator.ramp_limit_start_up * max_power ramp_down = generator.ramp_limit_shut_down * max_power @@ -86,8 +93,8 @@ async def load_pypsa_async( "min_power": generator.p_nom_min, "max_power": max_power, "bidding_strategies": bidding_strategies[generator.name], - "technology": "demand", - "node": generator.bus, + "technology": "conventional", + "node": generator.node, "efficiency": generator.efficiency, "fuel_type": generator.carrier, "ramp_up": ramp_up, @@ -120,7 +127,7 @@ async def load_pypsa_async( "max_power": load_t.max(), "bidding_strategies": bidding_strategies[load.name], "technology": "demand", - "node": load.bus, + "node": load.node, "price": 1e3, }, NaiveForecast(index, demand=load_t), @@ -177,6 +184,8 @@ async def load_pypsa_async( print("invalid studycase") network = pd.DataFrame() + study_case += market_mechanism + start = network.snapshots[0] end = network.snapshots[-1] marketdesign = [ @@ -186,7 +195,7 @@ async def load_pypsa_async( timedelta(hours=1), market_mechanism, [MarketProduct(timedelta(hours=1), 1, timedelta(hours=1))], - additional_fields=["node"], + additional_fields=["node", "max_power", "min_power"], maximum_bid_volume=1e9, maximum_bid_price=1e9, ) diff --git a/docker_configs/dashboard-definitions/ASSUME_nodal.json b/docker_configs/dashboard-definitions/ASSUME_nodal.json index 0c1388aa..d545f208 100644 --- a/docker_configs/dashboard-definitions/ASSUME_nodal.json +++ b/docker_configs/dashboard-definitions/ASSUME_nodal.json @@ -50,7 +50,7 @@ "content": "# Nodal Analysing Board\n\nThis board can be used to visualize the Grid statistics and usage.\nAs well as the prices at different locations of the grid.", "mode": "markdown" }, - "pluginVersion": "9.2.15", + "pluginVersion": "10.4.0", "title": "Nodal Dashboard", "type": "text" }, @@ -88,7 +88,7 @@ "content": "# Market-specific Data\n\nData specific for the market depending on the choice made at te top of the panel\n\n", "mode": "markdown" }, - "pluginVersion": "9.2.15", + 
"pluginVersion": "10.4.0", "targets": [ { "datasource": { @@ -253,7 +253,7 @@ }, { "datasource": { - "type": "postgres", + "type": "grafana-postgresql-datasource", "uid": "P7B13B9DF907EC40C" }, "description": "Overview of market results for the chossen market", @@ -263,6 +263,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -276,6 +277,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "stepAfter", "lineStyle": { "fill": "solid" @@ -314,58 +316,17 @@ "overrides": [ { "matcher": { - "id": "byFrameRefID", - "options": "Volume" + "id": "byRegexp", + "options": "/Price .*/" }, "properties": [ - { - "id": "custom.axisPlacement", - "value": "right" - }, { "id": "unit", - "value": "MWh" - } - ] - }, - { - "matcher": { - "id": "byFrameRefID", - "options": "Price" - }, - "properties": [ - { - "id": "custom.axisPlacement", - "value": "left" + "value": "€/MWh" }, { - "id": "unit", - "value": "EUR/MWh" - } - ] - }, - { - "__systemRef": "hideSeriesFrom", - "matcher": { - "id": "byNames", - "options": { - "mode": "exclude", - "names": [ - "Price north", - "Price south" - ], - "prefix": "All except:", - "readOnly": true - } - }, - "properties": [ - { - "id": "custom.hideFrom", - "value": { - "legend": false, - "tooltip": false, - "viz": true - } + "id": "custom.axisPlacement", + "value": "auto" } ] } @@ -406,11 +367,13 @@ "type": "postgres", "uid": "P7B13B9DF907EC40C" }, + "editorMode": "code", "format": "time_series", "group": [], + "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n product_start AS \"time\",\n demand_volume AS \"Demand volume\",\n supply_volume AS \"Supply volume\",\n node_id as \"Node\"\nFROM market_meta\nWHERE (\"simulation\" LIKE '$simulation') AND (\"market_id\" LIKE '$market') AND $__timeFilter(product_start)\nGROUP BY market_id, simulation, product_start, demand_volume, supply_volume, node_id\nORDER BY 1;\n", + "rawSql": "SELECT\n $__timeGroupAlias(product_start,$__interval),\n node as \"Node\",\n avg(demand_volume) AS \"Demand volume\",\n avg(supply_volume) AS \"Supply volume\",\n avg(price) AS \"Price\"\nFROM market_meta\nWHERE (\"simulation\" LIKE '$simulation') AND \"market_id\" = '$market' AND $__timeFilter(product_start)\nGROUP BY 1, node\nORDER BY 1;\n", "refId": "Volume", "select": [ [ @@ -422,39 +385,23 @@ } ] ], - "table": "market_meta", - "timeColumn": "product_start", - "timeColumnType": "timestamp", - "where": [ - { - "name": "$__timeFilter", - "params": [], - "type": "macro" - } - ] - }, - { - "datasource": { - "type": "postgres", - "uid": "P7B13B9DF907EC40C" - }, - "format": "time_series", - "group": [], - "hide": false, - "metricColumn": "none", - "rawQuery": true, - "rawSql": "SELECT\n product_start AS \"time\",\n price AS \"Price\",\n node_id as \"Node\"\nFROM market_meta\nWHERE (\"simulation\" LIKE '$simulation') AND (\"market_id\" LIKE '$market') AND $__timeFilter(product_start)\nGROUP BY market_id, simulation, product_start, price, node_id\nORDER BY 1;\n", - "refId": "Price", - "select": [ - [ + "sql": { + "columns": [ { - "params": [ - "supply_volume" - ], - "type": "column" + "parameters": [], + "type": "function" } - ] - ], + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + }, "table": "market_meta", "timeColumn": "product_start", "timeColumnType": "timestamp", @@ -482,6 +429,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": 
false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -495,6 +443,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -533,7 +482,7 @@ "h": 9, "w": 12, "x": 0, - "y": 56 + "y": 144 }, "id": 19, "options": { @@ -659,6 +608,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -672,6 +622,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -710,7 +661,7 @@ "h": 9, "w": 12, "x": 12, - "y": 56 + "y": 144 }, "id": 20, "options": { @@ -825,17 +776,16 @@ "type": "timeseries" } ], - "refresh": false, - "schemaVersion": 37, - "style": "dark", + "refresh": "", + "schemaVersion": 39, "tags": [], "templating": { "list": [ { "current": { - "selected": true, - "text": "example_01d_nodal_case", - "value": "example_01d_nodal_case" + "selected": false, + "text": "world_pypsa_scigrid_deredispatch", + "value": "world_pypsa_scigrid_deredispatch" }, "datasource": { "type": "postgres", @@ -858,8 +808,8 @@ { "current": { "selected": false, - "text": "nodal", - "value": "nodal" + "text": "EOM", + "value": "EOM" }, "datasource": { "type": "postgres", @@ -878,88 +828,12 @@ "skipUrlSync": false, "sort": 1, "type": "query" - }, - { - "current": { - "selected": false, - "text": "Unit 1", - "value": "Unit 1" - }, - "datasource": { - "type": "postgres", - "uid": "P7B13B9DF907EC40C" - }, - "definition": "SELECT index\nFROM power_plant_meta\nwhere simulation = '$simulation';", - "description": "Can choose which units we want to display ", - "hide": 0, - "includeAll": false, - "label": "", - "multi": true, - "name": "Gen_Units", - "options": [], - "query": "SELECT index\nFROM power_plant_meta\nwhere simulation = '$simulation';", - "refresh": 2, - "regex": "", - "skipUrlSync": false, - "sort": 1, - "type": "query" - }, - { - "current": { - "selected": false, - "text": "demand_north", - "value": "demand_north" - }, - "datasource": { - "type": "postgres", - "uid": "P7B13B9DF907EC40C" - }, - "definition": "SELECT index\nFROM demand_meta\nwhere simulation = '$simulation';", - "description": "Can choose which units we want to display ", - "hide": 0, - "includeAll": false, - "label": "", - "multi": true, - "name": "Demand_Units", - "options": [], - "query": "SELECT index\nFROM demand_meta\nwhere simulation = '$simulation';", - "refresh": 2, - "regex": "", - "skipUrlSync": false, - "sort": 1, - "type": "query" - }, - { - "current": { - "isNone": true, - "selected": false, - "text": "None", - "value": "" - }, - "datasource": { - "type": "postgres", - "uid": "P7B13B9DF907EC40C" - }, - "definition": "SELECT index\nFROM storage_meta\nwhere simulation = '$simulation';", - "description": "Can choose which storage units we want to display ", - "hide": 0, - "includeAll": false, - "label": "", - "multi": true, - "name": "Storage_Units", - "options": [], - "query": "SELECT index\nFROM storage_meta\nwhere simulation = '$simulation';", - "refresh": 2, - "regex": "", - "skipUrlSync": false, - "sort": 1, - "type": "query" } ] }, "time": { - "from": "2018-12-31T23:00:00.000Z", - "to": "2019-01-15T22:59:59.000Z" + "from": "2011-01-01T00:23:03.353Z", + "to": "2011-01-01T03:42:03.926Z" }, "timepicker": { "refresh_intervals": [ @@ -973,6 +847,6 @@ "timezone": "", "title": "ASSUME - Nodal view", "uid": "nodalview", - "version": 2, + "version": 5, "weekStart": "" -} +} \ No newline at 
end of file diff --git a/examples/inputs/example_01d/config.yaml b/examples/inputs/example_01d/config.yaml index e0e9a2ba..dd436216 100644 --- a/examples/inputs/example_01d/config.yaml +++ b/examples/inputs/example_01d/config.yaml @@ -44,6 +44,8 @@ base: market_mechanism: redispatch additional_fields: - node + - min_power + - max_power param_dict: network_path: . solver: glpk @@ -76,6 +78,8 @@ nodal_case: market_mechanism: nodal additional_fields: - node + - min_power + - max_power param_dict: network_path: . solver: glpk diff --git a/examples/inputs/example_01d/lines.csv b/examples/inputs/example_01d/lines.csv index 92c84f00..f255b88e 100644 --- a/examples/inputs/example_01d/lines.csv +++ b/examples/inputs/example_01d/lines.csv @@ -1,2 +1,2 @@ name,bus0,bus1,s_nom,x,r -0,north,south,5000,0.01,0.001 +0,north,south,5000.0,0.01,0.001 From c2c315fc9121e74a46f2e7bb5dc29891772966ae Mon Sep 17 00:00:00 2001 From: Florian Maurer Date: Mon, 18 Mar 2024 10:39:12 +0100 Subject: [PATCH 2/8] add refresh_time of 5s various fixes - fix amiris loader simple - remove duplicate fields if using pypsa grid --- assume/common/grid_utils.py | 37 ++++++++++--------- assume/common/outputs.py | 6 ++- assume/scenario/loader_amiris.py | 2 +- cli.py | 2 - .../dashboard-definitions/ASSUME.json | 1 + .../ASSUME_Learning.json | 1 + .../dashboard-definitions/ASSUME_nodal.json | 1 + 7 files changed, 27 insertions(+), 23 deletions(-) diff --git a/assume/common/grid_utils.py b/assume/common/grid_utils.py index ea0a6972..7d5ac174 100644 --- a/assume/common/grid_utils.py +++ b/assume/common/grid_utils.py @@ -150,18 +150,18 @@ def add_loads( network (pypsa.Network): the pypsa network to which the loads are loads (pandas.DataFrame): the loads dataframe """ - p_set = pd.DataFrame( - np.zeros((len(network.snapshots), len(loads.index))), - index=network.snapshots, - columns=loads.index, - ) + if "p_set" not in loads.columns: + loads["p_set"] = pd.DataFrame( + np.zeros((len(network.snapshots), len(loads.index))), + index=network.snapshots, + columns=loads.index, + ) # add loads network.madd( "Load", names=loads.index, bus=loads["node"], # bus to which the generator is connected to - p_set=p_set, **loads, ) @@ -173,21 +173,23 @@ def add_redispatch_loads( """ This adds loads to the redispatch PyPSA network with respective bus data to which they are connected """ - - p_set = pd.DataFrame( - np.zeros((len(network.snapshots), len(loads.index))), - index=network.snapshots, - columns=loads.index, - ) - - # add loads with opposite sing (default for loads is -1). This is needed to properly model the redispatch + loads_c = loads.copy() + if "sign" in loads_c.columns: + del loads_c["sign"] + + if "p_set" not in loads.columns: + loads["p_set"] = pd.DataFrame( + np.zeros((len(network.snapshots), len(loads.index))), + index=network.snapshots, + columns=loads.index, + ) + # add loads with opposite sign (default for loads is -1). This is needed to properly model the redispatch network.madd( "Load", names=loads.index, bus=loads["node"], # bus to which the generator is connected to - p_set=p_set, sign=1, - **loads, + **loads_c, ) @@ -200,7 +202,6 @@ def add_nodal_loads( The loads are added as generators with negative sign so their dispatch can be also curtailed, since regular load in PyPSA represents only an inelastic demand. 
""" - p_set = pd.DataFrame( np.zeros((len(network.snapshots), len(loads.index))), index=network.snapshots, @@ -287,7 +288,7 @@ def calculate_network_meta(network, product: MarketProduct, i: int): "demand_volume_energy": demand_volume * duration_hours, "supply_volume_energy": supply_volume * duration_hours, "price": price, - "node_id": bus, + "node": bus, "product_start": product[0], "product_end": product[1], "only_hours": product[2], diff --git a/assume/common/outputs.py b/assume/common/outputs.py index 98ddd917..aed5ec2e 100644 --- a/assume/common/outputs.py +++ b/assume/common/outputs.py @@ -13,7 +13,7 @@ from dateutil import rrule as rr from mango import Role from pandas.api.types import is_numeric_dtype, is_bool_dtype -from psycopg2.errors import UndefinedColumn +from psycopg2.errors import UndefinedColumn, InvalidTextRepresentation from sqlalchemy import inspect, text from sqlalchemy.exc import DataError, OperationalError, ProgrammingError @@ -330,8 +330,10 @@ def load_wkt(string: str): # now try again with self.db.begin() as db: df.to_postgis(geo_table, db, if_exists="append", index=True) - except ImportError: + except (ImportError, InvalidTextRepresentation): # otherwise, just use plain SQL anyway + # this is also needed if Int/Float is bad configured in the database + # try to input as normal dataframe with self.db.begin() as db: df.to_sql(geo_table, db, if_exists="append") diff --git a/assume/scenario/loader_amiris.py b/assume/scenario/loader_amiris.py index db961eb5..0e1b703d 100644 --- a/assume/scenario/loader_amiris.py +++ b/assume/scenario/loader_amiris.py @@ -482,7 +482,7 @@ async def load_amiris_async( sim_id = f"{scenario}_{study_case}" save_interval = amiris_scenario["GeneralProperties"]["Output"]["Interval"] // 4 prices = {} - index = pd.date_range(start=start, end=end, freq="1h", inclusive="left") + index = pd.date_range(start=start, end=end, freq="1h") world.bidding_strategies["support"] = SupportStrategy await world.setup( start=start, diff --git a/cli.py b/cli.py index e837f926..271928b7 100644 --- a/cli.py +++ b/cli.py @@ -16,8 +16,6 @@ import yaml from sqlalchemy import make_url -from assume.common.exceptions import AssumeException - os.makedirs("./examples/outputs", exist_ok=True) os.makedirs("./examples/local_db", exist_ok=True) diff --git a/docker_configs/dashboard-definitions/ASSUME.json b/docker_configs/dashboard-definitions/ASSUME.json index 8204d49a..1d07c3f6 100644 --- a/docker_configs/dashboard-definitions/ASSUME.json +++ b/docker_configs/dashboard-definitions/ASSUME.json @@ -4336,6 +4336,7 @@ }, "timepicker": { "refresh_intervals": [ + "5s", "1m", "5m", "15m", diff --git a/docker_configs/dashboard-definitions/ASSUME_Learning.json b/docker_configs/dashboard-definitions/ASSUME_Learning.json index 33061ff8..7fd4c561 100644 --- a/docker_configs/dashboard-definitions/ASSUME_Learning.json +++ b/docker_configs/dashboard-definitions/ASSUME_Learning.json @@ -2649,6 +2649,7 @@ }, "timepicker": { "refresh_intervals": [ + "5s", "1m", "5m", "15m", diff --git a/docker_configs/dashboard-definitions/ASSUME_nodal.json b/docker_configs/dashboard-definitions/ASSUME_nodal.json index d545f208..919f62c9 100644 --- a/docker_configs/dashboard-definitions/ASSUME_nodal.json +++ b/docker_configs/dashboard-definitions/ASSUME_nodal.json @@ -837,6 +837,7 @@ }, "timepicker": { "refresh_intervals": [ + "5s", "1m", "5m", "15m", From 20825599a71eefc83d09e3574c0794730652b3a6 Mon Sep 17 00:00:00 2001 From: Florian Maurer Date: Mon, 18 Mar 2024 10:39:38 +0100 Subject: [PATCH 3/8] revert 
pd.assign to pd.concat to remove highly fragmented pandas PerformanceWarning --- assume/common/forecasts.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/assume/common/forecasts.py b/assume/common/forecasts.py index 99410225..52fd5734 100644 --- a/assume/common/forecasts.py +++ b/assume/common/forecasts.py @@ -171,7 +171,10 @@ def set_forecast(self, data: pd.DataFrame | pd.Series | None, prefix=""): self.forecasts[column] = data[column].item() else: # Add new columns to the existing DataFrame, overwriting any existing columns with the same names - self.forecasts = self.forecasts.assign(**data) + new_columns = set(data.columns) - set(self.forecasts.columns) + self.forecasts = pd.concat( + [self.forecasts, data[list(new_columns)]], axis=1 + ) else: self.forecasts[prefix + data.name] = data From acc27cf7b7269679002e3b0b448f47664691ac61 Mon Sep 17 00:00:00 2001 From: Florian Maurer Date: Mon, 18 Mar 2024 10:40:12 +0100 Subject: [PATCH 4/8] further fixes for usage of pypsa with grid utils --- assume/common/grid_utils.py | 19 +++++++++++++------ assume/scenario/loader_pypsa.py | 7 +++---- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/assume/common/grid_utils.py b/assume/common/grid_utils.py index 7d5ac174..a9740c08 100644 --- a/assume/common/grid_utils.py +++ b/assume/common/grid_utils.py @@ -34,9 +34,9 @@ def add_generators( names=generators.index, bus=generators["node"], # bus to which the generator is connected to p_nom=generators["max_power"], # Nominal capacity of the powerplant/generator - p_min_pu=p_set, - p_max_pu=p_set + 1, - marginal_cost=p_set, + #p_min_pu=p_set, + #p_max_pu=p_set + 1, + #marginal_cost=p_set, **generators, ) @@ -175,7 +175,7 @@ def add_redispatch_loads( """ loads_c = loads.copy() if "sign" in loads_c.columns: - del loads_c["sign"] + del loads_c["sign"] if "p_set" not in loads.columns: loads["p_set"] = pd.DataFrame( @@ -207,6 +207,10 @@ def add_nodal_loads( index=network.snapshots, columns=loads.index, ) + loads_c = loads.copy() + + if "sign" in loads_c.columns: + del loads_c["sign"] # add loads as negative generators network.madd( @@ -218,7 +222,7 @@ def add_nodal_loads( p_max_pu=p_set + 1, marginal_cost=p_set, sign=-1, - **loads, + **loads_c, ) @@ -279,7 +283,10 @@ def calculate_network_meta(network, product: MarketProduct, i: int): supply_volume = dispatch_for_bus[dispatch_for_bus > 0].sum() demand_volume = dispatch_for_bus[dispatch_for_bus < 0].sum() - price = network.buses_t.marginal_price[bus].iat[i] + if not network.buses_t.marginal_price.empty: + price = network.buses_t.marginal_price[str(bus)].iat[i] + else: + price = 0 meta.append( { diff --git a/assume/scenario/loader_pypsa.py b/assume/scenario/loader_pypsa.py index bb9336d9..675fa5d2 100644 --- a/assume/scenario/loader_pypsa.py +++ b/assume/scenario/loader_pypsa.py @@ -59,8 +59,7 @@ async def load_pypsa_async( network.generators.rename( columns={"bus": "node", "p_nom": "max_power"}, inplace=True ) - - network.loads.rename(columns={"bus": "node", "p_nom": "max_power"}, inplace=True) + network.loads.rename(columns={"bus": "node", "p_set": "min_power"}, inplace=True) grid_data = { "buses": network.buses, "lines": network.lines, @@ -170,8 +169,8 @@ async def load_pypsa_async( world = World(database_uri=db_uri) scenario = "world_pypsa" study_case = "scigrid_de" - # "pay_as_clear" or "redispatch" - market_mechanism = "redispatch" + # "pay_as_clear", "redispatch" or "nodal" + market_mechanism = "nodal" match study_case: case "ac_dc_meshed": From 
acef3d2e36da8b0cd91dfc94135f59697ad9dee0 Mon Sep 17 00:00:00 2001 From: Florian Maurer Date: Mon, 18 Mar 2024 14:42:34 +0100 Subject: [PATCH 5/8] remove complexity from outputs fix first hour not cleared in amiris --- assume/common/outputs.py | 35 +++++++------------------------- assume/scenario/loader_amiris.py | 17 +++++++++------- 2 files changed, 17 insertions(+), 35 deletions(-) diff --git a/assume/common/outputs.py b/assume/common/outputs.py index aed5ec2e..3d3da01f 100644 --- a/assume/common/outputs.py +++ b/assume/common/outputs.py @@ -306,36 +306,15 @@ def create_line(row): df.reset_index() try: - # try to use geopandas - # needed for postGIS writing - import geoalchemy2 - import geopandas as gpd - from shapely.wkt import loads - - def load_wkt(string: str): - return loads(string.split(";")[1]) - - df["geometry"] = df["wkt_srid_4326"].apply(load_wkt) - df = gpd.GeoDataFrame(df, geometry="geometry") - df.set_crs(crs="EPSG:4326", inplace=True) - # postgis does not lowercase tablenames - df.columns = map(str.lower, df.columns) - try: - # try to input as geodataframe - with self.db.begin() as db: - df.to_postgis(geo_table, db, if_exists="append", index=True) - except (ProgrammingError, OperationalError, DataError, UndefinedColumn): - # if a column is missing, check and try again - self.check_columns(geo_table, df) - # now try again - with self.db.begin() as db: - df.to_postgis(geo_table, db, if_exists="append", index=True) - except (ImportError, InvalidTextRepresentation): - # otherwise, just use plain SQL anyway - # this is also needed if Int/Float is bad configured in the database - # try to input as normal dataframe with self.db.begin() as db: df.to_sql(geo_table, db, if_exists="append") + except (ProgrammingError, OperationalError, DataError, UndefinedColumn): + # if a column is missing, check and try again + self.check_columns(geo_table, df) + # now try again + with self.db.begin() as db: + df.to_sql(geo_table, db, if_exists="append") + def check_columns(self, table: str, df: pd.DataFrame, index: bool = True): """ diff --git a/assume/scenario/loader_amiris.py b/assume/scenario/loader_amiris.py index 0e1b703d..20317cf0 100644 --- a/assume/scenario/loader_amiris.py +++ b/assume/scenario/loader_amiris.py @@ -189,7 +189,7 @@ def add_agent_to_world( clearing_section["DistributionMethod"] ], market_products=[ - MarketProduct(timedelta(hours=1), 24, timedelta(hours=1)) + MarketProduct(timedelta(hours=1), 24, timedelta(hours=0)) ], maximum_bid_volume=1e6, ) @@ -471,18 +471,21 @@ async def load_amiris_async( amiris_scenario = read_amiris_yaml(base_path) # DeliveryIntervalInSteps = 3600 # In practice - this seems to be a fixed number in AMIRIS - start = amiris_scenario["GeneralProperties"]["Simulation"]["StartTime"] - start = pd.to_datetime(start, format="%Y-%m-%d_%H:%M:%S") + simulation = amiris_scenario["GeneralProperties"]["Simulation"] + start = pd.to_datetime(simulation["StartTime"], format="%Y-%m-%d_%H:%M:%S") if calendar.isleap(start.year): + # AMIRIS does not considerate leap years start += timedelta(days=1) - end = amiris_scenario["GeneralProperties"]["Simulation"]["StopTime"] - end = pd.to_datetime(end, format="%Y-%m-%d_%H:%M:%S") + end = pd.to_datetime(simulation["StopTime"], format="%Y-%m-%d_%H:%M:%S") # AMIRIS caveat: start and end is always two minutes before actual start start += timedelta(minutes=2) + end += timedelta(minutes=2) sim_id = f"{scenario}_{study_case}" - save_interval = amiris_scenario["GeneralProperties"]["Output"]["Interval"] // 4 + save_interval = 
amiris_scenario["GeneralProperties"]["Output"]["Interval"] + print(save_interval) + #save_interval = 1 prices = {} - index = pd.date_range(start=start, end=end, freq="1h") + index = pd.date_range(start=start, end=end, freq="1h", inclusive="left") world.bidding_strategies["support"] = SupportStrategy await world.setup( start=start, From 5b8ffe349be76e5e364227362aa6d80b414fed40 Mon Sep 17 00:00:00 2001 From: Florian Maurer Date: Mon, 18 Mar 2024 15:22:22 +0100 Subject: [PATCH 6/8] add example on interoperability fixes for grid integration --- assume/common/grid_utils.py | 38 +- assume/common/outputs.py | 5 +- assume/scenario/loader_amiris.py | 2 - assume/scenario/loader_pypsa.py | 2 + .../dashboard-definitions/ASSUME_nodal.json | 2 +- examples/notebooks/07_interoperability.ipynb | 405 ++++++++++++++++++ .../07_interoperability.ipynb.license | 3 + 7 files changed, 435 insertions(+), 22 deletions(-) create mode 100644 examples/notebooks/07_interoperability.ipynb create mode 100644 examples/notebooks/07_interoperability.ipynb.license diff --git a/assume/common/grid_utils.py b/assume/common/grid_utils.py index a9740c08..d3f3e211 100644 --- a/assume/common/grid_utils.py +++ b/assume/common/grid_utils.py @@ -27,16 +27,19 @@ def add_generators( index=network.snapshots, columns=generators.index, ) - + gen_c = generators.copy() + if "p_min_pu" not in gen_c.columns: + gen_c["p_min_pu"] = p_set + if "p_max_pu" not in gen_c.columns: + gen_c["p_max_pu"] = p_set + 1 + if "marginal_cost" not in gen_c.columns: + gen_c["marginal_cost"] = p_set # add generators network.madd( "Generator", names=generators.index, bus=generators["node"], # bus to which the generator is connected to p_nom=generators["max_power"], # Nominal capacity of the powerplant/generator - #p_min_pu=p_set, - #p_max_pu=p_set + 1, - #marginal_cost=p_set, **generators, ) @@ -150,12 +153,6 @@ def add_loads( network (pypsa.Network): the pypsa network to which the loads are loads (pandas.DataFrame): the loads dataframe """ - if "p_set" not in loads.columns: - loads["p_set"] = pd.DataFrame( - np.zeros((len(network.snapshots), len(loads.index))), - index=network.snapshots, - columns=loads.index, - ) # add loads network.madd( @@ -165,6 +162,13 @@ def add_loads( **loads, ) + if "p_set" not in loads.columns: + network.loads_t["p_set"] = pd.DataFrame( + np.zeros((len(network.snapshots), len(loads.index))), + index=network.snapshots, + columns=loads.index, + ) + def add_redispatch_loads( network: pypsa.Network, @@ -177,12 +181,6 @@ def add_redispatch_loads( if "sign" in loads_c.columns: del loads_c["sign"] - if "p_set" not in loads.columns: - loads["p_set"] = pd.DataFrame( - np.zeros((len(network.snapshots), len(loads.index))), - index=network.snapshots, - columns=loads.index, - ) # add loads with opposite sign (default for loads is -1). 
This is needed to properly model the redispatch network.madd( "Load", @@ -192,6 +190,13 @@ def add_redispatch_loads( **loads_c, ) + if "p_set" not in loads.columns: + network.loads_t["p_set"] = pd.DataFrame( + np.zeros((len(network.snapshots), len(loads.index))), + index=network.snapshots, + columns=loads.index, + ) + def add_nodal_loads( network: pypsa.Network, @@ -248,6 +253,7 @@ def add_lines(network: pypsa.Network, lines: pd.DataFrame) -> None: # setup the network add_buses(network, grid_dict["buses"]) add_lines(network, grid_dict["lines"]) + add_loads(network, grid_dict["loads"]) return network diff --git a/assume/common/outputs.py b/assume/common/outputs.py index 3d3da01f..eeb4cd71 100644 --- a/assume/common/outputs.py +++ b/assume/common/outputs.py @@ -12,8 +12,8 @@ import pandas as pd from dateutil import rrule as rr from mango import Role -from pandas.api.types import is_numeric_dtype, is_bool_dtype -from psycopg2.errors import UndefinedColumn, InvalidTextRepresentation +from pandas.api.types import is_bool_dtype, is_numeric_dtype +from psycopg2.errors import InvalidTextRepresentation, UndefinedColumn from sqlalchemy import inspect, text from sqlalchemy.exc import DataError, OperationalError, ProgrammingError @@ -314,7 +314,6 @@ def create_line(row): # now try again with self.db.begin() as db: df.to_sql(geo_table, db, if_exists="append") - def check_columns(self, table: str, df: pd.DataFrame, index: bool = True): """ diff --git a/assume/scenario/loader_amiris.py b/assume/scenario/loader_amiris.py index 20317cf0..228833fa 100644 --- a/assume/scenario/loader_amiris.py +++ b/assume/scenario/loader_amiris.py @@ -482,8 +482,6 @@ async def load_amiris_async( end += timedelta(minutes=2) sim_id = f"{scenario}_{study_case}" save_interval = amiris_scenario["GeneralProperties"]["Output"]["Interval"] - print(save_interval) - #save_interval = 1 prices = {} index = pd.date_range(start=start, end=end, freq="1h", inclusive="left") world.bidding_strategies["support"] = SupportStrategy diff --git a/assume/scenario/loader_pypsa.py b/assume/scenario/loader_pypsa.py index 675fa5d2..64ae2c30 100644 --- a/assume/scenario/loader_pypsa.py +++ b/assume/scenario/loader_pypsa.py @@ -60,6 +60,8 @@ async def load_pypsa_async( columns={"bus": "node", "p_nom": "max_power"}, inplace=True ) network.loads.rename(columns={"bus": "node", "p_set": "min_power"}, inplace=True) + if "max_power" not in network.loads.columns: + network.loads["max_power"] = 0 grid_data = { "buses": network.buses, "lines": network.lines, diff --git a/docker_configs/dashboard-definitions/ASSUME_nodal.json b/docker_configs/dashboard-definitions/ASSUME_nodal.json index 919f62c9..7633d388 100644 --- a/docker_configs/dashboard-definitions/ASSUME_nodal.json +++ b/docker_configs/dashboard-definitions/ASSUME_nodal.json @@ -850,4 +850,4 @@ "uid": "nodalview", "version": 5, "weekStart": "" -} \ No newline at end of file +} diff --git a/examples/notebooks/07_interoperability.ipynb b/examples/notebooks/07_interoperability.ipynb new file mode 100644 index 00000000..2cbc0016 --- /dev/null +++ b/examples/notebooks/07_interoperability.ipynb @@ -0,0 +1,405 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "4JeBorbE6FYr" + }, + "source": [ + "# 7. 
Interoperability and Input-Output\n", + "\n", + "This tutorial describes how ASSUME can be used to create market simulations from energy system simulations as well as other market simulations like AMIRIS.\n", + "A broad comparison towards AMIRIS is submitted to the EEM2024.\n", + "\n", + "This tutorial describes how one can create scenarios from different input sets and use existing scenarios from it.\n", + "\n", + "\n", + "**As a whole, this tutorial covers the following**\n", + "\n", + "1. running a small scenario from CSV folder with the CLI\n", + "\n", + "2. creating a small simulation from scratch as shown in tutorial 01\n", + "\n", + "3. load a scenario from an AMIRIS scenario.yaml\n", + "\n", + "4. load a scenario from a pypsa network" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 1. Scenario from CLI" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "First we need to install assume" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "shellscript" + } + }, + "outputs": [], + "source": [ + "!pip install assume-framework" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If we run in Google Colab, we need to first clone the ASSUME repository there to access the tutorial data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "shellscript" + } + }, + "outputs": [], + "source": [ + "!git clone https://github.com/assume-framework/assume.git\n", + "!cd assume" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we can use the CLI script to run a simulation - relative to the examples folder" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "shellscript" + } + }, + "outputs": [], + "source": [ + "!assume -s example_01a -c tiny -db \"sqlite:///database.db\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Protip: with argcomplete - one can create very nice tab completion for python scripts.\n", + "\n", + "Though one has to run `eval \"$(register-python-argcomplete assume)\"` once in the env before" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We did not use the postgresql database - therefore we can not use our visualization - lets fix this (if you have postgresql and grafana installed - or available through docker)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "shellscript" + } + }, + "outputs": [], + "source": [ + "!assume -s example_01a -c base -db \"postgresql://assume:assume@localhost:5432/assume\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If you have docker at hand, you can see the results on grafana:\n", + "\n", + "http://localhost:3000/?orgId=1&var-simulation=example_01a_base&from=1546300800000&to=1548892800000&refresh=5s" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. 
Run from a script to customize scenario yourself\n", + "\n", + "This is a more advanced option - though it gives full control on what we are doing here:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import logging\n", + "import os\n", + "from datetime import datetime, timedelta\n", + "\n", + "import pandas as pd\n", + "from dateutil import rrule as rr\n", + "\n", + "from assume import World\n", + "from assume.common.forecasts import NaiveForecast\n", + "from assume.common.market_objects import MarketConfig, MarketProduct\n", + "\n", + "log = logging.getLogger(__name__)\n", + "\n", + "os.makedirs(\"./local_db\", exist_ok=True)\n", + "\n", + "db_uri = \"sqlite:///./local_db/assume_db_min_example.db\"\n", + "\n", + "world = World(database_uri=db_uri)\n", + "\n", + "start = datetime(2023, 1, 1)\n", + "end = datetime(2023, 3, 31)\n", + "index = pd.date_range(\n", + " start=start,\n", + " end=end + timedelta(hours=24),\n", + " freq=\"h\",\n", + ")\n", + "sim_id = \"world_script_simulation\"\n", + "\n", + "world.loop.run_until_complete(\n", + " world.setup(\n", + " start=start,\n", + " end=end,\n", + " save_frequency_hours=48,\n", + " simulation_id=sim_id,\n", + " index=index,\n", + " )\n", + ")\n", + "\n", + "marketdesign = [\n", + " MarketConfig(\n", + " market_id=\"EOM\",\n", + " opening_hours=rr.rrule(rr.HOURLY, interval=24, dtstart=start, until=end),\n", + " opening_duration=timedelta(hours=1),\n", + " market_mechanism=\"pay_as_clear\",\n", + " market_products=[MarketProduct(timedelta(hours=1), 24, timedelta(hours=1))],\n", + " additional_fields=[\"block_id\", \"link\", \"exclusive_id\"],\n", + " )\n", + "]\n", + "\n", + "mo_id = \"market_operator\"\n", + "world.add_market_operator(id=mo_id)\n", + "\n", + "for market_config in marketdesign:\n", + " world.add_market(market_operator_id=mo_id, market_config=market_config)\n", + "\n", + " world.add_unit_operator(\"demand_operator\")\n", + "\n", + "demand_forecast = NaiveForecast(index, demand=100)\n", + "\n", + "world.add_unit(\n", + " id=\"demand_unit\",\n", + " unit_type=\"demand\",\n", + " unit_operator_id=\"demand_operator\",\n", + " unit_params={\n", + " \"min_power\": 0,\n", + " \"max_power\": 1000,\n", + " \"bidding_strategies\": {\"EOM\": \"naive_eom\"},\n", + " \"technology\": \"demand\",\n", + " },\n", + " forecaster=demand_forecast,\n", + ")\n", + "\n", + "world.add_unit_operator(\"unit_operator\")\n", + "\n", + "nuclear_forecast = NaiveForecast(index, availability=1, fuel_price=3, co2_price=0.1)\n", + "\n", + "world.add_unit(\n", + " id=\"nuclear_unit\",\n", + " unit_type=\"power_plant\",\n", + " unit_operator_id=\"unit_operator\",\n", + " unit_params={\n", + " \"min_power\": 200,\n", + " \"max_power\": 1000,\n", + " \"bidding_strategies\": {\"EOM\": \"naive_eom\"},\n", + " \"technology\": \"nuclear\",\n", + " },\n", + " forecaster=nuclear_forecast,\n", + ")\n", + "\n", + "world.run()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 3. Load AMIRIS scenario\n", + "\n", + "First we need to download the examples repository from amiris" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "shellscript" + } + }, + "outputs": [], + "source": [ + "!cd .. 
&& git clone https://gitlab.com/dlr-ve/esy/amiris/examples.git amiris-examples" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now that we have the repository at the right place, we can run the amiris scenario:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from assume.scenario.loader_amiris import load_amiris_async\n", + "from assume import World\n", + "\n", + "scenario = \"Simple\" # Germany20{15-19}, Austria2019 or Simple\n", + "base_path = f\"../amiris-examples/{scenario}/\"\n", + "\n", + "db_uri = \"postgresql://assume:assume@localhost:5432/assume\"\n", + "# db_uri = \"sqlite:///test.db\"\n", + "world = World(database_uri=db_uri)\n", + "world.loop.run_until_complete(\n", + " load_amiris_async(\n", + " world,\n", + " \"amiris\",\n", + " scenario.lower(),\n", + " base_path,\n", + " )\n", + ")\n", + "print(f\"did load {scenario} - now simulating\")\n", + "world.run()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can now look at the results here:\n", + "\n", + "http://localhost:3000/d/mQ3Lvkr4k/assume3a-main-overview?orgId=1&var-simulation=amiris_simple&from=1609459200000&to=1609545600000&refresh=5s" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 4. Load PyPSA scenario" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import pypsa\n", + "\n", + "# python-dateutil\n", + "from dateutil import rrule as rr\n", + "from datetime import timedelta\n", + "from assume import World, MarketConfig, MarketProduct\n", + "from assume.scenario.loader_pypsa import load_pypsa_async\n", + "\n", + "db_uri = \"postgresql://assume:assume@localhost:5432/assume\"\n", + "world = World(database_uri=db_uri)\n", + "scenario = \"world_pypsa\"\n", + "study_case = \"ac_dc_meshed\"\n", + "# \"pay_as_clear\", \"redispatch\" or \"nodal\"\n", + "market_mechanism = \"pay_as_clear\"\n", + "\n", + "network = pypsa.examples.ac_dc_meshed(from_master=True)\n", + "# network = pypsa.examples.storage_hvdc(True)\n", + "# network = pypsa.examples.scigrid_de(True, from_master=True)\n", + "\n", + "start = network.snapshots[0]\n", + "end = network.snapshots[-1]\n", + "marketdesign = [\n", + " MarketConfig(\n", + " \"EOM\",\n", + " rr.rrule(rr.HOURLY, interval=1, dtstart=start, until=end),\n", + " timedelta(hours=1),\n", + " market_mechanism,\n", + " [MarketProduct(timedelta(hours=1), 1, timedelta(hours=1))],\n", + " additional_fields=[\"node\", \"max_power\", \"min_power\"],\n", + " maximum_bid_volume=1e9,\n", + " maximum_bid_price=1e9,\n", + " )\n", + "]\n", + "default_strategies = {\n", + " mc.market_id: (\n", + " \"naive_redispatch\" if mc.market_mechanism == \"redispatch\" else \"naive_eom\"\n", + " )\n", + " for mc in marketdesign\n", + "}\n", + "from collections import defaultdict\n", + "\n", + "bidding_strategies = defaultdict(lambda: default_strategies)\n", + "\n", + "world.loop.run_until_complete(\n", + " load_pypsa_async(\n", + " world, scenario, study_case, network, marketdesign, bidding_strategies\n", + " )\n", + ")\n", + "world.run()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [] + } + ], + "metadata": { + "colab": { + "include_colab_link": true, + "provenance": [], + "toc_visible": true + }, + "kernelspec": { + "display_name": "assume-framework", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + 
}, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.7" + }, + "nbsphinx": { + "execute": "never" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/notebooks/07_interoperability.ipynb.license b/examples/notebooks/07_interoperability.ipynb.license new file mode 100644 index 00000000..a6ae0636 --- /dev/null +++ b/examples/notebooks/07_interoperability.ipynb.license @@ -0,0 +1,3 @@ +SPDX-FileCopyrightText: ASSUME Developers + +SPDX-License-Identifier: AGPL-3.0-or-later From 3b729e0163aa9d55097823f5cab1dad361ae5c1d Mon Sep 17 00:00:00 2001 From: Nick Harder Date: Wed, 20 Mar 2024 10:17:22 +0100 Subject: [PATCH 7/8] -fix grid utils -add tutorial 7 to the documentation -fix levels in tutorial --- assume/common/grid_utils.py | 2 +- docs/source/examples/07_interoperability_example.nblink | 1 + .../examples/07_interoperability_example.nblink.license | 3 +++ docs/source/examples_basic.rst | 1 + examples/notebooks/07_interoperability.ipynb | 6 +++--- 5 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 docs/source/examples/07_interoperability_example.nblink create mode 100644 docs/source/examples/07_interoperability_example.nblink.license diff --git a/assume/common/grid_utils.py b/assume/common/grid_utils.py index d3f3e211..5c53fdfe 100644 --- a/assume/common/grid_utils.py +++ b/assume/common/grid_utils.py @@ -40,7 +40,7 @@ def add_generators( names=generators.index, bus=generators["node"], # bus to which the generator is connected to p_nom=generators["max_power"], # Nominal capacity of the powerplant/generator - **generators, + **gen_c, ) diff --git a/docs/source/examples/07_interoperability_example.nblink b/docs/source/examples/07_interoperability_example.nblink new file mode 100644 index 00000000..d4c2779a --- /dev/null +++ b/docs/source/examples/07_interoperability_example.nblink @@ -0,0 +1 @@ +{"path": "../../../examples/notebooks/07_interoperability.ipynb"} diff --git a/docs/source/examples/07_interoperability_example.nblink.license b/docs/source/examples/07_interoperability_example.nblink.license new file mode 100644 index 00000000..a6ae0636 --- /dev/null +++ b/docs/source/examples/07_interoperability_example.nblink.license @@ -0,0 +1,3 @@ +SPDX-FileCopyrightText: ASSUME Developers + +SPDX-License-Identifier: AGPL-3.0-or-later diff --git a/docs/source/examples_basic.rst b/docs/source/examples_basic.rst index 9d7bde42..2a547b75 100644 --- a/docs/source/examples_basic.rst +++ b/docs/source/examples_basic.rst @@ -20,3 +20,4 @@ Here you can find several tutorials on how to use ASSUME framework to get you st examples/04_reinforcement_learning_example.nblink examples/05_market_comparison.nblink examples/06_advanced_orders_example.nblink + examples/07_interoperability_example.nblink diff --git a/examples/notebooks/07_interoperability.ipynb b/examples/notebooks/07_interoperability.ipynb index 2cbc0016..3cb21e52 100644 --- a/examples/notebooks/07_interoperability.ipynb +++ b/examples/notebooks/07_interoperability.ipynb @@ -29,7 +29,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# 1. Scenario from CLI" + "## 1. Scenario from CLI" ] }, { @@ -242,7 +242,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# 3. Load AMIRIS scenario\n", + "## 3. Load AMIRIS scenario\n", "\n", "First we need to download the examples repository from amiris" ] @@ -307,7 +307,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# 4. 
Load PyPSA scenario" + "## 4. Load PyPSA scenario" ] }, { From ede5f9777a4660bbd282d9d98c98d08283fe80ce Mon Sep 17 00:00:00 2001 From: Nick Harder Date: Wed, 20 Mar 2024 10:18:55 +0100 Subject: [PATCH 8/8] -remove loading loads with network --- assume/common/grid_utils.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/assume/common/grid_utils.py b/assume/common/grid_utils.py index 5c53fdfe..e2c7ebec 100644 --- a/assume/common/grid_utils.py +++ b/assume/common/grid_utils.py @@ -253,8 +253,6 @@ def add_lines(network: pypsa.Network, lines: pd.DataFrame) -> None: # setup the network add_buses(network, grid_dict["buses"]) add_lines(network, grid_dict["lines"]) - add_loads(network, grid_dict["loads"]) - return network
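
Illustrative usage sketch (not part of any patch in this series): the series reworks how grids are read into PyPSA via assume.common.grid_utils, so a minimal driver for the reworked helper is sketched below. It is a sketch under stated assumptions — the read_pypsa_grid(network, grid_dict) call shape and its return value are inferred from the hunks above, the column names follow the renaming done in loader_pypsa.py ("bus" -> "node", "p_nom" -> "max_power"), and unit names such as "gen_north" and "demand_south" are hypothetical.

    import pandas as pd
    import pypsa

    from assume.common.grid_utils import read_pypsa_grid

    # Two-bus toy grid mirroring examples/inputs/example_01d; column names are
    # assumptions based on the renaming in loader_pypsa.py
    # ("bus" -> "node", "p_nom" -> "max_power").
    grid_data = {
        "buses": pd.DataFrame({"v_nom": [380.0, 380.0]}, index=["north", "south"]),
        "lines": pd.DataFrame(
            {
                "bus0": ["north"],
                "bus1": ["south"],
                "s_nom": [5000.0],
                "x": [0.01],
                "r": [0.001],
            },
            index=["0"],
        ),
        # hypothetical units: after patch 8, read_pypsa_grid itself no longer
        # imports generators/loads, but the market roles still expect these
        # frames with "node", "max_power" and "min_power" columns
        "generators": pd.DataFrame(
            {"node": ["north"], "max_power": [1000.0]}, index=["gen_north"]
        ),
        "loads": pd.DataFrame(
            {"node": ["south"], "min_power": [0.0], "max_power": [0.0]},
            index=["demand_south"],
        ),
    }

    network = pypsa.Network()
    network.set_snapshots(pd.date_range("2019-01-01", periods=24, freq="h"))

    # After patch 8 this imports only buses and lines (via
    # import_components_from_dataframe); generators and loads are added later
    # by add_generators / add_redispatch_loads / add_nodal_loads.
    network = read_pypsa_grid(network, grid_data)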