Skip to content

Commit

Permalink
Merge pull request #323 from assume-framework/interoperability_tutorial
Browse files Browse the repository at this point in the history
Interoperability tutorial and fixes for grid representation

fixes #314
  • Loading branch information
maurerle authored Mar 20, 2024
2 parents af00804 + ede5f97 commit 6a892b9
Show file tree
Hide file tree
Showing 19 changed files with 557 additions and 254 deletions.
5 changes: 4 additions & 1 deletion assume/common/forecasts.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,10 @@ def set_forecast(self, data: pd.DataFrame | pd.Series | None, prefix=""):
self.forecasts[column] = data[column].item()
else:
# Add new columns to the existing DataFrame, overwriting any existing columns with the same names
self.forecasts = self.forecasts.assign(**data)
new_columns = set(data.columns) - set(self.forecasts.columns)
self.forecasts = pd.concat(
[self.forecasts, data[list(new_columns)]], axis=1
)
else:
self.forecasts[prefix + data.name] = data

Expand Down
74 changes: 39 additions & 35 deletions assume/common/grid_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,17 +27,20 @@ def add_generators(
index=network.snapshots,
columns=generators.index,
)

gen_c = generators.copy()
if "p_min_pu" not in gen_c.columns:
gen_c["p_min_pu"] = p_set
if "p_max_pu" not in gen_c.columns:
gen_c["p_max_pu"] = p_set + 1
if "marginal_cost" not in gen_c.columns:
gen_c["marginal_cost"] = p_set
# add generators
network.madd(
"Generator",
names=generators.index,
bus=generators["node"], # bus to which the generator is connected
p_nom=generators["max_power"], # Nominal capacity of the powerplant/generator
p_min_pu=p_set,
p_max_pu=p_set + 1,
marginal_cost=p_set,
**generators,
**gen_c,
)


Expand Down Expand Up @@ -150,21 +153,22 @@ def add_loads(
network (pypsa.Network): the pypsa network to which the loads are
loads (pandas.DataFrame): the loads dataframe
"""
p_set = pd.DataFrame(
np.zeros((len(network.snapshots), len(loads.index))),
index=network.snapshots,
columns=loads.index,
)

# add loads
network.madd(
"Load",
names=loads.index,
bus=loads["node"], # bus to which the load is connected
p_set=p_set,
**loads,
)

if "p_set" not in loads.columns:
network.loads_t["p_set"] = pd.DataFrame(
np.zeros((len(network.snapshots), len(loads.index))),
index=network.snapshots,
columns=loads.index,
)


def add_redispatch_loads(
network: pypsa.Network,
Expand All @@ -173,23 +177,26 @@ def add_redispatch_loads(
"""
This adds loads to the redispatch PyPSA network with respective bus data to which they are connected
"""
loads_c = loads.copy()
if "sign" in loads_c.columns:
del loads_c["sign"]

p_set = pd.DataFrame(
np.zeros((len(network.snapshots), len(loads.index))),
index=network.snapshots,
columns=loads.index,
)

# add loads with opposite sing (default for loads is -1). This is needed to properly model the redispatch
# add loads with opposite sign (default for loads is -1). This is needed to properly model the redispatch
network.madd(
"Load",
names=loads.index,
bus=loads["node"], # bus to which the load is connected
p_set=p_set,
sign=1,
**loads,
**loads_c,
)

if "p_set" not in loads.columns:
network.loads_t["p_set"] = pd.DataFrame(
np.zeros((len(network.snapshots), len(loads.index))),
index=network.snapshots,
columns=loads.index,
)


def add_nodal_loads(
network: pypsa.Network,
Expand All @@ -200,12 +207,15 @@ def add_nodal_loads(
The loads are added as generators with negative sign so their dispatch can be also curtailed,
since regular load in PyPSA represents only an inelastic demand.
"""

p_set = pd.DataFrame(
np.zeros((len(network.snapshots), len(loads.index))),
index=network.snapshots,
columns=loads.index,
)
loads_c = loads.copy()

if "sign" in loads_c.columns:
del loads_c["sign"]

# add loads as negative generators
network.madd(
Expand All @@ -217,7 +227,7 @@ def add_nodal_loads(
p_max_pu=p_set + 1,
marginal_cost=p_set,
sign=-1,
**loads,
**loads_c,
)


Expand All @@ -235,23 +245,14 @@ def read_pypsa_grid(
"""

def add_buses(network: pypsa.Network, buses: pd.DataFrame) -> None:
network.madd(
"Bus",
names=buses.index,
**buses,
)
network.import_components_from_dataframe(buses, "Bus")

def add_lines(network: pypsa.Network, lines: pd.DataFrame) -> None:
network.madd(
"Line",
names=lines.index,
**lines,
)
network.import_components_from_dataframe(lines, "Line")

# setup the network
add_buses(network, grid_dict["buses"])
add_lines(network, grid_dict["lines"])

return network


Expand Down Expand Up @@ -286,7 +287,10 @@ def calculate_network_meta(network, product: MarketProduct, i: int):

supply_volume = dispatch_for_bus[dispatch_for_bus > 0].sum()
demand_volume = dispatch_for_bus[dispatch_for_bus < 0].sum()
price = network.buses_t.marginal_price[bus].iat[i]
if not network.buses_t.marginal_price.empty:
price = network.buses_t.marginal_price[str(bus)].iat[i]
else:
price = 0

meta.append(
{
Expand All @@ -295,7 +299,7 @@ def calculate_network_meta(network, product: MarketProduct, i: int):
"demand_volume_energy": demand_volume * duration_hours,
"supply_volume_energy": supply_volume * duration_hours,
"price": price,
"node_id": bus,
"node": bus,
"product_start": product[0],
"product_end": product[1],
"only_hours": product[2],
Expand Down
49 changes: 20 additions & 29 deletions assume/common/outputs.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@
import pandas as pd
from dateutil import rrule as rr
from mango import Role
from pandas.api.types import is_numeric_dtype
from psycopg2.errors import UndefinedColumn
from pandas.api.types import is_bool_dtype, is_numeric_dtype
from psycopg2.errors import InvalidTextRepresentation, UndefinedColumn
from sqlalchemy import inspect, text
from sqlalchemy.exc import DataError, OperationalError, ProgrammingError

Expand Down Expand Up @@ -306,32 +306,12 @@ def create_line(row):
df.reset_index()

try:
# try to use geopandas
# needed for postGIS writing
import geoalchemy2
import geopandas as gpd
from shapely.wkt import loads

def load_wkt(string: str):
return loads(string.split(";")[1])

df["geometry"] = df["wkt_srid_4326"].apply(load_wkt)
df = gpd.GeoDataFrame(df, geometry="geometry")
df.set_crs(crs="EPSG:4326", inplace=True)
# postgis does not lowercase tablenames
df.columns = map(str.lower, df.columns)
try:
# try to input as geodataframe
with self.db.begin() as db:
df.to_postgis(geo_table, db, if_exists="append", index=True)
except (ProgrammingError, OperationalError, DataError, UndefinedColumn):
# if a column is missing, check and try again
self.check_columns(geo_table, df)
# now try again
with self.db.begin() as db:
df.to_postgis(geo_table, db, if_exists="append", index=True)
except ImportError:
# otherwise, just use plain SQL anyway
with self.db.begin() as db:
df.to_sql(geo_table, db, if_exists="append")
except (ProgrammingError, OperationalError, DataError, UndefinedColumn):
# if a column is missing, check and try again
self.check_columns(geo_table, df)
# now try again
with self.db.begin() as db:
df.to_sql(geo_table, db, if_exists="append")

Expand All @@ -352,7 +332,12 @@ def check_columns(self, table: str, df: pd.DataFrame, index: bool = True):
if column.lower() not in db_columns:
try:
# TODO this only works for float and text
column_type = "float" if is_numeric_dtype(df[column]) else "text"
if is_bool_dtype(df[column]):
column_type = "boolean"
elif is_numeric_dtype(df[column]):
column_type = "float"
else:
column_type = "text"
query = f"ALTER TABLE {table} ADD COLUMN {column} {column_type}"
with self.db.begin() as db:
db.execute(text(query))
Expand Down Expand Up @@ -411,6 +396,12 @@ def write_market_orders(self, market_orders: any, market_id: str):
del df["only_hours"]
del df["agent_id"]

if "bid_type" not in df.columns:
df["bid_type"] = None

if "node" not in df.columns:
df["node"] = None

df["simulation"] = self.simulation_id
df["market_id"] = market_id

Expand Down
4 changes: 3 additions & 1 deletion assume/markets/base_market.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,9 @@ def setup(self):
self.marketconfig.aid = self.context.aid

for field in self.required_fields:
assert field in self.marketconfig.additional_fields, "missing field"
assert (
field in self.marketconfig.additional_fields
), f"{field} missing from additional_fields"

def accept_orderbook(content: OrderBookMessage, meta: MetaDict):
if not isinstance(content, dict):
Expand Down
2 changes: 1 addition & 1 deletion assume/markets/clearing_algorithms/nodal_pricing.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ class NodalMarketRole(MarketRole):
"""

required_fields = ["node"]
required_fields = ["node", "max_power", "min_power"]

def __init__(self, marketconfig: MarketConfig):
super().__init__(marketconfig)
Expand Down
2 changes: 1 addition & 1 deletion assume/markets/clearing_algorithms/redispatch.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ class RedispatchMarketRole(MarketRole):
"""

required_fields = ["node"]
required_fields = ["node", "max_power", "min_power"]

def __init__(self, marketconfig: MarketConfig):
super().__init__(marketconfig)
Expand Down
13 changes: 7 additions & 6 deletions assume/scenario/loader_amiris.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,7 @@ def add_agent_to_world(
clearing_section["DistributionMethod"]
],
market_products=[
MarketProduct(timedelta(hours=1), 24, timedelta(hours=1))
MarketProduct(timedelta(hours=1), 24, timedelta(hours=0))
],
maximum_bid_volume=1e6,
)
Expand Down Expand Up @@ -471,16 +471,17 @@ async def load_amiris_async(
amiris_scenario = read_amiris_yaml(base_path)
# DeliveryIntervalInSteps = 3600
# In practice - this seems to be a fixed number in AMIRIS
start = amiris_scenario["GeneralProperties"]["Simulation"]["StartTime"]
start = pd.to_datetime(start, format="%Y-%m-%d_%H:%M:%S")
simulation = amiris_scenario["GeneralProperties"]["Simulation"]
start = pd.to_datetime(simulation["StartTime"], format="%Y-%m-%d_%H:%M:%S")
if calendar.isleap(start.year):
# AMIRIS does not consider leap years
start += timedelta(days=1)
end = amiris_scenario["GeneralProperties"]["Simulation"]["StopTime"]
end = pd.to_datetime(end, format="%Y-%m-%d_%H:%M:%S")
end = pd.to_datetime(simulation["StopTime"], format="%Y-%m-%d_%H:%M:%S")
# AMIRIS caveat: start and end are always two minutes before the actual start
start += timedelta(minutes=2)
end += timedelta(minutes=2)
sim_id = f"{scenario}_{study_case}"
save_interval = amiris_scenario["GeneralProperties"]["Output"]["Interval"] // 4
save_interval = amiris_scenario["GeneralProperties"]["Output"]["Interval"]
prices = {}
index = pd.date_range(start=start, end=end, freq="1h", inclusive="left")
world.bidding_strategies["support"] = SupportStrategy
Expand Down
24 changes: 17 additions & 7 deletions assume/scenario/loader_pypsa.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,12 +55,20 @@ async def load_pypsa_async(

mo_id = "market_operator"
world.add_market_operator(id=mo_id)

network.generators.rename(
columns={"bus": "node", "p_nom": "max_power"}, inplace=True
)
network.loads.rename(columns={"bus": "node", "p_set": "min_power"}, inplace=True)
if "max_power" not in network.loads.columns:
network.loads["max_power"] = 0
grid_data = {
"buses": network.buses,
"lines": network.lines,
"generators": network.generators,
"loads": network.loads,
}

for market_config in marketdesign:
market_config.param_dict["grid_data"] = grid_data
world.add_market(mo_id, market_config)
Expand All @@ -74,7 +82,7 @@ async def load_pypsa_async(

unit_type = "power_plant"

max_power = generator.p_nom or 1000
max_power = generator.max_power or 1000
# if p_nom is not set, generator.p_nom_extendable must be True
ramp_up = generator.ramp_limit_start_up * max_power
ramp_down = generator.ramp_limit_shut_down * max_power
Expand All @@ -86,8 +94,8 @@ async def load_pypsa_async(
"min_power": generator.p_nom_min,
"max_power": max_power,
"bidding_strategies": bidding_strategies[generator.name],
"technology": "demand",
"node": generator.bus,
"technology": "conventional",
"node": generator.node,
"efficiency": generator.efficiency,
"fuel_type": generator.carrier,
"ramp_up": ramp_up,
Expand Down Expand Up @@ -120,7 +128,7 @@ async def load_pypsa_async(
"max_power": load_t.max(),
"bidding_strategies": bidding_strategies[load.name],
"technology": "demand",
"node": load.bus,
"node": load.node,
"price": 1e3,
},
NaiveForecast(index, demand=load_t),
Expand Down Expand Up @@ -163,8 +171,8 @@ async def load_pypsa_async(
world = World(database_uri=db_uri)
scenario = "world_pypsa"
study_case = "scigrid_de"
# "pay_as_clear" or "redispatch"
market_mechanism = "redispatch"
# "pay_as_clear", "redispatch" or "nodal"
market_mechanism = "nodal"

match study_case:
case "ac_dc_meshed":
Expand All @@ -177,6 +185,8 @@ async def load_pypsa_async(
print("invalid studycase")
network = pd.DataFrame()

study_case += market_mechanism

start = network.snapshots[0]
end = network.snapshots[-1]
marketdesign = [
Expand All @@ -186,7 +196,7 @@ async def load_pypsa_async(
timedelta(hours=1),
market_mechanism,
[MarketProduct(timedelta(hours=1), 1, timedelta(hours=1))],
additional_fields=["node"],
additional_fields=["node", "max_power", "min_power"],
maximum_bid_volume=1e9,
maximum_bid_price=1e9,
)
Expand Down
Loading

0 comments on commit 6a892b9

Please sign in to comment.