Skip to content
Merged
Show file tree
Hide file tree
Changes from 15 commits
Commits
Show all changes
48 commits
Select commit Hold shift + click to select a range
f955b58
- adjusted example to have nodal pricing and redispatch
kim-mskw Nov 20, 2025
d7396e6
- simplified three node example in example_01d base
kim-mskw Dec 2, 2025
5400f08
include s_max_pu in lines.csv
gugrimm Dec 3, 2025
02eb7cc
function to calculate PTDF using PyPSA
gugrimm Dec 3, 2025
6570802
add s_max_pu to line loading calculation in redispatch
gugrimm Dec 3, 2025
0acd5d4
calculate OPF with PTDF: changes to energy balance and transmission c…
gugrimm Dec 3, 2025
659aedf
code snippet to extract dispatch directly from pyomo model
gugrimm Dec 3, 2025
9ac2999
fix error in complex clearing
gugrimm Dec 4, 2025
99287c1
revert "fix error in complex clearing"
gugrimm Dec 8, 2025
1e3b461
Revert "code snippet to extract dispatch directly from pyomo model"
gugrimm Dec 8, 2025
5e0cf9c
Revert "calculate OPF with PTDF: changes to energy balance and transm…
gugrimm Dec 8, 2025
5bbf454
Revert "function to calculate PTDF using PyPSA"
gugrimm Dec 8, 2025
4f2f94f
print warning if 'x' in lines.csv - 'x' not used
gugrimm Dec 8, 2025
7fe808d
add additional fields for nodal clearing in config
gugrimm Dec 8, 2025
17dadad
introduce nodal clearing using PyPSA
gugrimm Dec 8, 2025
a9caf01
- revert example changes for PR
kim-mskw Dec 10, 2025
c02fe21
- fix flow logging of nodal clearing
kim-mskw Dec 10, 2025
e712ae4
added description of NTC based clearing to ComplexClearingRole
gugrimm Dec 16, 2025
2180e0f
integrated storage units bids into NodalClearing
gugrimm Dec 16, 2025
8a91ae3
added orderbook validation
gugrimm Dec 17, 2025
39eb3ff
deleted double validation of volumes in orderbook
gugrimm Dec 17, 2025
22177c3
extra example for nodal clearing
gugrimm Dec 17, 2025
557f7c9
update installation doku with hint on NTC based and nodal clearing
gugrimm Dec 17, 2025
2a20616
print warning "this is a NTC based clearing" earlier, in ComplexClear…
gugrimm Dec 17, 2025
2e401e0
add storages to nodal_clearing
gugrimm Dec 22, 2025
1e1ac85
nodal clearing: test for 2h clearing and 1h clearing with storage
gugrimm Dec 22, 2025
404ff59
avoid KeyError when units did not bid but are present in network
gugrimm Dec 22, 2025
496ecf0
adjusted config from EOM to nodal to match bidding_market pattern of …
gugrimm Dec 22, 2025
eaad9ef
avoid storages present in grid data to bid to nodal market with produ…
gugrimm Dec 22, 2025
5f7372a
set count to 24 again
gugrimm Dec 22, 2025
754039d
adjust opening freq
gugrimm Dec 22, 2025
949a75d
clean up and silence pandas future_stack warning
gugrimm Dec 22, 2025
af3acc1
documentation and release notes
gugrimm Dec 22, 2025
ffcf5b8
removed storage.csv from example 01d
gugrimm Dec 22, 2025
cdfc107
Merge branch 'main' into redispacth_and_nodal_pricing_bug
gugrimm Dec 22, 2025
47a6e76
formatting by pre-commit
gugrimm Dec 22, 2025
3f18d15
Merge branch 'redispacth_and_nodal_pricing_bug' of https://github.com…
gugrimm Dec 22, 2025
fc9ab70
pre-commit
gugrimm Dec 22, 2025
2ddc9f1
Merge branch 'main' of https://github.com/assume-framework/assume int…
kim-mskw Dec 23, 2025
4f332ef
- revert to old base example configuration
kim-mskw Dec 23, 2025
45e8064
- add limitation about market clearing with multiple products
kim-mskw Dec 23, 2025
e631ef6
add check if .csv are present in loader_csv
gugrimm Dec 23, 2025
62451f9
pre-commit
gugrimm Dec 23, 2025
b9bba0a
adjust test
gugrimm Dec 23, 2025
d8f975c
tiny typo
kim-mskw Dec 23, 2025
dd97a27
Merge branch 'redispacth_and_nodal_pricing_bug' of https://github.com…
kim-mskw Dec 23, 2025
08b36e9
ruff
gugrimm Dec 23, 2025
4a7a4d1
Merge remote-tracking branch 'origin' into redispacth_and_nodal_prici…
kim-mskw Dec 23, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion .github/pull_request_template.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
<!-- SPDX-FileCopyrightText: ASSUME Developers -->
<!--
SPDX-FileCopyrightText: ASSUME Developers

SPDX-License-Identifier: AGPL-3.0-or-later
-->

<!-- SPDX-License-Identifier: AGPL-3.0-or-later -->
## Related Issue
Closes #<issue‑number>
Expand Down
3 changes: 2 additions & 1 deletion assume/markets/clearing_algorithms/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,8 @@
# try importing pypsa if it is installed
try:
from .redispatch import RedispatchMarketRole

from .nodal_clearing import NodalClearingRole
clearing_mechanisms["redispatch"] = RedispatchMarketRole
clearing_mechanisms["nodal_clearing"] = NodalClearingRole
except ImportError:
pass
12 changes: 11 additions & 1 deletion assume/markets/clearing_algorithms/complex_clearing.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,10 +177,20 @@ def energy_balance_rule(model, node, t):
)

if incidence_matrix is not None:
if 'x' in lines.columns:
print("Warning: 'lines.csv' contains reactances 'x' but this clearing is based on Net Transfer Capacities only (Transport model). Use 'nodal_clearing' to include a linear OPF.")
# add NTCs
model.transmission_constr = pyo.ConstraintList()
for t in model.T:
for line in model.lines:
capacity = lines.at[line, "s_nom"]
# s_max_pu might also be time variant. but for now we assume it is static
s_max_pu = (
lines.at[line, "s_max_pu"]
if "s_max_pu" in lines.columns and not
pd.isna(lines.at[line, "s_max_pu"])
else 1.0
)
capacity = lines.at[line, "s_nom"] * s_max_pu
# Limit the flow on each line
model.transmission_constr.add(model.flows[t, line] <= capacity)
model.transmission_constr.add(model.flows[t, line] >= -capacity)
Expand Down
306 changes: 306 additions & 0 deletions assume/markets/clearing_algorithms/nodal_clearing.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,306 @@
# SPDX-FileCopyrightText: ASSUME Developers
#
# SPDX-License-Identifier: AGPL-3.0-or-later

import logging
from datetime import timedelta
from operator import itemgetter

import pandas as pd
import pypsa

from assume.common.market_objects import MarketConfig, MarketProduct, Orderbook
from assume.common.utils import create_incidence_matrix
from assume.markets.base_market import MarketRole

from assume.common.grid_utils import read_pypsa_grid

logger = logging.getLogger(__name__)

# Set the log level to WARNING
logging.getLogger("linopy").setLevel(logging.WARNING)
logging.getLogger("pypsa").setLevel(logging.WARNING)

def calculate_meta(accepted_demand_orders, accepted_supply_orders, product):
    """
    Build the market meta record for a single traded product.

    Args:
        accepted_demand_orders: accepted orders with negative ``accepted_volume``.
        accepted_supply_orders: accepted orders with positive ``accepted_volume``.
        product: tuple of (start, end, only_hours) describing the product.

    Returns:
        dict: aggregated supply/demand volumes (power and energy), the
        volume-weighted average supply price, and price extrema. ``node`` is
        ``None`` because this summary is not node-specific.
    """
    supply_volume = sum(order["accepted_volume"] for order in accepted_supply_orders)
    # demand volumes are stored negative; report them as a positive quantity
    demand_volume = -sum(order["accepted_volume"] for order in accepted_demand_orders)
    supply_prices = [order["accepted_price"] for order in accepted_supply_orders] or [0]
    duration_hours = (product[1] - product[0]) / timedelta(hours=1)

    if supply_volume:
        # volume-weighted average over all accepted supply orders
        turnover = sum(
            order["accepted_volume"] * order["accepted_price"]
            for order in accepted_supply_orders
        )
        avg_price = turnover / supply_volume
    else:
        avg_price = 0

    return {
        "supply_volume": supply_volume,
        "demand_volume": demand_volume,
        "demand_volume_energy": demand_volume * duration_hours,
        "supply_volume_energy": supply_volume * duration_hours,
        "price": avg_price,
        "max_price": max(supply_prices),
        "min_price": min(supply_prices),
        "node": None,
        "product_start": product[0],
        "product_end": product[1],
        "only_hours": product[2],
    }

class NodalClearingRole(MarketRole):
    """
    This class implements a nodal market clearing mechanism using a linear optimal power flow (OPF) approach.

    All bidding units (generation and demand) are represented as PyPSA
    generators; demand units get a negative p_min_pu so their dispatch is
    negative. Clearing prices are the nodal marginal prices of the solved
    linear OPF.
    """

    # orderbook validation: every order must carry its grid node and max power
    required_fields = ["node", "max_power"]

    def __init__(self, marketconfig: MarketConfig):
        """
        Set up the PyPSA network from ``grid_data`` and register all bidding
        units as generators.

        Raises:
            ValueError: if ``grid_data`` is missing or the configured
                pricing mechanism is unknown.
        """
        super().__init__(marketconfig)

        self.network = pypsa.Network()

        if not self.grid_data:
            logger.error(f"Market '{marketconfig.market_id}': grid_data is missing.")
            raise ValueError("grid_data is missing.")

        # Define grid data (defaults for the trivial single-node case)
        self.nodes = ["node0"]
        self.incidence_matrix = None

        self.lines = self.grid_data["lines"]
        buses = self.grid_data["buses"]

        self.zones_id = self.marketconfig.param_dict.get("zones_identifier")
        self.node_to_zone = None

        # Generate the incidence matrix and set the nodes based on zones or individual buses
        if self.zones_id:
            # Zonal Case: nodes are the zones, mapping bus -> zone is kept
            self.incidence_matrix = create_incidence_matrix(
                self.lines, buses, zones_id=self.zones_id
            )
            self.nodes = buses[self.zones_id].unique()
            self.node_to_zone = buses[self.zones_id].to_dict()
        else:
            # Nodal Case: every bus is a node
            self.incidence_matrix = create_incidence_matrix(self.lines, buses)
            self.nodes = buses.index.values

        self.log_flows = self.marketconfig.param_dict.get("log_flows", False)
        self.pricing_mechanism = self.marketconfig.param_dict.get(
            "pricing_mechanism", "pay_as_clear"
        )
        if self.pricing_mechanism not in ["pay_as_bid", "pay_as_clear"]:
            logger.error(
                f"Market '{marketconfig.market_id}': Invalid payment mechanism '{self.pricing_mechanism}'."
            )
            raise ValueError("Invalid payment mechanism.")

        # populate self.network (buses, lines, ...) from the grid data dict
        read_pypsa_grid(
            network=self.network,
            grid_dict=self.grid_data,
        )

        # add all units to the PyPSA network as generators with p_nom their absolute max power
        # generators have p_min_pu - p_max_pu 0 to 1
        self.network.add("Generator",
            self.grid_data['generators'].index,
            bus=self.grid_data['generators']['node'],
            p_nom=self.grid_data['generators']['max_power'],
            p_min_pu=0,
            p_max_pu=1,
        )
        # demand units have p_min_pu - p_max_pu -1 to 0
        self.network.add("Generator",
            self.grid_data['loads'].index,
            bus=self.grid_data['loads']['node'],
            p_nom=self.grid_data['loads']['max_power'],
            p_min_pu=-1,
            p_max_pu=0,
        )
        # storage units
        # TODO: how are storages included in grid data dict?
        # also add them as generators, as we only regard bids here

        # solver selection; per-solver options silence solver logging
        self.solver = marketconfig.param_dict.get("solver", "highs")
        if self.solver == "gurobi":
            self.solver_options = {"LogToConsole": 0, "OutputFlag": 0}
        elif self.solver == "highs":
            self.solver_options = {"output_flag": False, "log_to_console": False}
        else:
            self.solver_options = {}

    def clear(
        self, orderbook: Orderbook, market_products
    ) -> tuple[Orderbook, Orderbook, list[dict], dict[tuple, float]]:
        """
        Performs nodal clearing based on optimal linear power flow.
        The returned orderbook contains accepted orders with the accepted volumes and prices.

        Args:
            orderbook (Orderbook): The orderbook to be cleared.
            market_products (list[MarketProduct]): The products for which clearing happens.

        Returns:
            Tuple[Orderbook, Orderbook, List[dict]]: The accepted orderbook, rejected orderbook and market metadata.
        """

        if len(orderbook) == 0:
            return super().clear(orderbook, market_products)
        orderbook_df = pd.DataFrame(orderbook)
        orderbook_df["accepted_volume"] = 0.0
        orderbook_df["accepted_price"] = 0.0
        # snapshots = range(self.marketconfig.market_products[0].count)
        # one snapshot per product, from first product start to last product start
        snapshots = pd.date_range(
            start=market_products[0][0],  # start time
            end=market_products[-1][0],  # end time
            freq=self.marketconfig.market_products[0].duration)

        # Now you can pivot the DataFrame
        # NOTE(review): pivot assumes at most one order per (start_time, unit_id);
        # units that did not bid for every snapshot yield NaN cells — confirm
        # upstream orderbook validation guarantees complete bids.
        volume_pivot = orderbook_df.pivot(
            index="start_time", columns="unit_id", values="volume"
        )
        #volume_pivot.index = snapshots
        price_pivot = orderbook_df.pivot(
            index="start_time", columns="unit_id", values="price"
        )
        #price_pivot.index = snapshots
        # Copy the network so the template network stays pristine between clearings
        n = self.network.copy()

        # set snapshots as the products start time, end time and freq = duration of the products
        # snapshots = pd.date_range(self.marketconfig.opening_hours._dtstart,
        #                           self.marketconfig.opening_hours._until,
        #                           freq=self.marketconfig.market_products[0].duration)
        n.set_snapshots(snapshots)

        # Update p_max_pu for all units based on their bids in the actual snapshots
        # generators: bid volume caps the dispatch (per-unit of p_nom)
        gen_idx = self.grid_data['generators'].index
        n.generators_t.p_max_pu.loc[snapshots, gen_idx] = volume_pivot[gen_idx] / n.generators.loc[gen_idx, 'p_nom'].values
        n.generators_t.marginal_cost.loc[snapshots, gen_idx] = price_pivot[gen_idx]
        # demand: (negative) bid volume floors the dispatch
        demand_idx = self.grid_data['loads'].index
        n.generators_t.p_min_pu.loc[snapshots, demand_idx] = volume_pivot[demand_idx] / n.generators.loc[demand_idx, 'p_nom'].values
        n.generators_t.marginal_cost.loc[snapshots, demand_idx] = price_pivot[demand_idx]

        # run linear optimal powerflow
        # NOTE(review): recent PyPSA versions spell the kwarg `solver_name=`,
        # not `solver=` — confirm against the pinned PyPSA version.
        n.optimize.fix_optimal_capacities()
        n.optimize(#snapshots=volume_pivot.index,
            solver=self.solver,
            solver_options=self.solver_options,
        )

        # Find intersection of unit_ids in orderbook_df and columns in n.generators_t.p
        valid_units = orderbook_df["unit_id"].unique()
        dispatch = n.generators_t.p

        for unit in valid_units:
            if unit in dispatch.columns:
                # get accepted volume and price for each time snapshot
                accepted_volumes = dispatch[unit]
                if self.pricing_mechanism == "pay_as_clear":
                    # nodal marginal price at the unit's bus
                    accepted_prices = n.buses_t.marginal_price.loc[:, n.generators.loc[unit, 'bus']]
                elif self.pricing_mechanism == "pay_as_bid":
                    accepted_prices = price_pivot[unit]
                else:
                    raise ValueError("Invalid pricing mechanism.")

                # update orderbook_df with accepted volumes and prices
                # NOTE(review): boolean-mask assignment per unit and snapshot is
                # O(units * snapshots * orders) — fine for small books, consider
                # an indexed merge if this becomes a hot spot.
                for t, (vol, price) in enumerate(zip(accepted_volumes, accepted_prices)):
                    mask = (orderbook_df["unit_id"] == unit) & (orderbook_df["start_time"] == snapshots[t])
                    orderbook_df.loc[mask, "accepted_volume"] = vol
                    orderbook_df.loc[mask, "accepted_price"] = price

        # return orderbook_df back to orderbook format as list of dicts
        accepted_orders = orderbook_df[orderbook_df["accepted_volume"] != 0].to_dict(
            "records"
        )
        rejected_orders = orderbook_df[orderbook_df["accepted_volume"] == 0].to_dict(
            "records"
        )
        # nested dict: bus -> snapshot -> marginal price
        market_clearing_prices = n.buses_t.marginal_price.to_dict()

        meta = []
        flows = {}

        accepted_orders, rejected_orders, meta, flows = extract_results(
            network=n,
            accepted_orders=accepted_orders,
            rejected_orders=rejected_orders,
            market_products=market_products,
            market_clearing_prices=market_clearing_prices,
            log_flows=self.log_flows,
        )

        return accepted_orders, rejected_orders, meta, flows


def extract_results(
    network: "pypsa.Network",
    accepted_orders: "Orderbook",
    rejected_orders: "Orderbook",
    market_products: "list[MarketProduct]",
    market_clearing_prices: dict,
    log_flows: bool = False,
):
    """
    Extracts the results of the market clearing from the solved PyPSA model.

    Args:
        network (pypsa.Network): The PyPSA network after solving the market clearing.
        accepted_orders (Orderbook): List of the accepted orders.
        rejected_orders (Orderbook): List of the rejected orders.
        market_products (list[MarketProduct]): The products to be traded.
        market_clearing_prices (dict): Nodal clearing prices keyed by node, then by timestamp.
        log_flows (bool): Whether to log network flows.

    Returns:
        tuple[Orderbook, Orderbook, list[dict], dict]: The accepted orders, rejected
        orders, meta information per node and product, and network flows.
    """
    meta = []
    # Pre-seed one volume counter per network bus and snapshot. Orders referring
    # to nodes or start times unknown to the network are accumulated on the fly
    # below instead of raising a KeyError (a unit can appear in the orderbook
    # without being part of the grid data, and vice versa).
    supply_volume_dict = {
        node: {t: 0.0 for t in network.snapshots} for node in network.buses.index
    }
    demand_volume_dict = {
        node: {t: 0.0 for t in network.snapshots} for node in network.buses.index
    }

    for order in accepted_orders:
        node = order["node"]
        t = order["start_time"]
        # positive accepted volumes are supply, negative ones demand
        if order["accepted_volume"] > 0:
            node_supply = supply_volume_dict.setdefault(node, {})
            node_supply[t] = node_supply.get(t, 0.0) + order["accepted_volume"]
        else:
            node_demand = demand_volume_dict.setdefault(node, {})
            node_demand[t] = node_demand.get(t, 0.0) + order["accepted_volume"]

    # write the meta information for each node and each product of the clearing period
    for node in market_clearing_prices:
        for product in market_products:
            t = product[0]
            clear_price = market_clearing_prices[node][t]
            # default to 0.0 for (node, t) combinations without accepted orders
            supply_volume = supply_volume_dict.get(node, {}).get(t, 0.0)
            demand_volume = demand_volume_dict.get(node, {}).get(t, 0.0)
            duration_hours = (product[1] - product[0]) / timedelta(hours=1)

            meta.append(
                {
                    "supply_volume": supply_volume,
                    # demand volumes are stored negative; report them positive
                    "demand_volume": -demand_volume,
                    "demand_volume_energy": -demand_volume * duration_hours,
                    "supply_volume_energy": supply_volume * duration_hours,
                    "price": clear_price,
                    "max_price": clear_price,
                    "min_price": clear_price,
                    "node": node,
                    "product_start": product[0],
                    "product_end": product[1],
                    "only_hours": product[2],
                }
            )

    flows = {}
    if log_flows:
        # time series of active power flow at bus0 of each line
        flows = network.lines_t.p0
        # flows = network.lines_t.p0.to_dict()

    return accepted_orders, rejected_orders, meta, flows
2 changes: 1 addition & 1 deletion assume/markets/clearing_algorithms/redispatch.py
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,7 @@ def clear(

# check lines for congestion where power flow is larger than s_nom
line_loading = (
redispatch_network.lines_t.p0.abs() / redispatch_network.lines.s_nom
redispatch_network.lines_t.p0.abs() / (redispatch_network.lines.s_nom * redispatch_network.lines.s_max_pu)
)

# if any line is congested, perform redispatch
Expand Down
4 changes: 2 additions & 2 deletions examples/examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,10 +114,10 @@

# select to store the simulation results in a local database or in timescale
# when using timescale, you need to have docker installed and can access the grafana dashboard
data_format = "local_db" # "local_db" or "timescale"
data_format = "timescale" # "local_db" or "timescale"

# select the example to run from the available examples above
example = "small_with_vre_and_storage"
example = "small_with_redispatch"

if data_format == "local_db":
db_uri = "sqlite:///./examples/local_db/assume_db.db"
Expand Down
8 changes: 3 additions & 5 deletions examples/inputs/example_01d/buses.csv
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
name,v_nom,zone_id,x,y
north_1,380.0,DE_1,10.0,54.0
north_2,380.0,DE_1,9.5,53.5
east,380.0,DE_1,13.4,52.5
south,380.0,DE_2,11.6,48.1
west,380.0,DE_2,7.0,51.5
north_1,380.0,north_1,10.0,54.0
north_2,380.0,north_2,9.5,53.5
south,380.0,south,11.6,48.1
Loading