diff --git a/README.md b/README.md
index 650f581cd..f68d0cc17 100644
--- a/README.md
+++ b/README.md
@@ -72,7 +72,7 @@ pip install 'assume-framework[learning]'
 Please keep in mind, that the above installation method will install pytorch package without CUDA support. If you want to make use of your GPU with CUDA cores, please install pytorch with GPU support separately as described [here](https://pytorch.org/get-started/locally/).

-We also include **network-based market clearing algorithms** such as for the re-dispatch or nodal market clearing, which requires the PyPSA library.
+We also include **network-based market clearing algorithms** such as redispatch, zonal clearing with NTCs, and nodal market clearing, all of which require the PyPSA library.

 To install the package with these capabilities, use:

 ```bash
diff --git a/assume/markets/clearing_algorithms/__init__.py b/assume/markets/clearing_algorithms/__init__.py
index e7838ef20..c45468a1e 100644
--- a/assume/markets/clearing_algorithms/__init__.py
+++ b/assume/markets/clearing_algorithms/__init__.py
@@ -21,7 +21,9 @@
 # try importing pypsa if it is installed
 try:
     from .redispatch import RedispatchMarketRole
+    from .nodal_clearing import NodalClearingRole

     clearing_mechanisms["redispatch"] = RedispatchMarketRole
+    clearing_mechanisms["nodal_clearing"] = NodalClearingRole
 except ImportError:
     pass
diff --git a/assume/markets/clearing_algorithms/complex_clearing.py b/assume/markets/clearing_algorithms/complex_clearing.py
index a2dfbde30..85b1c121a 100644
--- a/assume/markets/clearing_algorithms/complex_clearing.py
+++ b/assume/markets/clearing_algorithms/complex_clearing.py
@@ -177,10 +177,18 @@ def energy_balance_rule(model, node, t):
         )

     if incidence_matrix is not None:
+        # add NTCs
         model.transmission_constr = pyo.ConstraintList()
         for t in model.T:
             for line in model.lines:
-                capacity = lines.at[line, "s_nom"]
+                # s_max_pu might also be time-variant, but for now we assume it is static
+                s_max_pu = (
+                    lines.at[line, "s_max_pu"]
+                    if "s_max_pu" in lines.columns
+                    and not pd.isna(lines.at[line, "s_max_pu"])
+                    else 1.0
+                )
+                capacity = lines.at[line, "s_nom"] * s_max_pu
                 # Limit the flow on each line
                 model.transmission_constr.add(model.flows[t, line] <= capacity)
                 model.transmission_constr.add(model.flows[t, line] >= -capacity)
@@ -290,8 +298,8 @@ class ComplexClearingRole(MarketRole):
     """
     This class defines an optimization-based market clearing algorithm with support for complex bid types,
     including block bids, linked bids, minimum acceptance ratios, and profiled volumes. It supports network
-    representations with either zonal or nodal configurations, enabling the modeling of complex markets with
-    multiple zones and power flow constraints.
+    representations (through Net Transfer Capacities) with either zonal or nodal configurations, enabling
+    the modeling of complex markets with multiple zones based on a transport model.

     The market clearing algorithm accepts additional arguments via the ``param_dict`` in the market configuration.
@@ -344,6 +352,11 @@ def __init__(self, marketconfig: MarketConfig):
         self.lines = self.grid_data["lines"]
         buses = self.grid_data["buses"]

+        if "x" in self.lines.columns:
+            logger.warning(
+                "'lines.csv' contains reactances 'x', but this clearing is based on Net Transfer Capacities only (transport model). Use 'nodal_clearing' to include a linear OPF."
+ ) + self.zones_id = self.marketconfig.param_dict.get("zones_identifier") self.node_to_zone = None @@ -779,7 +792,6 @@ def extract_results( if hasattr(model, "flows"): flows = model.flows - # filter flows and only use positive flows to half the size of the dict flows_filtered = { index: flow.value for index, flow in flows.items() if not flow.stale } diff --git a/assume/markets/clearing_algorithms/nodal_clearing.py b/assume/markets/clearing_algorithms/nodal_clearing.py new file mode 100644 index 000000000..cd161da95 --- /dev/null +++ b/assume/markets/clearing_algorithms/nodal_clearing.py @@ -0,0 +1,436 @@ +# SPDX-FileCopyrightText: ASSUME Developers +# +# SPDX-License-Identifier: AGPL-3.0-or-later + +import logging +from datetime import timedelta +from operator import itemgetter + +import numpy as np +import pandas as pd +import pypsa +from mango import AgentAddress + +from assume.common.grid_utils import read_pypsa_grid +from assume.common.market_objects import MarketConfig, MarketProduct, Orderbook +from assume.common.utils import create_incidence_matrix +from assume.markets.base_market import MarketRole + +logger = logging.getLogger(__name__) + +# Set the log level to WARNING +logging.getLogger("linopy").setLevel(logging.WARNING) +logging.getLogger("pypsa").setLevel(logging.WARNING) + + +def calculate_meta(accepted_demand_orders, accepted_supply_orders, product): + supply_volume = sum(map(itemgetter("accepted_volume"), accepted_supply_orders)) + demand_volume = -sum(map(itemgetter("accepted_volume"), accepted_demand_orders)) + prices = list(map(itemgetter("accepted_price"), accepted_supply_orders)) or [0] + + duration_hours = (product[1] - product[0]) / timedelta(hours=1) + avg_price = 0 + if supply_volume: + weighted_price = [ + order["accepted_volume"] * order["accepted_price"] + for order in accepted_supply_orders + ] + avg_price = sum(weighted_price) / supply_volume + return { + "supply_volume": supply_volume, + "demand_volume": demand_volume, + "demand_volume_energy": demand_volume * duration_hours, + "supply_volume_energy": supply_volume * duration_hours, + "price": avg_price, + "max_price": max(prices), + "min_price": min(prices), + "node": None, + "product_start": product[0], + "product_end": product[1], + "only_hours": product[2], + } + + +class NodalClearingRole(MarketRole): + """ + This class implements a nodal market clearing mechanism using a linear optimal power flow (OPF) approach. 
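+
+    Only simple bids ('SB') are supported; block and linked bids are rejected
+    during validation. Each unit is represented in the PyPSA network as a
+    generator: supply units with an output range of 0 to p_nom, demand units
+    with -p_nom to 0, and storage units with -p_nom to p_nom.
+
+    A grid description must be available as ``grid_data`` (loaded via the
+    market's ``param_dict``, e.g. from ``network_path``). Relevant
+    ``param_dict`` options are ``solver`` (default ``highs``),
+    ``zones_identifier``, ``pricing_mechanism`` (``pay_as_clear``, the default,
+    or ``pay_as_bid``) and ``log_flows`` (default ``False``).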
+ """ + + required_fields = ["node", "max_power"] + + def __init__(self, marketconfig: MarketConfig): + super().__init__(marketconfig) + + self.network = pypsa.Network() + + if not self.grid_data: + logger.error(f"Market '{marketconfig.market_id}': grid_data is missing.") + raise ValueError("grid_data is missing.") + + # Define grid data + self.nodes = ["node0"] + self.incidence_matrix = None + + self.lines = self.grid_data["lines"] + buses = self.grid_data["buses"] + + self.zones_id = self.marketconfig.param_dict.get("zones_identifier") + self.node_to_zone = None + + # Generate the incidence matrix and set the nodes based on zones or individual buses + if self.zones_id: + # Zonal Case + self.incidence_matrix = create_incidence_matrix( + self.lines, buses, zones_id=self.zones_id + ) + self.nodes = buses[self.zones_id].unique() + self.node_to_zone = buses[self.zones_id].to_dict() + else: + # Nodal Case + self.incidence_matrix = create_incidence_matrix(self.lines, buses) + self.nodes = buses.index.values + + self.log_flows = self.marketconfig.param_dict.get("log_flows", False) + self.pricing_mechanism = self.marketconfig.param_dict.get( + "pricing_mechanism", "pay_as_clear" + ) + if self.pricing_mechanism not in ["pay_as_bid", "pay_as_clear"]: + logger.error( + f"Market '{marketconfig.market_id}': Invalid payment mechanism '{self.pricing_mechanism}'." + ) + raise ValueError("Invalid payment mechanism.") + + # if we have multiple hours (count >1), we cannot handle storage units bids yet + # this is because the storage bids would be linked bids + storage_units = self.grid_data.get("storage_units", pd.DataFrame()) + if not storage_units.empty: + if self.marketconfig.market_products[0].count > 1: + # make sure storages potentially present in the grid do not participate in this market + if not ( + self.grid_data["storage_units"][ + f"bidding_{self.marketconfig.market_id}" + ].isin(["-", ""]) + | self.grid_data["storage_units"][ + f"bidding_{self.marketconfig.market_id}" + ].isna() + ).all(): + logger.error( + f"Market '{marketconfig.market_id}': Nodal clearing with multiple product counts does not support storage unit bids yet." + ) + raise NotImplementedError( + "Nodal clearing with multiple product counts does not support storage unit bids yet." + ) + + read_pypsa_grid( + network=self.network, + grid_dict=self.grid_data, + ) + + # add all units to the PyPSA network as generators with p_nom their absolute max power + # generators have p_min_pu - p_max_pu 0 to 1 + self.network.add( + "Generator", + self.grid_data["generators"].index, + bus=self.grid_data["generators"]["node"], + p_nom=self.grid_data["generators"]["max_power"], + p_min_pu=0, + p_max_pu=1, + ) + # demand units have p_min_pu - p_max_pu -1 to 0 + self.network.add( + "Generator", + self.grid_data["loads"].index, + bus=self.grid_data["loads"]["node"], + p_nom=self.grid_data["loads"]["max_power"], + p_min_pu=-1, + p_max_pu=0, + ) + # storage units + # also add them as generators, as we only regard bids here and are not interested in their internal state + # we take the max of discharging and charging power as p_nom for PyPSA. Bids are later used to set p_min_pu and p_max_pu accordingly. 
+        if not storage_units.empty:
+            self.network.add(
+                "Generator",
+                self.grid_data["storage_units"].index,
+                bus=self.grid_data["storage_units"]["node"],
+                p_nom=np.maximum(
+                    self.grid_data["storage_units"]["max_power_discharge"].values,
+                    self.grid_data["storage_units"]["max_power_charge"].values,
+                ),
+                p_min_pu=-1,
+                p_max_pu=1,
+            )
+
+        self.solver = marketconfig.param_dict.get("solver", "highs")
+        if self.solver == "gurobi":
+            self.solver_options = {"LogToConsole": 0, "OutputFlag": 0}
+        elif self.solver == "highs":
+            self.solver_options = {"output_flag": False, "log_to_console": False}
+        else:
+            self.solver_options = {}
+
+    def validate_orderbook(
+        self, orderbook: Orderbook, agent_addr: AgentAddress
+    ) -> None:
+        """
+        Checks whether the bid types are valid and whether the volumes are within the maximum bid volume.
+
+        Args:
+            orderbook (Orderbook): The orderbook to be validated.
+            agent_addr (AgentAddress): The address of the agent submitting the orderbook.
+
+        Raises:
+            ValueError: If the bid type is invalid.
+        """
+        market_id = self.marketconfig.market_id
+
+        for order in orderbook:
+            # if bid_type is None, set to default bid_type
+            if order.get("bid_type") is None:
+                order["bid_type"] = "SB"
+            # Validate bid_type
+            elif order["bid_type"] in ["BB", "LB"]:
+                raise ValueError(
+                    f"Market '{market_id}': Invalid bid_type '{order['bid_type']}' in order {order}. Nodal clearing only supports the 'SB' bid type. Use 'complex_clearing' for BB and LB bid types."
+                )
+        # validate prices and volumes using base market role validation
+        super().validate_orderbook(orderbook, agent_addr)
+
+        for order in orderbook:
+            # Node validation
+            node = order.get("node")
+            if node:
+                if self.zones_id:
+                    node = self.node_to_zone.get(node, self.nodes[0])
+                    order["node"] = node
+                if node not in self.nodes:
+                    logger.warning(
+                        f"Market '{market_id}': Node '{node}' not in nodes list {self.nodes}. Setting to first node '{self.nodes[0]}'. Order details: {order}"
+                    )
+                    order["node"] = self.nodes[0]
+            else:
+                if self.incidence_matrix is not None:
+                    logger.warning(
+                        f"Market '{market_id}': Order without a node, setting node to the first node '{self.nodes[0]}'. Please check that the bidding strategy sets the correct node. Order details: {order}"
+                    )
+                    order["node"] = self.nodes[0]
+                else:
+                    logger.warning(
+                        f"Market '{market_id}': Order without a node and no incidence matrix, setting node to 'node0'. Order details: {order}"
+                    )
+                    order["node"] = "node0"
+
+    def clear(
+        self, orderbook: Orderbook, market_products: list[MarketProduct]
+    ) -> tuple[Orderbook, Orderbook, list[dict], dict[tuple, float]]:
+        """
+        Performs nodal clearing based on a linear optimal power flow.
+        The returned orderbook contains accepted orders with the accepted volumes and prices.
+
+        Args:
+            orderbook (Orderbook): The orderbook to be cleared.
+            market_products (list[MarketProduct]): The products for which clearing happens.
+
+        Returns:
+            tuple[Orderbook, Orderbook, list[dict], dict[tuple, float]]: The accepted orderbook, rejected orderbook, market metadata, and network flows.
+ """ + + if len(orderbook) == 0: + return super().clear(orderbook, market_products) + orderbook_df = pd.DataFrame(orderbook) + orderbook_df["accepted_volume"] = 0.0 + orderbook_df["accepted_price"] = 0.0 + + snapshots = pd.date_range( + start=market_products[0][0], # start time + end=market_products[-1][0], # end time + freq=self.marketconfig.market_products[0].duration, + ) + + # Now you can pivot the DataFrame + volume_pivot = orderbook_df.pivot( + index="start_time", columns="unit_id", values="volume" + ) + # volume_pivot.index = snapshots + price_pivot = orderbook_df.pivot( + index="start_time", columns="unit_id", values="price" + ) + # Copy the network + n = self.network.copy() + + n.set_snapshots(snapshots) + + # Update p_max_pu for all units based on their bids in the actual snapshots + # generators + gen_idx = self.grid_data["generators"].index + gen_idx = gen_idx.intersection(volume_pivot.columns) + n.generators_t.p_max_pu.loc[snapshots, gen_idx] = ( + volume_pivot[gen_idx] / n.generators.loc[gen_idx, "p_nom"].values + ) + n.generators_t.marginal_cost.loc[snapshots, gen_idx] = price_pivot[gen_idx] + # demand + demand_idx = self.grid_data["loads"].index + demand_idx = demand_idx.intersection(volume_pivot.columns) + n.generators_t.p_min_pu.loc[snapshots, demand_idx] = ( + volume_pivot[demand_idx] / n.generators.loc[demand_idx, "p_nom"].values + ) + n.generators_t.marginal_cost.loc[snapshots, demand_idx] = price_pivot[ + demand_idx + ] + + # storage + if self.grid_data.get("storage_units") is not None: + storage_idx = self.grid_data["storage_units"].index + storage_idx = storage_idx.intersection(volume_pivot.columns) + # discharging (positive bids) + n.generators_t.p_max_pu.loc[snapshots, storage_idx] = ( + volume_pivot[storage_idx].clip(lower=0).fillna(0) + / n.generators.loc[storage_idx, "p_nom"].values + ) + # charging (negative bids) + n.generators_t.p_min_pu.loc[snapshots, storage_idx] = ( + volume_pivot[storage_idx].clip(upper=0).fillna(0) + / n.generators.loc[storage_idx, "p_nom"].values + ) + # set bid price as marginal costs in the respective hours + n.generators_t.marginal_cost.loc[snapshots, storage_idx] = price_pivot[ + storage_idx + ].fillna(0) + + # run linear optimal powerflow + n.optimize.fix_optimal_capacities() + status, termination_condition = n.optimize( + solver=self.solver, + solver_options=self.solver_options, + progress=False, + ) + + if status != "ok": + logger.error(f"Solver exited with {termination_condition}") + raise Exception("Solver in nodal clearing did not converge") + + # Find intersection of unit_ids in orderbook_df and columns in n.generators_t.p + valid_units = orderbook_df["unit_id"].unique() + dispatch = n.generators_t.p + + for unit in valid_units: + if unit in dispatch.columns: + # get accepted volume and price for each time snapshot + accepted_volumes = dispatch[unit] + if self.pricing_mechanism == "pay_as_clear": + accepted_prices = n.buses_t.marginal_price.loc[ + :, n.generators.loc[unit, "bus"] + ] + elif self.pricing_mechanism == "pay_as_bid": + accepted_prices = price_pivot[unit] + else: + raise ValueError("Invalid pricing mechanism.") + + # update orderbook_df with accepted volumes and prices + for t, (vol, price) in enumerate( + zip(accepted_volumes, accepted_prices) + ): + mask = (orderbook_df["unit_id"] == unit) & ( + orderbook_df["start_time"] == snapshots[t] + ) + orderbook_df.loc[mask, "accepted_volume"] = vol + orderbook_df.loc[mask, "accepted_price"] = price + + # return orderbook_df back to orderbook format as list of dicts + 
+        accepted_orders = orderbook_df[orderbook_df["accepted_volume"] != 0].to_dict(
+            "records"
+        )
+        rejected_orders = orderbook_df[orderbook_df["accepted_volume"] == 0].to_dict(
+            "records"
+        )
+        for order in rejected_orders:
+            # set the accepted price for each rejected order to zero
+            # this is not yet done consistently across the framework
+            order["accepted_price"] = 0
+
+        market_clearing_prices = n.buses_t.marginal_price.to_dict()
+
+        accepted_orders, rejected_orders, meta, flows = extract_results(
+            network=n,
+            accepted_orders=accepted_orders,
+            rejected_orders=rejected_orders,
+            market_products=market_products,
+            market_clearing_prices=market_clearing_prices,
+            log_flows=self.log_flows,
+        )
+
+        return accepted_orders, rejected_orders, meta, flows
+
+
+def extract_results(
+    network: pypsa.Network,
+    accepted_orders: Orderbook,
+    rejected_orders: Orderbook,
+    market_products: list[MarketProduct],
+    market_clearing_prices: dict,
+    log_flows: bool = False,
+):
+    """
+    Extracts the results of the market clearing from the solved PyPSA model.
+
+    Args:
+        network (pypsa.Network): The PyPSA network after solving the market clearing.
+        accepted_orders (Orderbook): List of the accepted orders.
+        rejected_orders (Orderbook): List of the rejected orders.
+        market_products (list[MarketProduct]): The products to be traded.
+        market_clearing_prices (dict): The market clearing prices.
+        log_flows (bool): Whether to log network flows.
+
+    Returns:
+        tuple[Orderbook, Orderbook, list[dict], dict]: The accepted orders, rejected orders, meta information, and network flows.
+    """
+    meta = []
+    supply_volume_dict = {
+        node: {t: 0.0 for t in network.snapshots} for node in network.buses.index
+    }
+    demand_volume_dict = {
+        node: {t: 0.0 for t in network.snapshots} for node in network.buses.index
+    }
+
+    for order in accepted_orders:
+        node = order["node"]
+        t = order["start_time"]
+        if order["accepted_volume"] > 0:
+            supply_volume_dict[node][t] += order["accepted_volume"]
+        else:
+            demand_volume_dict[node][t] += order["accepted_volume"]
+
+    # write the meta information for each hour of the clearing period
+    for node in network.buses.index:
+        for product in market_products:
+            t = product[0]
+            clear_price = market_clearing_prices[node][t]
+            supply_volume = supply_volume_dict[node][t]
+            demand_volume = demand_volume_dict[node][t]
+            duration_hours = (product[1] - product[0]) / timedelta(hours=1)
+
+            meta.append(
+                {
+                    "supply_volume": supply_volume,
+                    "demand_volume": -demand_volume,
+                    "demand_volume_energy": -demand_volume * duration_hours,
+                    "supply_volume_energy": supply_volume * duration_hours,
+                    "price": clear_price,
+                    "max_price": clear_price,
+                    "min_price": clear_price,
+                    "node": node,
+                    "product_start": product[0],
+                    "product_end": product[1],
+                    "only_hours": product[2],
+                }
+            )
+
+    flows = {}
+    if log_flows:
+        # extract flows
+        flows = network.lines_t.p0.stack(future_stack=True).to_dict()
+
+    return accepted_orders, rejected_orders, meta, flows
diff --git a/assume/markets/clearing_algorithms/redispatch.py b/assume/markets/clearing_algorithms/redispatch.py
index 9473a31ad..5e68f6a14 100644
--- a/assume/markets/clearing_algorithms/redispatch.py
+++ b/assume/markets/clearing_algorithms/redispatch.py
@@ -182,8 +182,8 @@ def clear(
         redispatch_network.lpf()

         # check lines for congestion where power flow is larger than s_nom
-        line_loading = (
-            redispatch_network.lines_t.p0.abs() / redispatch_network.lines.s_nom
+        line_loading = redispatch_network.lines_t.p0.abs() / (
+            redispatch_network.lines.s_nom * redispatch_network.lines.s_max_pu
         )

         # if any line is congested, perform redispatch
diff --git a/assume/scenario/loader_csv.py b/assume/scenario/loader_csv.py
index 078386fa8..84a96c81a 100644
--- a/assume/scenario/loader_csv.py
+++ b/assume/scenario/loader_csv.py
@@ -346,16 +346,29 @@ def make_market_config(

-def read_grid(network_path: str | Path) -> dict[str, pd.DataFrame]:
+def read_grid(network_path: str | Path) -> dict[str, pd.DataFrame | None]:
     network_path = Path(network_path)
-    buses = pd.read_csv(network_path / "buses.csv", index_col=0)
-    lines = pd.read_csv(network_path / "lines.csv", index_col=0)
-    generators = pd.read_csv(network_path / "powerplant_units.csv", index_col=0)
-    loads = pd.read_csv(network_path / "demand_units.csv", index_col=0)
+    buses = None
+    lines = None
+    generators = None
+    loads = None
+    storage_units = None
+
+    if (network_path / "buses.csv").exists():
+        buses = pd.read_csv(network_path / "buses.csv", index_col=0)
+    if (network_path / "lines.csv").exists():
+        lines = pd.read_csv(network_path / "lines.csv", index_col=0)
+    if (network_path / "powerplant_units.csv").exists():
+        generators = pd.read_csv(network_path / "powerplant_units.csv", index_col=0)
+    if (network_path / "demand_units.csv").exists():
+        loads = pd.read_csv(network_path / "demand_units.csv", index_col=0)
+    if (network_path / "storage_units.csv").exists():
+        storage_units = pd.read_csv(network_path / "storage_units.csv", index_col=0)

     return {
         "buses": buses,
         "lines": lines,
         "generators": generators,
         "loads": loads,
+        "storage_units": storage_units,
     }
diff --git a/docs/source/assume.markets.clearing_algorithms.rst b/docs/source/assume.markets.clearing_algorithms.rst
index f5598c8e8..c6d855e16 100644
--- a/docs/source/assume.markets.clearing_algorithms.rst
+++ b/docs/source/assume.markets.clearing_algorithms.rst
@@ -53,6 +53,13 @@ Redispatch Clearing Algorithm
    :undoc-members:
    :show-inheritance:

+Nodal Clearing Algorithm
+------------------------
+.. automodule:: assume.markets.clearing_algorithms.nodal_clearing
+   :members:
+   :undoc-members:
+   :show-inheritance:
+
 Additional Details
 ------------------
diff --git a/docs/source/market_config.rst b/docs/source/market_config.rst
index f97136e86..f103ff697 100644
--- a/docs/source/market_config.rst
+++ b/docs/source/market_config.rst
@@ -108,6 +108,14 @@ In our market trading system, we follow this convention for representing the vol
 This convention ensures clarity and consistency in how trades are represented and interpreted within the market. By using positive and negative volumes to indicate the direction of trades, we can easily distinguish between buying and selling activities while maintaining a straightforward and unambiguous pricing structure.

+Please note the following limitation
+------------------------------------
+
+None of the currently implemented `bidding_strategies` in ASSUME handle dispatch feasibility constraints in market mechanisms with multiple products (count > 1).
+This means that for markets with multiple products, the bidding strategies consider the technical feasibility of dispatching units across all products when formulating bids, but not after the market clearing, when some bids may be rejected.
+For example, if a storage unit plans to charge in one hour and discharge in a later one, and only the discharge bid is accepted, the resulting dispatch can be infeasible.
+Therefore, we advise against using markets with multiple products in combination with units that require strong time coupling, such as storage units or dispatchable units when `start_up_times` are considered.
+This is a known limitation in agent-based modelling and underestimates the risk of infeasible dispatch for power plant operators.

 Example Configuration - CRM Market
 ----------------------------------
diff --git a/docs/source/market_mechanism.rst b/docs/source/market_mechanism.rst
index cd94cd6be..d73c71076 100644
--- a/docs/source/market_mechanism.rst
+++ b/docs/source/market_mechanism.rst
@@ -35,7 +35,8 @@ The available market mechanisms are the following:
 3. :py:meth:`assume.markets.clearing_algorithms.complex_clearing.ComplexClearingRole`
 4. :py:meth:`assume.markets.clearing_algorithms.complex_clearing_dmas.ComplexDmasClearingRole`
 5. :py:meth:`assume.markets.clearing_algorithms.redispatch.RedispatchMarketRole`
-6. :py:meth:`assume.markets.clearing_algorithms.contracts.PayAsBidContractRole`
+6. :py:meth:`assume.markets.clearing_algorithms.nodal_clearing.NodalClearingRole`
+7. :py:meth:`assume.markets.clearing_algorithms.contracts.PayAsBidContractRole`

 The :code:`PayAsClearRole` performs an electricity market clearing using a pay-as-clear mechanism.
@@ -78,6 +79,8 @@ is given by: :math:`\mathbf{a}_{c, p} \: u_c \leq u_{p} \quad \forall \: c, p \i
 with the incidence matrix :math:`\mathbf{a}_{c, p}` defining the links between bids as 1, if c is linked as child to p, 0 else.

+Flows in the network are limited by the Net Transfer Capacity ('s_nom', scaled by 's_max_pu' if given) of each line :math:`l`: :math:`-NTC_{l} \leq F_{l, t} \leq NTC_{l} \quad \forall \: l \in \mathcal{L}, t \in \mathcal{T}`.
+
 Because with this algorithm, paradoxically accepted bids (PABs) can occur, the objective is solved in an iterative manner:

 1. The optimization problem is solved with the objective function and all constraints.
@@ -90,4 +93,11 @@ Because with this algorithm, paradoxically accepted bids (PABs) can occur, the o

 If you want a hands-on use-case of the complex clearing check out the prepared tutorial in Colab: https://colab.research.google.com/github/assume-framework/assume

+Nodal clearing
+=================
+
+The :code:`NodalClearingRole` performs an electricity market clearing of the bids submitted by market participants using a linear optimal power flow (OPF) approach.
+Profile, block and linked orders are not supported.
+The algorithm utilizes PyPSA to solve the OPF problem, allowing for a physics-based representation of network constraints.
+
 .. include:: redispatch_modeling.rst
diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst
index 9e9fcf1d7..97376c83f 100644
--- a/docs/source/release_notes.rst
+++ b/docs/source/release_notes.rst
@@ -42,6 +42,8 @@ Upcoming Release
 **New Features:**

 - **Unit Operator Portfolio Strategy**: A new bidding strategy type that enables portfolio optimization, where the default is called `UnitsOperatorEnergyNaiveDirectStrategy`. This strategy simply passes through bidding decisions of individual units within a portfolio, which was the default behavior beforehand as well. Further we added 'UnitsOperatorEnergyHeuristicCournotStrategy' which allows to model bidding behavior of a portfolio of units in a day-ahead market. The strategy calculates the optimal bid price and quantity for each unit in the portfolio, taking into account markup and the production costs of the units. This enables users to simulate and analyze the impact of strategic portfolio bidding on market outcomes and unit profitability.
+- **Nodal Market Clearing Algorithm**: A new market clearing algorithm that performs electricity market clearing using an optimal power flow (OPF) approach, considering grid constraints and nodal pricing.
+  This algorithm utilizes PyPSA to solve the OPF problem, allowing for a physics-based representation of network constraints.

 0.5.5 - (13th August 2025)
 ==========================
diff --git a/examples/examples.py b/examples/examples.py
index f334a4ceb..6db174f9d 100644
--- a/examples/examples.py
+++ b/examples/examples.py
@@ -54,6 +54,10 @@
         "scenario": "example_01d",
         "study_case": "zonal_case",
     },
+    "small_with_nodal_clearing": {
+        "scenario": "example_01d",
+        "study_case": "nodal_case",
+    },
     # example_01e is used in the tutorial notebook #3: Custom unit and bidding strategy example
     "market_study_eom": {
         "scenario": "example_01f",
diff --git a/examples/inputs/example_01d/buses.csv b/examples/inputs/example_01d/buses.csv
index a9485e59a..b20b73ba6 100644
--- a/examples/inputs/example_01d/buses.csv
+++ b/examples/inputs/example_01d/buses.csv
@@ -1,6 +1,4 @@
 name,v_nom,zone_id,x,y
-north_1,380.0,DE_1,10.0,54.0
-north_2,380.0,DE_1,9.5,53.5
-east,380.0,DE_1,13.4,52.5
-south,380.0,DE_2,11.6,48.1
-west,380.0,DE_2,7.0,51.5
+north_1,380.0,north_1,10.0,54.0
+north_2,380.0,north_2,9.5,53.5
+south,380.0,south,11.6,48.1
diff --git a/examples/inputs/example_01d/config.yaml b/examples/inputs/example_01d/config.yaml
index 4ab1218c7..40e6718f8 100644
--- a/examples/inputs/example_01d/config.yaml
+++ b/examples/inputs/example_01d/config.yaml
@@ -4,7 +4,7 @@ base:
   start_date: 2019-01-01 00:00
-  end_date: 2019-01-06 00:00
+  end_date: 2019-01-04 00:00
   time_step: 1h
   save_frequency_hours: 24
@@ -24,7 +24,16 @@ base:
       maximum_bid_price: 3000
       minimum_bid_price: -500
      price_unit: EUR/MWh
-      market_mechanism: pay_as_clear
+      market_mechanism: complex_clearing
+      additional_fields:
+        - node
+        - max_power
+      param_dict:
+        network_path: .
+        solver: highs
+        zones_identifier: zone_id
+        pricing_mechanism: pay_as_clear
+        log_flows: true

     redispatch:
       start_date: 2019-01-01 21:00
@@ -82,3 +91,62 @@ zonal_case:
       additional_fields:
         - bid_type
         - node
+
+nodal_case:
+  start_date: 2019-01-01 00:00
+  end_date: 2019-01-04 00:00
+  time_step: 1h
+  save_frequency_hours: 24
+
+  markets_config:
+    nodal:
+      start_date: 2019-01-01 00:00
+      operator: EOM_operator
+      product_type: energy
+      products:
+        - duration: 1h
+          count: 24
+          first_delivery: 1h
+      opening_frequency: 24h
+      opening_duration: 1h
+      volume_unit: MWh
+      maximum_bid_volume: 100000
+      maximum_bid_price: 3000
+      minimum_bid_price: -500
+      price_unit: EUR/MWh
+      market_mechanism: nodal_clearing
+      additional_fields:
+        - node
+        - max_power
+      param_dict:
+        network_path: .
+        solver: highs
+        zones_identifier: zone_id
+        pricing_mechanism: pay_as_clear
+        log_flows: true
+
+    redispatch:
+      start_date: 2019-01-01 21:00
+      operator: network_operator
+      product_type: energy
+      products:
+        - duration: 1h
+          count: 24
+          first_delivery: 3h
+      opening_frequency: 24h
+      opening_duration: 2h
+      volume_unit: MWh
+      maximum_bid_volume: 100000
+      maximum_bid_price: 3000
+      minimum_bid_price: -500
+      price_unit: EUR/MWh
+      market_mechanism: redispatch
+      additional_fields:
+        - node
+        - min_power
+        - max_power
+      param_dict:
+        network_path: .
+ solver: highs + payment_mechanism: pay_as_bid + backup_marginal_cost: 10000 diff --git a/examples/inputs/example_01d/demand_units.csv b/examples/inputs/example_01d/demand_units.csv index ec99b4706..bf427b668 100644 --- a/examples/inputs/example_01d/demand_units.csv +++ b/examples/inputs/example_01d/demand_units.csv @@ -1,6 +1,4 @@ name,technology,bidding_EOM,bidding_redispatch,bidding_nodal,bidding_DAM,max_power,min_power,unit_operator,node -demand_north_1,inflex_demand,demand_energy_naive,demand_energy_naive_redispatch,demand_energy_naive,demand_energy_naive,100000,0,eom_de,north_1 +demand_north_1,inflex_demand,demand_energy_naive,demand_energy_naive_redispatch,demand_energy_naive,demand_energy_naive,200000,0,eom_de,north_1 demand_north_2,inflex_demand,demand_energy_naive,demand_energy_naive_redispatch,demand_energy_naive,demand_energy_naive,100000,0,eom_de,north_2 -demand_east,inflex_demand,demand_energy_naive,demand_energy_naive_redispatch,demand_energy_naive,demand_energy_naive,100000,0,eom_de,east -demand_south,inflex_demand,demand_energy_naive,demand_energy_naive_redispatch,demand_energy_naive,demand_energy_naive,100000,0,eom_de,south -demand_west,inflex_demand,demand_energy_naive,demand_energy_naive_redispatch,demand_energy_naive,demand_energy_naive,100000,0,eom_de,west +demand_south,inflex_demand,demand_energy_naive,demand_energy_naive_redispatch,demand_energy_naive,demand_energy_naive,200000,0,eom_de,south diff --git a/examples/inputs/example_01d/lines.csv b/examples/inputs/example_01d/lines.csv index 37f8d64c5..5d4a5f694 100644 --- a/examples/inputs/example_01d/lines.csv +++ b/examples/inputs/example_01d/lines.csv @@ -1,7 +1,4 @@ -line,bus0,bus1,s_nom,x,r -Line_N_S_1,north_1,south,5000.0,0.01,0.001 -Line_N_S_2,north_2,south,5000.0,0.01,0.001 -Line_N_N,north_1,north_2,5000.0,0.01,0.001 -Line_N_W,north_1,west,5000.0,0.01,0.001 -Line_N_E,north_2,east,5000.0,0.01,0.001 -Line_S_E,south,east,5000.0,0.01,0.001 +line,bus0,bus1,s_nom,s_max_pu,x,r +Line_N_S_1,north_1,south,5000.0,1,0.01,0.001 +Line_N_S_2,north_2,south,5000.0,1,0.01,0.001 +Line_N_N,north_1,north_2,5000.0,1,0.01,0.001 diff --git a/examples/inputs/example_01d/powerplant_units.csv b/examples/inputs/example_01d/powerplant_units.csv index 2a48789e9..0f22f5337 100644 --- a/examples/inputs/example_01d/powerplant_units.csv +++ b/examples/inputs/example_01d/powerplant_units.csv @@ -19,11 +19,11 @@ Unit 17,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,power Unit 18,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,22,north_2,Operator 2 Unit 19,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,23,north_2,Operator 2 Unit 20,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,24,north_2,Operator 2 -Unit 21,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,25,east,Operator 3 -Unit 22,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,26,east,Operator 3 -Unit 23,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,27,east,Operator 3 
-Unit 24,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,28,east,Operator 3 -Unit 25,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,29,east,Operator 3 +Unit 21,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,25,south,Operator 3 +Unit 22,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,26,south,Operator 3 +Unit 23,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,27,south,Operator 3 +Unit 24,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,28,south,Operator 3 +Unit 25,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,29,south,Operator 3 Unit 26,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,30,south,Operator 4 Unit 27,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,31,south,Operator 4 Unit 28,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,32,south,Operator 4 @@ -39,13 +39,13 @@ Unit 37,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,power Unit 38,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,42,south,Operator 4 Unit 39,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,43,south,Operator 4 Unit 40,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,44,south,Operator 4 -Unit 41,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,45,west,Operator 5 -Unit 42,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,46,west,Operator 5 -Unit 43,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,47,west,Operator 5 -Unit 44,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,48,west,Operator 5 -Unit 45,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,49,west,Operator 5 -Unit 46,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,50,west,Operator 5 -Unit 
47,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,51,west,Operator 5 -Unit 48,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,52,west,Operator 5 -Unit 49,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,53,west,Operator 5 -Unit 50,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,54,west,Operator 5 +Unit 41,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,45,north_1,Operator 5 +Unit 42,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,46,north_1,Operator 5 +Unit 43,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,47,north_1,Operator 5 +Unit 44,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,48,north_1,Operator 5 +Unit 45,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,49,north_1,Operator 5 +Unit 46,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,50,north_1,Operator 5 +Unit 47,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,51,north_1,Operator 5 +Unit 48,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,52,north_1,Operator 5 +Unit 49,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,53,north_1,Operator 5 +Unit 50,nuclear,powerplant_energy_naive,powerplant_energy_naive_redispatch,powerplant_energy_naive,powerplant_energy_heuristic_block,uranium,0.0,1000.0,0,0.3,54,north_1,Operator 5 diff --git a/tests/test_nodal_clearing.py b/tests/test_nodal_clearing.py new file mode 100644 index 000000000..13f838b0d --- /dev/null +++ b/tests/test_nodal_clearing.py @@ -0,0 +1,396 @@ +# SPDX-FileCopyrightText: ASSUME Developers +# +# SPDX-License-Identifier: AGPL-3.0-or-later + +import math +from datetime import datetime, timedelta + +import pandas as pd +import pytest +from dateutil import rrule as rr + +from assume.common.market_objects import MarketConfig, MarketProduct, Order +from assume.common.utils import get_available_products + +try: + from assume.markets.clearing_algorithms import NodalClearingRole +except ImportError: + pass + +simple_nodal_auction_config = MarketConfig( + market_id="simple_nodal_auction", + market_products=[MarketProduct(timedelta(hours=1), 1, timedelta(hours=1))], + additional_fields=["node"], + opening_hours=rr.rrule( + rr.HOURLY, + dtstart=datetime(2005, 6, 1), + until=datetime(2005, 6, 2), + cache=True, + ), + opening_duration=timedelta(hours=1), + volume_unit="MW", + volume_tick=0.1, 
+ maximum_bid_volume=None, + price_unit="€/MW", + market_mechanism="nodal_clearing", +) +eps = 1e-4 + + +@pytest.mark.require_network +def test_nodal_clearing_two_hours(): + market_config = simple_nodal_auction_config + h = 2 + market_config.market_products = [ + MarketProduct(timedelta(hours=1), h, timedelta(hours=1)) + ] + market_config.additional_fields = [ + "bid_type", + "node_id", + ] + # Create a dictionary with the data + nodes = { + "name": ["node1", "node2", "node3"], + "v_nom": [380.0, 380.0, 380.0], + } + # Convert the dictionary to a Pandas DataFrame with 'name' as the index + nodes = pd.DataFrame(nodes).set_index("name") + + # Create a dictionary with lines data + lines = { + "name": ["line_1_2", "line_1_3", "line_2_3"], + "bus0": ["node1", "node1", "node2"], + "bus1": ["node2", "node3", "node3"], + "s_nom": [5000.0, 5000.0, 5000.0], + "x": [0.01, 0.01, 0.01], + "r": [0.001, 0.001, 0.001], + } + # Convert the dictionary to a Pandas DataFrame + lines = pd.DataFrame(lines).set_index("name") + + # Create dictionary with generators data + generators = { + "name": [f"gen{p}" for p in range(5, 35)], + "node": ["node1"] * 10 + ["node2"] * 10 + ["node3"] * 10, + "max_power": [1000.0] * 30, + } + generators = pd.DataFrame(generators).set_index("name") + # Create dictionary with loads data + loads = { + "name": ["dem1", "dem2", "dem3"], + "node": ["node1", "node2", "node3"], + "max_power": [4400.0, 4400.0, 17400.0], + } + loads = pd.DataFrame(loads).set_index("name") + + grid_data = { + "buses": nodes, + "lines": lines, + "generators": generators, + "loads": loads, + } + market_config.param_dict["grid_data"] = grid_data + market_config.param_dict["log_flows"] = True + next_opening = market_config.opening_hours.after(datetime(2005, 6, 1)) + products = get_available_products(market_config.market_products, next_opening) + assert len(products) == h + + orderbook = [] + order: Order = { + "start_time": products[0][0], + "end_time": products[0][1], + "unit_id": "dem1", + "bid_id": "bid1", + "volume": 0, + "price": 0, + "only_hours": None, + "node": 0, + } + i = 0 + for v, p in zip([-2400, -4400], [3000, 3000]): + new_order = order.copy() + new_order["start_time"] = products[0][i] + new_order["end_time"] = products[0][i + 1] + new_order["volume"] = v + new_order["price"] = p + new_order["node"] = "node1" + new_order["bid_id"] = f"dem1_{i}" + new_order["unit_id"] = "dem1" + orderbook.append(new_order) + i += 1 + + i = 0 + for v, p in zip([-2400, -4400], [3000, 3000]): + new_order = order.copy() + new_order["start_time"] = products[0][i] + new_order["end_time"] = products[0][i + 1] + new_order["volume"] = v + new_order["price"] = p + new_order["node"] = "node2" + new_order["bid_id"] = f"dem2_{i}" + new_order["unit_id"] = "dem2" + orderbook.append(new_order) + i += 1 + + i = 0 + for v, p in zip([-17400, -14400], [3000, 3000]): + new_order = order.copy() + new_order["start_time"] = products[0][i] + new_order["end_time"] = products[0][i + 1] + new_order["volume"] = v + new_order["price"] = p + new_order["node"] = "node3" + new_order["bid_id"] = f"dem3_{i}" + new_order["unit_id"] = "dem3" + orderbook.append(new_order) + i += 1 + + for i in range(h): + for p in range(5, 15): + new_order = order.copy() + new_order["start_time"] = products[0][i] + new_order["end_time"] = products[0][i + 1] + new_order["volume"] = 1000 + new_order["price"] = p + new_order["node"] = "node1" + new_order["bid_id"] = f"gen{p}_{i}" + new_order["unit_id"] = f"gen{p}" + orderbook.append(new_order) + for p in range(15, 25): + 
new_order = order.copy() + new_order["start_time"] = products[0][i] + new_order["end_time"] = products[0][i + 1] + new_order["volume"] = 1000 + new_order["price"] = p + new_order["node"] = "node2" + new_order["bid_id"] = f"gen{p}_{i}" + new_order["unit_id"] = f"gen{p}" + orderbook.append(new_order) + for p in range(25, 35): + new_order = order.copy() + new_order["start_time"] = products[0][i] + new_order["end_time"] = products[0][i + 1] + new_order["volume"] = 1000 + new_order["price"] = p + new_order["node"] = "node3" + new_order["bid_id"] = f"gen{p}_{i}" + new_order["unit_id"] = f"gen{p}" + orderbook.append(new_order) + + mr = NodalClearingRole(market_config) + accepted_orders, rejected_orders, meta, flows = mr.clear(orderbook, products) + + assert meta[0]["node"] == "node1" + assert meta[2]["node"] == "node2" + assert meta[4]["node"] == "node3" + assert math.isclose(meta[0]["supply_volume"], 7600, abs_tol=eps) # node1 hour 0 + assert math.isclose(meta[1]["supply_volume"], 10000, abs_tol=eps) # node1 hour 1 + assert math.isclose(meta[2]["supply_volume"], 7000, abs_tol=eps) # node2 hour 0 + assert math.isclose(meta[3]["supply_volume"], 8200, abs_tol=eps) # node2 hour 1 + assert math.isclose(meta[4]["supply_volume"], 7600, abs_tol=eps) # node3 hour 0 + assert math.isclose(meta[5]["supply_volume"], 5000, abs_tol=eps) # node3 hour 1 + assert math.isclose(meta[0]["demand_volume"], 2400, abs_tol=eps) # node1 hour 0 + assert math.isclose(meta[1]["demand_volume"], 4400, abs_tol=eps) # node1 hour 1 + assert math.isclose(meta[2]["demand_volume"], 2400, abs_tol=eps) # node2 hour 0 + assert math.isclose(meta[3]["demand_volume"], 4400, abs_tol=eps) # node2 hour 1 + assert math.isclose(meta[4]["demand_volume"], 17400, abs_tol=eps) # node3 hour 0 + assert math.isclose(meta[5]["demand_volume"], 14400, abs_tol=eps) # node3 hour 1 + + assert math.isclose(meta[0]["price"], 12, abs_tol=eps) # node1 hour 0 + assert math.isclose(meta[1]["price"], 17, abs_tol=eps) # node1 hour 1 + assert math.isclose(meta[2]["price"], 22, abs_tol=eps) # node2 hour 0 + assert math.isclose(meta[3]["price"], 23, abs_tol=eps) # node2 hour 1 + assert math.isclose(meta[4]["price"], 32, abs_tol=eps) # node3 hour 0 + assert math.isclose(meta[5]["price"], 29, abs_tol=eps) # node3 hour 1 + + flows_df = pd.Series(flows).unstack() + assert math.isclose(flows_df.loc[products[0][0], "line_1_2"], 200, abs_tol=eps) + assert math.isclose(flows_df.loc[products[0][1], "line_1_2"], 600, abs_tol=eps) + assert math.isclose(flows_df.loc[products[0][0], "line_1_3"], 5000, abs_tol=eps) + assert math.isclose(flows_df.loc[products[0][1], "line_1_3"], 5000, abs_tol=eps) + assert math.isclose(flows_df.loc[products[0][0], "line_2_3"], 4800, abs_tol=eps) + assert math.isclose(flows_df.loc[products[0][1], "line_2_3"], 4400, abs_tol=eps) + + +@pytest.mark.require_network +def test_nodal_clearing_with_storage_single_hour(): + market_config = simple_nodal_auction_config + h = 1 + market_config.market_products = [ + MarketProduct(timedelta(hours=1), h, timedelta(hours=1)) + ] + market_config.additional_fields = [ + "bid_type", + "node_id", + ] + # Create a dictionary with the data + nodes = { + "name": ["node1", "node2", "node3"], + "v_nom": [380.0, 380.0, 380.0], + } + # Convert the dictionary to a Pandas DataFrame with 'name' as the index + nodes = pd.DataFrame(nodes).set_index("name") + + # Create a dictionary with lines data + lines = { + "name": ["line_1_2", "line_1_3", "line_2_3"], + "bus0": ["node1", "node1", "node2"], + "bus1": ["node2", "node3", 
"node3"], + "s_nom": [5000.0, 5000.0, 5000.0], + "x": [0.01, 0.01, 0.01], + "r": [0.001, 0.001, 0.001], + } + # Convert the dictionary to a Pandas DataFrame + lines = pd.DataFrame(lines).set_index("name") + + # Create dictionary with generators data + generators = { + "name": [f"gen{p}" for p in range(5, 35)], + "node": ["node1"] * 10 + ["node2"] * 10 + ["node3"] * 10, + "max_power": [1000.0] * 30, + } + generators = pd.DataFrame(generators).set_index("name") + # Create dictionary with loads data + loads = { + "name": ["dem1", "dem2", "dem3"], + "node": ["node1", "node2", "node3"], + "max_power": [4400.0, 4400.0, 17400.0], + } + loads = pd.DataFrame(loads).set_index("name") + # Create dictionary with storage data + storage_units = { + "name": ["storage5", "storage50"], + "node": ["node1", "node3"], + "max_power_charge": [1000.0, 1000.0], + "max_power_discharge": [1000.0, 1000.0], + } + storage_units = pd.DataFrame(storage_units).set_index("name") + + grid_data = { + "buses": nodes, + "lines": lines, + "generators": generators, + "loads": loads, + "storage_units": storage_units, + } + market_config.param_dict["grid_data"] = grid_data + market_config.param_dict["log_flows"] = True + next_opening = market_config.opening_hours.after(datetime(2005, 6, 1)) + products = get_available_products(market_config.market_products, next_opening) + assert len(products) == h + + orderbook = [] + order: Order = { + "start_time": products[0][0], + "end_time": products[0][1], + "unit_id": "dem1", + "bid_id": "bid1", + "volume": 0, + "price": 0, + "only_hours": None, + "node": 0, + } + + new_order = order.copy() + new_order["start_time"] = products[0][0] + new_order["end_time"] = products[0][1] + new_order["volume"] = -2400 + new_order["price"] = 3000 + new_order["node"] = "node1" + new_order["bid_id"] = f"dem1_{0}" + new_order["unit_id"] = "dem1" + orderbook.append(new_order) + + new_order = order.copy() + new_order["start_time"] = products[0][0] + new_order["end_time"] = products[0][1] + new_order["volume"] = -2400 + new_order["price"] = 3000 + new_order["node"] = "node2" + new_order["bid_id"] = f"dem2_{0}" + new_order["unit_id"] = "dem2" + orderbook.append(new_order) + + new_order = order.copy() + new_order["start_time"] = products[0][0] + new_order["end_time"] = products[0][1] + new_order["volume"] = -16400 + new_order["price"] = 3000 + new_order["node"] = "node3" + new_order["bid_id"] = f"dem3_{0}" + new_order["unit_id"] = "dem3" + orderbook.append(new_order) + + for p in range(5, 15): + new_order = order.copy() + new_order["start_time"] = products[0][0] + new_order["end_time"] = products[0][1] + new_order["volume"] = 1000 + new_order["price"] = p + new_order["node"] = "node1" + new_order["bid_id"] = f"gen{p}_{0}" + new_order["unit_id"] = f"gen{p}" + orderbook.append(new_order) + for p in range(15, 25): + new_order = order.copy() + new_order["start_time"] = products[0][0] + new_order["end_time"] = products[0][1] + new_order["volume"] = 1000 + new_order["price"] = p + new_order["node"] = "node2" + new_order["bid_id"] = f"gen{p}_{0}" + new_order["unit_id"] = f"gen{p}" + orderbook.append(new_order) + for p in range(25, 35): + new_order = order.copy() + new_order["start_time"] = products[0][0] + new_order["end_time"] = products[0][1] + new_order["volume"] = 1000 + new_order["price"] = p + new_order["node"] = "node3" + new_order["bid_id"] = f"gen{p}_{0}" + new_order["unit_id"] = f"gen{p}" + orderbook.append(new_order) + + # add storage bids (1000 discharging @ 5 €/MW at node1) + new_order = order.copy() + 
new_order["start_time"] = products[0][0] + new_order["end_time"] = products[0][1] + new_order["volume"] = 1000 + new_order["price"] = 5 + new_order["node"] = "node1" + new_order["bid_id"] = f"discharge{5}_{0}" + new_order["unit_id"] = f"storage{5}" + orderbook.append(new_order) + # add storage bids (1000 charging @ 50 €/MW at node3) + new_order = order.copy() + new_order["start_time"] = products[0][0] + new_order["end_time"] = products[0][1] + new_order["volume"] = -1000 + new_order["price"] = 50 + new_order["node"] = "node3" + new_order["bid_id"] = f"charge{50}_{0}" + new_order["unit_id"] = f"storage{50}" + orderbook.append(new_order) + + mr = NodalClearingRole(market_config) + accepted_orders, rejected_orders, meta, flows = mr.clear(orderbook, products) + + assert meta[0]["node"] == "node1" + assert meta[1]["node"] == "node2" + assert meta[2]["node"] == "node3" + assert math.isclose(meta[0]["supply_volume"], 7600, abs_tol=eps) # node1 hour 0 + assert math.isclose(meta[1]["supply_volume"], 7000, abs_tol=eps) # node2 hour 0 + assert math.isclose(meta[2]["supply_volume"], 7600, abs_tol=eps) # node3 hour 0 + assert math.isclose(meta[0]["demand_volume"], 2400, abs_tol=eps) # node1 hour 0 + assert math.isclose(meta[1]["demand_volume"], 2400, abs_tol=eps) # node2 hour 0 + assert math.isclose(meta[2]["demand_volume"], 17400, abs_tol=eps) # node3 hour 0 + + assert math.isclose(meta[0]["price"], 11, abs_tol=eps) # node1 hour 0 + assert math.isclose(meta[1]["price"], 21.5, abs_tol=eps) # node2 hour 0 + assert math.isclose(meta[2]["price"], 32, abs_tol=eps) # node3 hour 0 + + flows_df = pd.Series(flows).unstack() + assert math.isclose(flows_df.loc[products[0][0], "line_1_2"], 200, abs_tol=eps) + assert math.isclose(flows_df.loc[products[0][0], "line_1_3"], 5000, abs_tol=eps) + assert math.isclose(flows_df.loc[products[0][0], "line_2_3"], 4800, abs_tol=eps)