From 5073260a0faf2e4f4b3141139b6828fcdb74f653 Mon Sep 17 00:00:00 2001 From: birgits Date: Sun, 22 Oct 2023 13:23:09 +0200 Subject: [PATCH 001/141] Delete ToDo --- edisgo/flex_opt/reinforce_grid.py | 1 - 1 file changed, 1 deletion(-) diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index eaf387107..89a0a6515 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -905,7 +905,6 @@ def enhanced_reinforce_grid( logger.info(f"Initial mode 'lv' reinforcement for {lv_grid} successful.") except (ValueError, RuntimeError, exceptions.MaximumIterationError): logger.warning(f"Initial mode 'lv' reinforcement for {lv_grid} failed.") - # ToDo catch convergence reinforcement versuchen try: logger.info("Try initial enhanced reinforcement.") From 4f6fe13712fdb323ef57d121f3497f1246972408 Mon Sep 17 00:00:00 2001 From: birgits Date: Sun, 22 Oct 2023 14:22:38 +0200 Subject: [PATCH 002/141] Fix use same time index --- edisgo/flex_opt/check_tech_constraints.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/edisgo/flex_opt/check_tech_constraints.py b/edisgo/flex_opt/check_tech_constraints.py index 76e1414f7..ccf7b52c6 100644 --- a/edisgo/flex_opt/check_tech_constraints.py +++ b/edisgo/flex_opt/check_tech_constraints.py @@ -709,7 +709,7 @@ def stations_relative_load(edisgo_obj, grids=None): except Exception: pass - return loading / allowed_loading.loc[:, loading.columns] + return loading / allowed_loading.loc[loading.index, loading.columns] def components_relative_load(edisgo_obj, n_minus_one=False): From 8c6e57a082649ccc902e0c2a09beb18859f083e4 Mon Sep 17 00:00:00 2001 From: birgits Date: Sun, 22 Oct 2023 14:25:08 +0200 Subject: [PATCH 003/141] Allow selecting most critical time steps from subset of time steps --- edisgo/tools/temporal_complexity_reduction.py | 26 ++++++++++++++++--- .../test_temporal_complexity_reduction.py | 14 +++++----- 2 files changed, 30 insertions(+), 10 deletions(-) diff --git 
a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index d4c5a34e6..048b93017 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -396,7 +396,7 @@ def _scored_most_critical_voltage_issues_time_interval( return time_intervals_df -def _troubleshooting_mode(edisgo_obj): +def _troubleshooting_mode(edisgo_obj, timesteps=None): """ Handles non-convergence issues in power flow by iteratively reducing load and feed-in until the power flow converges. @@ -404,10 +404,21 @@ def _troubleshooting_mode(edisgo_obj): Load and feed-in is reduced in steps of 10% down to 20% of the original load and feed-in. The most critical time intervals / time steps can then be determined based on the power flow results with the reduced load and feed-in. + + Parameters + ----------- + edisgo_obj : :class:`~.EDisGo` + The eDisGo API object + timesteps : :pandas:`pandas.DatetimeIndex` or \ + :pandas:`pandas.Timestamp` + Timesteps specifies from which time steps to select most critical ones. It + defaults to None in which case all time steps in + :attr:`~.network.timeseries.TimeSeries.timeindex` are used. 
+ """ try: logger.debug("Running initial power flow for temporal complexity reduction.") - edisgo_obj.analyze() + edisgo_obj.analyze(timesteps=timesteps) # Exception is used, as non-convergence can also lead to RuntimeError, not only # ValueError except Exception: @@ -421,6 +432,7 @@ def _troubleshooting_mode(edisgo_obj): for fraction in np.arange(0.8, 0.0, step=-0.1): try: edisgo_obj.analyze( + timesteps=timesteps, troubleshooting_mode="iteration", range_start=fraction, range_num=1, @@ -615,6 +627,7 @@ def get_most_critical_time_intervals( def get_most_critical_time_steps( edisgo_obj: EDisGo, + timesteps=None, num_steps_loading=None, num_steps_voltage=None, percentage: float = 1.0, @@ -627,6 +640,11 @@ def get_most_critical_time_steps( ----------- edisgo_obj : :class:`~.EDisGo` The eDisGo API object + timesteps : :pandas:`pandas.DatetimeIndex` or \ + :pandas:`pandas.Timestamp` + Timesteps specifies from which time steps to select most critical ones. It + defaults to None in which case all time steps in + :attr:`~.network.timeseries.TimeSeries.timeindex` are used. num_steps_loading : int The number of most critical overloading events to select. If None, `percentage` is used. Default: None. 
@@ -654,10 +672,10 @@ def get_most_critical_time_steps( """ # Run power flow if use_troubleshooting_mode: - edisgo_obj = _troubleshooting_mode(edisgo_obj) + edisgo_obj = _troubleshooting_mode(edisgo_obj, timesteps=timesteps) else: logger.debug("Running initial power flow for temporal complexity reduction.") - edisgo_obj.analyze() + edisgo_obj.analyze(timesteps=timesteps) # Select most critical steps based on current violations loading_scores = _scored_most_critical_loading(edisgo_obj) diff --git a/tests/tools/test_temporal_complexity_reduction.py b/tests/tools/test_temporal_complexity_reduction.py index 05db1238d..4664f17fd 100644 --- a/tests/tools/test_temporal_complexity_reduction.py +++ b/tests/tools/test_temporal_complexity_reduction.py @@ -32,7 +32,6 @@ def setup_class(self): self.edisgo.analyze() def test__scored_most_critical_loading(self): - ts_crit = temp_red._scored_most_critical_loading(self.edisgo) assert len(ts_crit) == 180 @@ -40,7 +39,6 @@ def test__scored_most_critical_loading(self): assert np.isclose(ts_crit.iloc[-1], 1.14647) def test__scored_most_critical_voltage_issues(self): - ts_crit = temp_red._scored_most_critical_voltage_issues(self.edisgo) assert len(ts_crit) == 120 @@ -48,14 +46,20 @@ def test__scored_most_critical_voltage_issues(self): assert np.isclose(ts_crit.iloc[-1], 0.01062258) def test_get_most_critical_time_steps(self): - ts_crit = temp_red.get_most_critical_time_steps( self.edisgo, num_steps_loading=2, num_steps_voltage=2 ) assert len(ts_crit) == 3 - def test__scored_most_critical_loading_time_interval(self): + ts_crit = temp_red.get_most_critical_time_steps( + self.edisgo, + num_steps_loading=2, + num_steps_voltage=2, + timesteps=self.edisgo.timeseries.timeindex[:24], + ) + assert len(ts_crit) == 2 + def test__scored_most_critical_loading_time_interval(self): # test with default values ts_crit = temp_red._scored_most_critical_loading_time_interval(self.edisgo, 24) assert len(ts_crit) == 9 @@ -82,7 +86,6 @@ def 
test__scored_most_critical_loading_time_interval(self): assert ts_crit.loc[0, "percentage_max_overloaded_components"] == 1 def test__scored_most_critical_voltage_issues_time_interval(self): - # test with default values ts_crit = temp_red._scored_most_critical_voltage_issues_time_interval( self.edisgo, 24 @@ -107,7 +110,6 @@ def test__scored_most_critical_voltage_issues_time_interval(self): assert np.isclose(ts_crit.loc[0, "percentage_buses_max_voltage_deviation"], 1.0) def test_get_most_critical_time_intervals(self): - self.edisgo.timeseries.timeindex = self.edisgo.timeseries.timeindex[:25] self.edisgo.timeseries.scale_timeseries(p_scaling_factor=5, q_scaling_factor=5) steps = temp_red.get_most_critical_time_intervals( From 475a2f3cd5b29295121aa3bee6df99b8effc2b6f Mon Sep 17 00:00:00 2001 From: birgits Date: Sun, 22 Oct 2023 14:32:10 +0200 Subject: [PATCH 004/141] Change how reduced_analysis is set --- edisgo/edisgo.py | 12 +++++++----- edisgo/flex_opt/reinforce_grid.py | 22 ++++++++++++++-------- 2 files changed, 21 insertions(+), 13 deletions(-) diff --git a/edisgo/edisgo.py b/edisgo/edisgo.py index f0edd32b6..28b3d4baf 100755 --- a/edisgo/edisgo.py +++ b/edisgo/edisgo.py @@ -1196,6 +1196,7 @@ def _scale_timeseries(pypsa_network_copy, fraction): def reinforce( self, timesteps_pfa: str | pd.DatetimeIndex | pd.Timestamp | None = None, + reduced_analysis: bool = False, copy_grid: bool = False, max_while_iterations: int = 20, split_voltage_band: bool = True, @@ -1237,14 +1238,15 @@ def reinforce( time steps. If your time series already represents the worst-case, keep the default value of None because finding the worst-case snapshots takes some time. - * 'reduced_analysis' - Reinforcement is conducted for all time steps at which at least one - branch shows its highest overloading or one bus shows its highest voltage - violation. * :pandas:`pandas.DatetimeIndex` or \ :pandas:`pandas.Timestamp` Use this option to explicitly choose which time steps to consider. 
- + reduced_analysis : bool + If True, reinforcement is conducted for all time steps at which at least + one branch shows its highest overloading or one bus shows its highest + voltage violation. Time steps to consider are specified through parameter + `timesteps_pfa`. If False, all time steps in parameter `timesteps_pfa` + are used. Default: False. copy_grid : bool If True, reinforcement is conducted on a copied grid and discarded. Default: False. diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index 89a0a6515..27fe6ff20 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -25,6 +25,7 @@ def reinforce_grid( edisgo: EDisGo, timesteps_pfa: str | pd.DatetimeIndex | pd.Timestamp | None = None, + reduced_analysis: bool = False, max_while_iterations: int = 20, split_voltage_band: bool = True, mode: str | None = None, @@ -47,6 +48,10 @@ def reinforce_grid( timesteps_pfa specifies for which time steps power flow analysis is conducted. See parameter `timesteps_pfa` in function :attr:`~.EDisGo.reinforce` for more information. + reduced_analysis : bool + Specifies, whether to run reinforcement on a subset of time steps that are most + critical. See parameter `reduced_analysis` in function + :attr:`~.EDisGo.reinforce` for more information. max_while_iterations : int Maximum number of times each while loop is conducted. Default: 20. 
split_voltage_band : bool @@ -139,14 +144,6 @@ def reinforce_grid( snapshots["min_residual_load"], ] ).dropna() - elif isinstance(timesteps_pfa, str) and timesteps_pfa == "reduced_analysis": - timesteps_pfa = get_most_critical_time_steps( - edisgo, - num_steps_loading=kwargs.get("num_steps_loading", None), - num_steps_voltage=kwargs.get("num_steps_voltage", None), - percentage=kwargs.get("percentage", 1.0), - use_troubleshooting_mode=kwargs.get("use_troubleshooting_mode", True), - ) # if timesteps_pfa is not of type datetime or does not contain # datetimes throw an error elif not isinstance(timesteps_pfa, datetime.datetime): @@ -160,6 +157,15 @@ def reinforce_grid( f"Input {timesteps_pfa} for timesteps_pfa is not valid." ) + if reduced_analysis: + timesteps_pfa = get_most_critical_time_steps( + edisgo, + timesteps=timesteps_pfa, + num_steps_loading=kwargs.get("num_steps_loading", None), + num_steps_voltage=kwargs.get("num_steps_voltage", None), + percentage=kwargs.get("percentage", 1.0), + use_troubleshooting_mode=kwargs.get("use_troubleshooting_mode", True), + ) iteration_step = 1 lv_grid_id = kwargs.get("lv_grid_id", None) scale_timeseries = kwargs.get("scale_timeseries", None) From b7886fc6bbc2eb020796c393d77d4d85bcf04204 Mon Sep 17 00:00:00 2001 From: birgits Date: Sun, 22 Oct 2023 14:38:22 +0200 Subject: [PATCH 005/141] More changes to change how reduced analysis is set --- edisgo/edisgo.py | 9 +++++---- edisgo/flex_opt/reinforce_grid.py | 8 ++++---- tests/flex_opt/test_reinforce_grid.py | 2 +- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/edisgo/edisgo.py b/edisgo/edisgo.py index 28b3d4baf..360f8118f 100755 --- a/edisgo/edisgo.py +++ b/edisgo/edisgo.py @@ -1303,20 +1303,20 @@ def reinforce( reinforce MV/LV stations for LV worst-cases. Default: False. 
num_steps_loading : int - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be + In case `reduced_analysis` is set to True, this parameter can be used to specify the number of most critical overloading events to consider. If None, `percentage` is used. Default: None. num_steps_voltage : int - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be + In case `reduced_analysis` is set to True, this parameter can be used to specify the number of most critical voltage issues to select. If None, `percentage` is used. Default: None. percentage : float - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be + In case `reduced_analysis` is set to True, this parameter can be used to specify the percentage of most critical time steps to select. The default is 1.0, in which case all most critical time steps are selected. Default: 1.0. use_troubleshooting_mode : bool - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be + In case `reduced_analysis` is set to True, this parameter can be used to specify how to handle non-convergence issues in the power flow analysis. If set to True, non-convergence issues are tried to be circumvented by reducing load and feed-in until the power flow converges. @@ -1409,6 +1409,7 @@ def reinforce( func( edisgo_obj, + reduced_analysis=reduced_analysis, max_while_iterations=max_while_iterations, split_voltage_band=split_voltage_band, without_generator_import=without_generator_import, diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index 27fe6ff20..f7ff65133 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -89,20 +89,20 @@ def reinforce_grid( reinforce MV/LV stations for LV worst-cases. Default: False. 
num_steps_loading : int - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be used + In case `reduced_analysis` is set to True, this parameter can be used to specify the number of most critical overloading events to consider. If None, `percentage` is used. Default: None. num_steps_voltage : int - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be used + In case `reduced_analysis` is set to True, this parameter can be used to specify the number of most critical voltage issues to select. If None, `percentage` is used. Default: None. percentage : float - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be used + In case `reduced_analysis` is set to True, this parameter can be used to specify the percentage of most critical time steps to select. The default is 1.0, in which case all most critical time steps are selected. Default: 1.0. use_troubleshooting_mode : bool - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be used + In case `reduced_analysis` is set to True, this parameter can be used to specify how to handle non-convergence issues in the power flow analysis. See parameter `use_troubleshooting_mode` in function :attr:`~.EDisGo.reinforce` for more information. Default: True. 
diff --git a/tests/flex_opt/test_reinforce_grid.py b/tests/flex_opt/test_reinforce_grid.py index dd4ca4cb4..ad1b296a7 100644 --- a/tests/flex_opt/test_reinforce_grid.py +++ b/tests/flex_opt/test_reinforce_grid.py @@ -58,7 +58,7 @@ def test_reinforce_grid(self): # test reduced analysis res_reduced = reinforce_grid( edisgo=copy.deepcopy(self.edisgo), - timesteps_pfa="reduced_analysis", + reduced_analysis=True, num_steps_loading=4, ) assert_frame_equal( From 43552cfd726e3f714fa26784f08dc09b72328093 Mon Sep 17 00:00:00 2001 From: birgits Date: Sun, 22 Oct 2023 14:54:54 +0200 Subject: [PATCH 006/141] Change LV reniforcement --- edisgo/flex_opt/reinforce_grid.py | 67 ++++++++++++++++++++++++------- 1 file changed, 53 insertions(+), 14 deletions(-) diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index f7ff65133..7f4fe80d8 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -883,7 +883,7 @@ def enhanced_reinforce_grid( logger.info("Run initial grid reinforcement for single LV grids.") for lv_grid in list(edisgo_object.topology.mv_grid.lv_grids): logger.info(f"Check initial convergence for {lv_grid=}.") - _, ts_not_converged = edisgo_object.analyze( + ts_converged, ts_not_converged = edisgo_object.analyze( mode="lv", raise_not_converged=False, lv_grid_id=lv_grid.id ) if len(ts_not_converged) > 0: @@ -902,12 +902,31 @@ def enhanced_reinforce_grid( ) try: logger.info(f"Try initial mode 'lv' reinforcement for {lv_grid=}.") - edisgo_object.reinforce( - mode="lv", - lv_grid_id=lv_grid.id, - catch_convergence_problems=True, - **kwargs, - ) + if len(ts_not_converged) > 0: + # if there are time steps that did not converge, run reinforcement + # first on converged time steps + edisgo_object.reinforce( + mode="lv", + lv_grid_id=lv_grid.id, + catch_convergence_problems=False, + timesteps_pfa=ts_converged, + **kwargs, + ) + # run reinforcement again in catch-convergence mode with all time steps + 
edisgo_object.reinforce( + mode="lv", + lv_grid_id=lv_grid.id, + catch_convergence_problems=True, + **kwargs, + ) + else: + # if all time steps converged, run normal reinforcement + edisgo_object.reinforce( + mode="lv", + lv_grid_id=lv_grid.id, + catch_convergence_problems=False, + **kwargs, + ) logger.info(f"Initial mode 'lv' reinforcement for {lv_grid} successful.") except (ValueError, RuntimeError, exceptions.MaximumIterationError): logger.warning(f"Initial mode 'lv' reinforcement for {lv_grid} failed.") @@ -940,7 +959,7 @@ def enhanced_reinforce_grid( for lv_grid in list(edisgo_object.topology.mv_grid.lv_grids): logger.info(f"Check convergence for {lv_grid=}.") - _, ts_not_converged = edisgo_object.analyze( + ts_converged, ts_not_converged = edisgo_object.analyze( mode="lv", raise_not_converged=False, lv_grid_id=lv_grid.id ) if len(ts_not_converged) > 0: @@ -961,12 +980,32 @@ def enhanced_reinforce_grid( ) try: logger.info(f"Try mode 'lv' reinforcement for {lv_grid=}.") - edisgo_object.reinforce( - mode="lv", - lv_grid_id=lv_grid.id, - catch_convergence_problems=True, - **kwargs, - ) + if len(ts_not_converged) > 0: + # if there are time steps that did not converge, run reinforcement + # first on converged time steps + edisgo_object.reinforce( + mode="lv", + lv_grid_id=lv_grid.id, + catch_convergence_problems=False, + timesteps_pfa=ts_converged, + **kwargs, + ) + # run reinforcement again in catch-convergence mode with all time + # steps + edisgo_object.reinforce( + mode="lv", + lv_grid_id=lv_grid.id, + catch_convergence_problems=True, + **kwargs, + ) + else: + # if all time steps converged, run normal reinforcement + edisgo_object.reinforce( + mode="lv", + lv_grid_id=lv_grid.id, + catch_convergence_problems=False, + **kwargs, + ) logger.info(f"Mode 'lv' reinforcement for {lv_grid} successful.") except (ValueError, RuntimeError, exceptions.MaximumIterationError): logger.info(f"Mode 'lv' reinforcement for {lv_grid} failed.") From 
0f15fab11f4b9ede77b75f64b736d45420bfe0e0 Mon Sep 17 00:00:00 2001 From: birgits Date: Sun, 22 Oct 2023 18:00:00 +0200 Subject: [PATCH 007/141] Fix type hinting --- edisgo/edisgo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/edisgo/edisgo.py b/edisgo/edisgo.py index 360f8118f..4c5e92979 100755 --- a/edisgo/edisgo.py +++ b/edisgo/edisgo.py @@ -998,7 +998,7 @@ def analyze( range_num: int = 10, scale_timeseries: float | None = None, **kwargs, - ) -> tuple[pd.DataFrame, pd.DataFrame]: + ) -> tuple[pd.DatetimeIndex, pd.DatetimeIndex]: """ Conducts a static, non-linear power flow analysis. From b17d7141636bfe1770c0bfa176d9b3a3e1da770e Mon Sep 17 00:00:00 2001 From: birgits Date: Sun, 22 Oct 2023 18:01:21 +0200 Subject: [PATCH 008/141] Bug fix hand analyze parameters to get_most_critical_time_steps --- edisgo/flex_opt/reinforce_grid.py | 22 ++++--- edisgo/tools/temporal_complexity_reduction.py | 57 ++++++++++++++++--- 2 files changed, 63 insertions(+), 16 deletions(-) diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index 7f4fe80d8..c6f88b947 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -157,15 +157,6 @@ def reinforce_grid( f"Input {timesteps_pfa} for timesteps_pfa is not valid." 
) - if reduced_analysis: - timesteps_pfa = get_most_critical_time_steps( - edisgo, - timesteps=timesteps_pfa, - num_steps_loading=kwargs.get("num_steps_loading", None), - num_steps_voltage=kwargs.get("num_steps_voltage", None), - percentage=kwargs.get("percentage", 1.0), - use_troubleshooting_mode=kwargs.get("use_troubleshooting_mode", True), - ) iteration_step = 1 lv_grid_id = kwargs.get("lv_grid_id", None) scale_timeseries = kwargs.get("scale_timeseries", None) @@ -176,6 +167,19 @@ def reinforce_grid( else: analyze_mode = mode + if reduced_analysis: + timesteps_pfa = get_most_critical_time_steps( + edisgo, + mode=analyze_mode, + timesteps=timesteps_pfa, + lv_grid_id=lv_grid_id, + scale_timeseries=scale_timeseries, + num_steps_loading=kwargs.get("num_steps_loading", None), + num_steps_voltage=kwargs.get("num_steps_voltage", None), + percentage=kwargs.get("percentage", 1.0), + use_troubleshooting_mode=kwargs.get("use_troubleshooting_mode", True), + ) + edisgo.analyze( mode=analyze_mode, timesteps=timesteps_pfa, diff --git a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index 048b93017..69bf70afc 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -396,7 +396,13 @@ def _scored_most_critical_voltage_issues_time_interval( return time_intervals_df -def _troubleshooting_mode(edisgo_obj, timesteps=None): +def _troubleshooting_mode( + edisgo_obj, + mode=None, + timesteps=None, + lv_grid_id=None, + scale_timeseries=None, +): """ Handles non-convergence issues in power flow by iteratively reducing load and feed-in until the power flow converges. @@ -409,16 +415,31 @@ def _troubleshooting_mode(edisgo_obj, timesteps=None): ----------- edisgo_obj : :class:`~.EDisGo` The eDisGo API object + mode : str or None + Allows to toggle between power flow analysis for the whole network or just + the MV or one LV grid. 
See parameter `mode` in function + :attr:`~.EDisGo.analyze` for more information. timesteps : :pandas:`pandas.DatetimeIndex` or \ :pandas:`pandas.Timestamp` Timesteps specifies from which time steps to select most critical ones. It defaults to None in which case all time steps in :attr:`~.network.timeseries.TimeSeries.timeindex` are used. + lv_grid_id : int or str + ID (e.g. 1) or name (string representation, e.g. "LVGrid_1") of LV grid + to analyze in case mode is 'lv'. Default: None. + scale_timeseries : float or None + See parameter `scale_timeseries` in function :attr:`~.EDisGo.analyze` for more + information. """ try: logger.debug("Running initial power flow for temporal complexity reduction.") - edisgo_obj.analyze(timesteps=timesteps) + edisgo_obj.analyze( + mode=mode, + timesteps=timesteps, + lv_grid_id=lv_grid_id, + scale_timeseries=scale_timeseries, + ) # Exception is used, as non-convergence can also lead to RuntimeError, not only # ValueError except Exception: @@ -429,13 +450,17 @@ def _troubleshooting_mode(edisgo_obj, timesteps=None): "not all time steps converged. Power flow is run again with reduced " "network load." 
) - for fraction in np.arange(0.8, 0.0, step=-0.1): + if isinstance(scale_timeseries, float): + iter_start = scale_timeseries - 0.1 + else: + iter_start = 0.8 + for fraction in np.arange(iter_start, 0.0, step=-0.1): try: edisgo_obj.analyze( + mode=mode, timesteps=timesteps, - troubleshooting_mode="iteration", - range_start=fraction, - range_num=1, + lv_grid_id=lv_grid_id, + scale_timeseries=fraction, ) logger.info( f"Power flow fully converged for a reduction factor " @@ -627,7 +652,10 @@ def get_most_critical_time_intervals( def get_most_critical_time_steps( edisgo_obj: EDisGo, + mode=None, timesteps=None, + lv_grid_id=None, + scale_timeseries=None, num_steps_loading=None, num_steps_voltage=None, percentage: float = 1.0, @@ -640,11 +668,21 @@ def get_most_critical_time_steps( ----------- edisgo_obj : :class:`~.EDisGo` The eDisGo API object + mode : str or None + Allows to toggle between power flow analysis for the whole network or just + the MV or one LV grid. See parameter `mode` in function + :attr:`~.EDisGo.analyze` for more information. timesteps : :pandas:`pandas.DatetimeIndex` or \ :pandas:`pandas.Timestamp` Timesteps specifies from which time steps to select most critical ones. It defaults to None in which case all time steps in :attr:`~.network.timeseries.TimeSeries.timeindex` are used. + lv_grid_id : int or str + ID (e.g. 1) or name (string representation, e.g. "LVGrid_1") of LV grid + to analyze in case mode is 'lv'. Default: None. + scale_timeseries : float or None + See parameter `scale_timeseries` in function :attr:`~.EDisGo.analyze` for more + information. num_steps_loading : int The number of most critical overloading events to select. If None, `percentage` is used. Default: None. 
@@ -675,7 +713,12 @@ def get_most_critical_time_steps( edisgo_obj = _troubleshooting_mode(edisgo_obj, timesteps=timesteps) else: logger.debug("Running initial power flow for temporal complexity reduction.") - edisgo_obj.analyze(timesteps=timesteps) + edisgo_obj.analyze( + mode=mode, + timesteps=timesteps, + lv_grid_id=lv_grid_id, + scale_timeseries=scale_timeseries, + ) # Select most critical steps based on current violations loading_scores = _scored_most_critical_loading(edisgo_obj) From b54ebc986fbb649ee158468b399e51e9076c1123 Mon Sep 17 00:00:00 2001 From: birgits Date: Sun, 22 Oct 2023 18:11:13 +0200 Subject: [PATCH 009/141] Allow not running an inital analyze when using reduced analysis --- edisgo/edisgo.py | 5 ++++ edisgo/flex_opt/reinforce_grid.py | 8 ++++++ edisgo/tools/temporal_complexity_reduction.py | 28 ++++++++++++------- 3 files changed, 31 insertions(+), 10 deletions(-) diff --git a/edisgo/edisgo.py b/edisgo/edisgo.py index 4c5e92979..e41ba99c8 100755 --- a/edisgo/edisgo.py +++ b/edisgo/edisgo.py @@ -1323,6 +1323,11 @@ def reinforce( The most critical time steps are then determined based on the power flow results with the reduced load and feed-in. If False, an error will be raised in case time steps do not converge. Default: True. + run_initial_analyze : bool + In case `reduced_analysis` is set to True, this parameter can be + used to specify whether to run an initial analyze to determine most + critical time steps or to use existing results. If set to False, + `use_troubleshooting_mode` is ignored. Default: True. Returns -------- diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index c6f88b947..d33bbf902 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -106,6 +106,11 @@ def reinforce_grid( to specify how to handle non-convergence issues in the power flow analysis. See parameter `use_troubleshooting_mode` in function :attr:`~.EDisGo.reinforce` for more information. 
Default: True. + run_initial_analyze : bool + In case `reduced_analysis` is set to True, this parameter can be + used to specify whether to run an initial analyze to determine most + critical time steps or to use existing results. If set to False, + `use_troubleshooting_mode` is ignored. Default: True. Returns ------- @@ -178,6 +183,7 @@ def reinforce_grid( num_steps_voltage=kwargs.get("num_steps_voltage", None), percentage=kwargs.get("percentage", 1.0), use_troubleshooting_mode=kwargs.get("use_troubleshooting_mode", True), + run_initial_analyze=kwargs.get("run_initial_analyze", True), ) edisgo.analyze( @@ -929,6 +935,7 @@ def enhanced_reinforce_grid( mode="lv", lv_grid_id=lv_grid.id, catch_convergence_problems=False, + run_initial_analyze=False, **kwargs, ) logger.info(f"Initial mode 'lv' reinforcement for {lv_grid} successful.") @@ -1008,6 +1015,7 @@ def enhanced_reinforce_grid( mode="lv", lv_grid_id=lv_grid.id, catch_convergence_problems=False, + run_initial_analyze=False, **kwargs, ) logger.info(f"Mode 'lv' reinforcement for {lv_grid} successful.") diff --git a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index 69bf70afc..9703a157e 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -660,6 +660,7 @@ def get_most_critical_time_steps( num_steps_voltage=None, percentage: float = 1.0, use_troubleshooting_mode=True, + run_initial_analyze=True, ) -> pd.DatetimeIndex: """ Get the time steps with the most critical overloading and voltage issues. @@ -700,6 +701,10 @@ def get_most_critical_time_steps( are then determined based on the power flow results with the reduced load and feed-in. If False, an error will be raised in case time steps do not converge. Default: True. + run_initial_analyze : bool + This parameter can be used to specify whether to run an initial analyze to + determine most critical time steps or to use existing results. 
If set to False, + `use_troubleshooting_mode` is ignored. Default: True. Returns -------- @@ -709,16 +714,19 @@ def get_most_critical_time_steps( """ # Run power flow - if use_troubleshooting_mode: - edisgo_obj = _troubleshooting_mode(edisgo_obj, timesteps=timesteps) - else: - logger.debug("Running initial power flow for temporal complexity reduction.") - edisgo_obj.analyze( - mode=mode, - timesteps=timesteps, - lv_grid_id=lv_grid_id, - scale_timeseries=scale_timeseries, - ) + if run_initial_analyze: + if use_troubleshooting_mode: + edisgo_obj = _troubleshooting_mode(edisgo_obj, timesteps=timesteps) + else: + logger.debug( + "Running initial power flow for temporal complexity reduction." + ) + edisgo_obj.analyze( + mode=mode, + timesteps=timesteps, + lv_grid_id=lv_grid_id, + scale_timeseries=scale_timeseries, + ) # Select most critical steps based on current violations loading_scores = _scored_most_critical_loading(edisgo_obj) From 70748ce57d9c418dd8b3a08beea2e082170d8e8c Mon Sep 17 00:00:00 2001 From: birgits Date: Mon, 23 Oct 2023 16:25:14 +0200 Subject: [PATCH 010/141] Quick bug fix --- edisgo/flex_opt/reinforce_grid.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index d33bbf902..a5eaa8976 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -935,7 +935,7 @@ def enhanced_reinforce_grid( mode="lv", lv_grid_id=lv_grid.id, catch_convergence_problems=False, - run_initial_analyze=False, + run_initial_analyze=True, **kwargs, ) logger.info(f"Initial mode 'lv' reinforcement for {lv_grid} successful.") @@ -1015,7 +1015,7 @@ def enhanced_reinforce_grid( mode="lv", lv_grid_id=lv_grid.id, catch_convergence_problems=False, - run_initial_analyze=False, + run_initial_analyze=True, **kwargs, ) logger.info(f"Mode 'lv' reinforcement for {lv_grid} successful.") From 708bdb3635c2797512cafaaff6aa61d815a156a1 Mon Sep 17 00:00:00 2001 From: birgits Date: 
Mon, 23 Oct 2023 16:58:31 +0200 Subject: [PATCH 011/141] Bug fix when analyze mode is lv, station node voltage cannot be checked --- edisgo/flex_opt/check_tech_constraints.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/edisgo/flex_opt/check_tech_constraints.py b/edisgo/flex_opt/check_tech_constraints.py index ccf7b52c6..70e9bbc42 100644 --- a/edisgo/flex_opt/check_tech_constraints.py +++ b/edisgo/flex_opt/check_tech_constraints.py @@ -1190,6 +1190,10 @@ def voltage_deviation_from_allowed_voltage_limits( v_dev_allowed_upper, v_dev_allowed_lower = allowed_voltage_limits( edisgo_obj, buses=buses, split_voltage_band=split_voltage_band ) + # the following is needed in case the power flow was only conducted for one LV + # grid - voltage at station node cannot be checked, warning is already raised + # in allowed_voltage_limits() + buses = v_dev_allowed_upper.columns # get voltages from power flow analysis v_mag_pu_pfa = edisgo_obj.results.v_res.loc[:, buses] From bd5c8888d29effb8848bfe32d9c130b91fada1f0 Mon Sep 17 00:00:00 2001 From: birgits Date: Mon, 23 Oct 2023 16:58:48 +0200 Subject: [PATCH 012/141] Change back as this wasn't the problem --- edisgo/flex_opt/reinforce_grid.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index a5eaa8976..d33bbf902 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -935,7 +935,7 @@ def enhanced_reinforce_grid( mode="lv", lv_grid_id=lv_grid.id, catch_convergence_problems=False, - run_initial_analyze=True, + run_initial_analyze=False, **kwargs, ) logger.info(f"Initial mode 'lv' reinforcement for {lv_grid} successful.") @@ -1015,7 +1015,7 @@ def enhanced_reinforce_grid( mode="lv", lv_grid_id=lv_grid.id, catch_convergence_problems=False, - run_initial_analyze=True, + run_initial_analyze=False, **kwargs, ) logger.info(f"Mode 'lv' reinforcement for {lv_grid} successful.") From 
c832690c47415244d20f54be64f9a9fa9452038b Mon Sep 17 00:00:00 2001 From: birgits Date: Mon, 23 Oct 2023 16:59:24 +0200 Subject: [PATCH 013/141] Add return in case no timesteps for reinforcement exist --- edisgo/flex_opt/reinforce_grid.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index d33bbf902..15c77d086 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -185,6 +185,9 @@ def reinforce_grid( use_troubleshooting_mode=kwargs.get("use_troubleshooting_mode", True), run_initial_analyze=kwargs.get("run_initial_analyze", True), ) + if len(timesteps_pfa) == 0: + logger.debug("Zero time steps for grid reinforcement.") + return edisgo.results edisgo.analyze( mode=analyze_mode, From 44a220333b2c35fde6a4c7c38f5de45fe2655e37 Mon Sep 17 00:00:00 2001 From: birgits Date: Mon, 23 Oct 2023 16:59:45 +0200 Subject: [PATCH 014/141] Bug fix add parameters when function is called --- edisgo/tools/temporal_complexity_reduction.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index 9703a157e..8c29aa586 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -716,7 +716,13 @@ def get_most_critical_time_steps( # Run power flow if run_initial_analyze: if use_troubleshooting_mode: - edisgo_obj = _troubleshooting_mode(edisgo_obj, timesteps=timesteps) + edisgo_obj = _troubleshooting_mode( + edisgo_obj, + mode=mode, + timesteps=timesteps, + lv_grid_id=lv_grid_id, + scale_timeseries=scale_timeseries, + ) else: logger.debug( "Running initial power flow for temporal complexity reduction." 
From 51b53de5aa36b1d7f52919cac84d1a4591e5bd39 Mon Sep 17 00:00:00 2001 From: birgits Date: Mon, 23 Oct 2023 18:33:22 +0200 Subject: [PATCH 015/141] Fix only use reduced analysis when power flow converges --- edisgo/flex_opt/reinforce_grid.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index 15c77d086..57106235a 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -705,6 +705,7 @@ def reinforce(): edisgo, timesteps_pfa=selected_timesteps, scale_timeseries=set_scaling_factor, + use_troubleshooting_mode=troubleshooting_mode, **kwargs, ) converged = True @@ -720,11 +721,13 @@ def reinforce(): # Get the timesteps from kwargs and then remove it to set it later manually timesteps_pfa = kwargs.pop("timesteps_pfa", None) selected_timesteps = timesteps_pfa + troubleshooting_mode_set = kwargs.pop("troubleshooting_mode", True) # Initial try logger.info("Run initial reinforcement.") set_scaling_factor = 1.0 iteration = 0 + troubleshooting_mode = False converged = reinforce() if converged is False: logger.info("Initial reinforcement did not succeed.") @@ -756,6 +759,7 @@ def reinforce(): "reinforcement." ) selected_timesteps = converging_timesteps + troubleshooting_mode = troubleshooting_mode_set reinforce() # Run reinforcement for time steps that did not converge after initial reinforcement @@ -765,6 +769,7 @@ def reinforce(): "reinforcement." 
) selected_timesteps = non_converging_timesteps + troubleshooting_mode = False converged = reinforce() if converged: @@ -798,6 +803,7 @@ def reinforce(): ) + highest_converged_scaling_factor logger.info(f"Try reinforcement with {set_scaling_factor=} at {iteration=}") + troubleshooting_mode = False converged = reinforce() if converged: logger.info( @@ -818,6 +824,7 @@ def reinforce(): if set_scaling_factor != 1: logger.info("Run final reinforcement.") selected_timesteps = timesteps_pfa + troubleshooting_mode = False reinforce() return edisgo.results From 1bd354b321c9624449e3cde2adaa6daa8ba27146 Mon Sep 17 00:00:00 2001 From: birgits Date: Tue, 24 Oct 2023 14:23:40 +0200 Subject: [PATCH 016/141] Add helper function temporarily --- edisgo/network/grids.py | 91 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 91 insertions(+) diff --git a/edisgo/network/grids.py b/edisgo/network/grids.py index e3cc3619b..978f5241b 100644 --- a/edisgo/network/grids.py +++ b/edisgo/network/grids.py @@ -350,6 +350,97 @@ def p_set_per_sector(self): """ return self.loads_df.groupby(["sector"]).sum()["p_set"] + def assign_length_to_grid_station(self): + """ + Assign length in km from each bus in the grid to the grid's station. + The length is written to column 'length_to_grid_station' in + :attr:`~.network.topology.Topology.buses_df`. + """ + buses_df = self._edisgo_obj.topology.buses_df + graph = self.graph + station = self.station.index[0] + + for bus in self.buses_df.index: + buses_df.at[bus, "length_to_grid_station"] = nx.shortest_path_length( + graph, source=station, target=bus, weight="length" + ) + + def assign_grid_feeder(self, mode: str = "grid_feeder"): + """ + Assigns MV or LV feeder to each bus and line, depending on the `mode`. + The feeder name is written to a new column `mv_feeder` or `grid_feeder`, + depending on the `mode`, in :class:`~.network.topology.Topology`'s + :attr:`~.network.topology.Topology.buses_df` and + :attr:`~.network.topology.Topology.lines_df`. 
+ The MV feeder name corresponds to the name of the neighboring node of the + HV/MV station. The grid feeder name corresponds to the name of the neighboring + node of the respective grid's station. The feeder name of the source node, i.e. + the station, is set to "station_node". + Parameters + ---------- + mode : str + Specifies whether to assign MV or grid feeder. + If mode is "mv_feeder" the MV feeder the busses and lines are in are + determined. If mode is "grid_feeder" LV busses and lines are assigned the + LV feeder they are in and MV busses and lines are assigned the MV feeder + they are in. Default: "grid_feeder". + """ + buses_df = self._edisgo_obj.topology.buses_df + lines_df = self._edisgo_obj.topology.lines_df + + if mode == "grid_feeder": + graph = self.graph + station = self.station.index[0] + column_name = "grid_feeder" + elif mode == "mv_feeder": + graph = self._edisgo_obj.topology.to_graph() + station = self._edisgo_obj.topology.transformers_hvmv_df["bus1"][0] + column_name = "mv_feeder" + else: + raise ValueError("Choose an existing mode.") + + # get all buses in network and remove station to get separate subgraphs + graph_nodes = list(graph.nodes()) + graph_nodes.remove(station) + subgraph = graph.subgraph(graph_nodes) + + buses_df.at[station, column_name] = "station_node" + for neighbor in graph.neighbors(station): + # get all nodes in that feeder by doing a DFS in the disconnected + # subgraph starting from the node adjacent to the station `neighbor` + feeder_graph = nx.dfs_tree(subgraph, source=neighbor) + feeder_lines = set() + for node in feeder_graph.nodes(): + buses_df.at[node, column_name] = neighbor + feeder_lines.update( + {edge[2]["branch_name"] for edge in graph.edges(node, data=True)} + ) + lines_df.loc[lines_df.index.isin(feeder_lines), column_name] = neighbor + + def get_feeder_stats(self) -> pd.DataFrame: + """ + Generate statistics of the grid's feeders. + So far, only the feeder length is determined. 
+ Returns + ------- + :pandas:`pandas.DataFrame` + Dataframe with feeder name in index and column 'length' containing the + respective feeder length in km. + """ + self.assign_grid_feeder() + self.assign_length_to_grid_station() + buses_df = self.buses_df + feeders = ( + buses_df.loc[ + buses_df["grid_feeder"] != "station_node", + ["grid_feeder", "length_to_grid_station"], + ] + .groupby("grid_feeder") + .max() + .rename(columns={"length_to_grid_station": "length"}) + ) + return feeders + def __repr__(self): return "_".join([self.__class__.__name__, str(self.id)]) From 9cc3e67211c04409b32b08630a4bd60211770b8f Mon Sep 17 00:00:00 2001 From: birgits Date: Tue, 24 Oct 2023 20:01:15 +0200 Subject: [PATCH 017/141] Bug fix --- edisgo/flex_opt/reinforce_grid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index 57106235a..0f7f171a5 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -185,7 +185,7 @@ def reinforce_grid( use_troubleshooting_mode=kwargs.get("use_troubleshooting_mode", True), run_initial_analyze=kwargs.get("run_initial_analyze", True), ) - if len(timesteps_pfa) == 0: + if timesteps_pfa is not None and len(timesteps_pfa) == 0: logger.debug("Zero time steps for grid reinforcement.") return edisgo.results From 74381c34edfd925d4ebc4cebe6d6a0c1c15d99be Mon Sep 17 00:00:00 2001 From: birgits Date: Thu, 18 Jan 2024 14:30:18 -0800 Subject: [PATCH 018/141] Revert "Change LV reniforcement" This reverts commit 43552cfd726e3f714fa26784f08dc09b72328093. 
--- edisgo/flex_opt/reinforce_grid.py | 69 +++++++------------------------ 1 file changed, 14 insertions(+), 55 deletions(-) diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index 0f7f171a5..a916f9aa7 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -903,7 +903,7 @@ def enhanced_reinforce_grid( logger.info("Run initial grid reinforcement for single LV grids.") for lv_grid in list(edisgo_object.topology.mv_grid.lv_grids): logger.info(f"Check initial convergence for {lv_grid=}.") - ts_converged, ts_not_converged = edisgo_object.analyze( + _, ts_not_converged = edisgo_object.analyze( mode="lv", raise_not_converged=False, lv_grid_id=lv_grid.id ) if len(ts_not_converged) > 0: @@ -922,32 +922,12 @@ def enhanced_reinforce_grid( ) try: logger.info(f"Try initial mode 'lv' reinforcement for {lv_grid=}.") - if len(ts_not_converged) > 0: - # if there are time steps that did not converge, run reinforcement - # first on converged time steps - edisgo_object.reinforce( - mode="lv", - lv_grid_id=lv_grid.id, - catch_convergence_problems=False, - timesteps_pfa=ts_converged, - **kwargs, - ) - # run reinforcement again in catch-convergence mode with all time steps - edisgo_object.reinforce( - mode="lv", - lv_grid_id=lv_grid.id, - catch_convergence_problems=True, - **kwargs, - ) - else: - # if all time steps converged, run normal reinforcement - edisgo_object.reinforce( - mode="lv", - lv_grid_id=lv_grid.id, - catch_convergence_problems=False, - run_initial_analyze=False, - **kwargs, - ) + edisgo_object.reinforce( + mode="lv", + lv_grid_id=lv_grid.id, + catch_convergence_problems=True, + **kwargs, + ) logger.info(f"Initial mode 'lv' reinforcement for {lv_grid} successful.") except (ValueError, RuntimeError, exceptions.MaximumIterationError): logger.warning(f"Initial mode 'lv' reinforcement for {lv_grid} failed.") @@ -980,7 +960,7 @@ def enhanced_reinforce_grid( for lv_grid in 
list(edisgo_object.topology.mv_grid.lv_grids): logger.info(f"Check convergence for {lv_grid=}.") - ts_converged, ts_not_converged = edisgo_object.analyze( + _, ts_not_converged = edisgo_object.analyze( mode="lv", raise_not_converged=False, lv_grid_id=lv_grid.id ) if len(ts_not_converged) > 0: @@ -1001,33 +981,12 @@ def enhanced_reinforce_grid( ) try: logger.info(f"Try mode 'lv' reinforcement for {lv_grid=}.") - if len(ts_not_converged) > 0: - # if there are time steps that did not converge, run reinforcement - # first on converged time steps - edisgo_object.reinforce( - mode="lv", - lv_grid_id=lv_grid.id, - catch_convergence_problems=False, - timesteps_pfa=ts_converged, - **kwargs, - ) - # run reinforcement again in catch-convergence mode with all time - # steps - edisgo_object.reinforce( - mode="lv", - lv_grid_id=lv_grid.id, - catch_convergence_problems=True, - **kwargs, - ) - else: - # if all time steps converged, run normal reinforcement - edisgo_object.reinforce( - mode="lv", - lv_grid_id=lv_grid.id, - catch_convergence_problems=False, - run_initial_analyze=False, - **kwargs, - ) + edisgo_object.reinforce( + mode="lv", + lv_grid_id=lv_grid.id, + catch_convergence_problems=True, + **kwargs, + ) logger.info(f"Mode 'lv' reinforcement for {lv_grid} successful.") except (ValueError, RuntimeError, exceptions.MaximumIterationError): logger.info(f"Mode 'lv' reinforcement for {lv_grid} failed.") From 4aff553ff5a77db84d845ef91aecf532f905c35e Mon Sep 17 00:00:00 2001 From: birgits Date: Thu, 18 Jan 2024 20:55:31 -0800 Subject: [PATCH 019/141] Fix test - setup class needs to be rerun --- tests/tools/test_temporal_complexity_reduction.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/tools/test_temporal_complexity_reduction.py b/tests/tools/test_temporal_complexity_reduction.py index 4664f17fd..9b760da7d 100644 --- a/tests/tools/test_temporal_complexity_reduction.py +++ b/tests/tools/test_temporal_complexity_reduction.py @@ -7,7 +7,7 @@ class 
TestTemporalComplexityReduction: - @classmethod + @pytest.fixture(autouse=True) def setup_class(self): self.edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_path) self.edisgo.set_time_series_worst_case_analysis() From db325bca2986f236d54ceebb9366de6e6cfb9e6a Mon Sep 17 00:00:00 2001 From: birgits Date: Fri, 19 Jan 2024 15:35:42 -0800 Subject: [PATCH 020/141] Get rid of redundant code and parameter documentation --- edisgo/tools/temporal_complexity_reduction.py | 211 +++++++++++------- 1 file changed, 128 insertions(+), 83 deletions(-) diff --git a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index 267362529..7f7dd8da9 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -42,7 +42,7 @@ def _scored_most_critical_loading(edisgo_obj: EDisGo) -> pd.Series: # Get lines that have violations crit_lines_score = relative_i_res[relative_i_res > 1] - # Get most critical timesteps per component + # Get most critical time steps per component crit_lines_score = ( (crit_lines_score[crit_lines_score == crit_lines_score.max()]) .dropna(how="all") @@ -115,23 +115,23 @@ def _scored_most_critical_loading_time_interval( The eDisGo API object time_steps_per_time_interval : int Amount of continuous time steps in an interval that violation is determined for. - Currently, these can only be multiples of 24. + See parameter `time_steps_per_time_interval` in + :func:`~get_most_critical_time_intervals` for more information. Default: 168. time_steps_per_day : int - Number of time steps in one day. In case of an hourly resolution this is 24. - As currently only an hourly resolution is possible, this value should always be - 24. + Number of time steps in one day. See parameter `time_steps_per_day` in + :func:`~get_most_critical_time_intervals` for more information. Default: 24. time_step_day_start : int - Time step of the day at which each interval should start. 
If you want it to - start at midnight, this should be set to 0. Default: 0. + Time step of the day at which each interval should start. See parameter + `time_step_day_start` in :func:`~get_most_critical_time_intervals` for more + information. + Default: 0. overloading_factor : float Factor at which an overloading of a component is considered to be close enough - to the highest overloading of that component. This is used to determine the - number of components that reach their highest overloading in each time interval. - Per default, it is set to 0.95, which means that if the highest overloading of - a component is 2, it will be considered maximally overloaded at an overloading - of higher or equal to 2*0.95. + to the highest overloading of that component. See parameter + `overloading_factor` in :func:`~get_most_critical_time_intervals` for more + information. Default: 0.95. Returns @@ -171,52 +171,15 @@ def _scored_most_critical_loading_time_interval( costs = pd.concat([costs_lines, costs_trafos_lv, costs_trafos_mv]) crit_lines_cost = crit_lines_score * costs - # Get highest overloading in each window for each component and sum it up - crit_timesteps = ( - crit_lines_cost.rolling( - window=int(time_steps_per_time_interval), closed="right" - ) - .max() - .sum(axis=1) - ) - # select each nth time window to only consider windows starting at a certain time - # of day and sort time intervals in descending order - # ToDo: To make function work for frequencies other than hourly, the following - # needs to be adapted to index based on time index instead of iloc - crit_timesteps = ( - crit_timesteps.iloc[int(time_steps_per_time_interval) - 1 :] - .iloc[time_step_day_start + 1 :: time_steps_per_day] - .sort_values(ascending=False) - ) - # move time index as rolling gives the end of the time interval, but we want the - # beginning - timesteps = crit_timesteps.index - pd.DateOffset( - hours=int(time_steps_per_time_interval) - ) - time_intervals = [ - pd.date_range( - 
start=timestep, periods=int(time_steps_per_time_interval), freq="h" - ) - for timestep in timesteps - ] - - # make dataframe with time steps in each time interval and the percentage of - # components that reach their maximum overloading - time_intervals_df = pd.DataFrame( - index=range(len(time_intervals)), - columns=["time_steps", "percentage_max_overloaded_components"], + time_intervals_df = _most_critical_time_interval( + costs_per_time_step=crit_lines_cost, + grid_issues_magnitude_df=crit_lines_score, + which="overloading", + deviation_factor=overloading_factor, + time_steps_per_time_interval=time_steps_per_time_interval, + time_steps_per_day=time_steps_per_day, + time_step_day_start=time_step_day_start, ) - time_intervals_df["time_steps"] = time_intervals - lines_no_max = crit_lines_score.columns.values - total_lines = len(lines_no_max) - max_per_line = crit_lines_score.max() - for i in range(len(time_intervals)): - # check if worst overloading of every line is included in time interval - max_per_line_ti = crit_lines_score.loc[time_intervals[i]].max() - time_intervals_df["percentage_max_overloaded_components"][i] = ( - len(max_per_line_ti[max_per_line_ti >= max_per_line * overloading_factor]) - / total_lines - ) return time_intervals_df @@ -245,24 +208,23 @@ def _scored_most_critical_voltage_issues_time_interval( The eDisGo API object time_steps_per_time_interval : int Amount of continuous time steps in an interval that violation is determined for. - Currently, these can only be multiples of 24. + See parameter `time_steps_per_time_interval` in + :func:`~get_most_critical_time_intervals` for more information. Default: 168. time_steps_per_day : int - Number of time steps in one day. In case of an hourly resolution this is 24. - As currently only an hourly resolution is possible, this value should always be - 24. + Number of time steps in one day. See parameter `time_steps_per_day` in + :func:`~get_most_critical_time_intervals` for more information. Default: 24. 
time_step_day_start : int - Time step of the day at which each interval should start. If you want it to - start at midnight, this should be set to 0. Default: 0. + Time step of the day at which each interval should start. See parameter + `time_step_day_start` in :func:`~get_most_critical_time_intervals` for more + information. + Default: 0. voltage_deviation_factor : float Factor at which a voltage deviation at a bus is considered to be close enough - to the highest voltage deviation at that bus. This is used to determine the - number of buses that reach their highest voltage deviation in each time - interval. Per default, it is set to 0.95. This means that if the highest voltage - deviation at a bus is 0.2, it will be included in the determination of number - of buses that reach their maximum voltage deviation in a certain time interval - at a voltage deviation of higher or equal to 0.2*0.95. + to the highest voltage deviation at that bus. See parameter + `voltage_deviation_factor` in :func:`~get_most_critical_time_intervals` for more + information. Default: 0.95. 
Returns @@ -284,7 +246,7 @@ def _scored_most_critical_voltage_issues_time_interval( voltage_diff = check_tech_constraints.voltage_deviation_from_allowed_voltage_limits( edisgo_obj ) - voltage_diff = voltage_diff.abs()[voltage_diff.abs() > 0] + voltage_diff = voltage_diff[voltage_diff != 0.0].abs() # determine costs per feeder lv_station_buses = [ @@ -331,9 +293,92 @@ def _scored_most_critical_voltage_issues_time_interval( # weigh feeder voltage violation with costs per feeder voltage_diff_feeder = voltage_diff_feeder * costs_per_feeder.squeeze() - # Get the highest voltage issues in each window for each feeder and sum it up + time_intervals_df = _most_critical_time_interval( + costs_per_time_step=voltage_diff_feeder, + grid_issues_magnitude_df=voltage_diff_copy, + which="voltage", + deviation_factor=voltage_deviation_factor, + time_steps_per_time_interval=time_steps_per_time_interval, + time_steps_per_day=time_steps_per_day, + time_step_day_start=time_step_day_start, + ) + + return time_intervals_df + + +def _most_critical_time_interval( + costs_per_time_step, + grid_issues_magnitude_df, + which, + deviation_factor=0.95, + time_steps_per_time_interval=168, + time_steps_per_day=24, + time_step_day_start=0, +): + """ + Helper function used in functions + :func:`~_scored_most_critical_loading_time_interval` and + :func:`~_scored_most_critical_voltage_issues_time_interval` + to get time intervals sorted by severity of grid issue. + + This function currently only works for an hourly resolution! + + Parameters + ----------- + costs_per_time_step : :pandas:`pandas.DataFrame` + Dataframe containing the estimated grid expansion costs per line or feeder. + Columns contain line or feeder names. + Index of the dataframe are all time steps power flow analysis + was conducted for of type :pandas:`pandas.Timestamp`. 
+ grid_issues_magnitude_df : :pandas:`pandas.DataFrame` + Dataframe containing the relative overloading or voltage deviation per time + step in case of an overloading or voltage issue in that time step. + Columns contain line or bus names. + Index of the dataframe are all time steps power flow analysis + was conducted for of type :pandas:`pandas.Timestamp`. + which : str + Defines whether function is used to determine most critical time intervals for + voltage or overloading problems. Can either be "voltage" or "overloading". + deviation_factor : float + Factor at which a grid issue is considered to be close enough to the highest + grid issue. In case parameter `which` is "voltage", see parameter + `voltage_deviation_factor` in :func:`~get_most_critical_time_intervals` for more + information. In case parameter `which` is "overloading", see parameter + `overloading_factor` in :func:`~get_most_critical_time_intervals` for more + information. + Default: 0.95. + time_steps_per_time_interval : int + Amount of continuous time steps in an interval that violation is determined for. + See parameter `time_steps_per_time_interval` in + :func:`~get_most_critical_time_intervals` for more information. + Default: 168. + time_steps_per_day : int + Number of time steps in one day. See parameter `time_steps_per_day` in + :func:`~get_most_critical_time_intervals` for more information. + Default: 24. + time_step_day_start : int + Time step of the day at which each interval should start. See parameter + `time_step_day_start` in :func:`~get_most_critical_time_intervals` for more + information. + Default: 0. + + Returns + -------- + :pandas:`pandas.DataFrame` + Contains time intervals in which grid expansion needs due to voltage issues + are detected. The time intervals are sorted descending + by the expected cumulated grid expansion costs, so that the time interval with + the highest expected costs corresponds to index 0. 
The time steps in the + respective time interval are given in column "time_steps" and the share + of buses for which the maximum voltage deviation is reached during the time + interval is given in column "percentage_buses_max_voltage_deviation". Each bus + is only considered once. That means if its maximum voltage deviation was + already considered in an earlier time interval, it is not considered again. + + """ + # get the highest issues in each window for each feeder and sum it up crit_timesteps = ( - voltage_diff_feeder.rolling( + costs_per_time_step.rolling( window=int(time_steps_per_time_interval), closed="right" ) .max() @@ -359,26 +404,26 @@ def _scored_most_critical_voltage_issues_time_interval( ] # make dataframe with time steps in each time interval and the percentage of - # buses that reach their maximum voltage deviation + # buses/branches that reach their maximum voltage deviation / overloading + if which == "voltage": + percentage = "percentage_buses_max_voltage_deviation" + else: + percentage = "percentage_max_overloaded_components" time_intervals_df = pd.DataFrame( index=range(len(time_intervals)), - columns=["time_steps", "percentage_buses_max_voltage_deviation"], + columns=["time_steps", percentage], ) time_intervals_df["time_steps"] = time_intervals - max_per_bus = voltage_diff_copy.max().fillna(0) - buses_no_max = max_per_bus.index.values - total_buses = len(buses_no_max) + max_per_bus = grid_issues_magnitude_df.max().fillna(0) + total_buses = len(grid_issues_magnitude_df.columns) for i in range(len(time_intervals)): # check if worst voltage deviation of every bus is included in time interval - max_per_bus_ti = voltage_diff_copy.loc[time_intervals[i]].max() - time_intervals_df["percentage_buses_max_voltage_deviation"][i] = ( - len( - max_per_bus_ti[max_per_bus_ti >= max_per_bus * voltage_deviation_factor] - ) + max_per_bus_ti = grid_issues_magnitude_df.loc[time_intervals[i]].max() + time_intervals_df[percentage][i] = ( + 
len(max_per_bus_ti[max_per_bus_ti >= max_per_bus * deviation_factor]) / total_buses ) - return time_intervals_df From 716c636fe461e607660866b7be2f25ca76825dc7 Mon Sep 17 00:00:00 2001 From: birgits Date: Fri, 19 Jan 2024 16:42:16 -0800 Subject: [PATCH 021/141] Allow weighting by costs or without weighting factor --- edisgo/tools/temporal_complexity_reduction.py | 159 +++++++++++------- 1 file changed, 102 insertions(+), 57 deletions(-) diff --git a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index 7f7dd8da9..abf743ba1 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -3,7 +3,6 @@ import logging import os -from copy import deepcopy from typing import TYPE_CHECKING import numpy as np @@ -97,12 +96,13 @@ def _scored_most_critical_loading_time_interval( time_steps_per_day=24, time_step_day_start=0, overloading_factor=0.95, + weigh_by_costs=True, ): """ Get time intervals sorted by severity of overloadings. - The overloading is weighed by the estimated expansion costs of each respective line - and transformer. + The overloading can weighed by the estimated expansion costs of each respective line + and transformer. See parameter `weigh_by_costs` for more information. The length of the time intervals and hour of day at which the time intervals should begin can be set through the parameters `time_steps_per_time_interval` and `time_step_day_start`. @@ -133,6 +133,11 @@ def _scored_most_critical_loading_time_interval( `overloading_factor` in :func:`~get_most_critical_time_intervals` for more information. Default: 0.95. + weigh_by_costs : bool + Defines whether overloading issues should be weighed by estimated grid expansion + costs or not. See parameter `weigh_by_costs` in + :func:`~get_most_critical_time_intervals` for more information. + Default: True. 
Returns -------- @@ -153,26 +158,29 @@ def _scored_most_critical_loading_time_interval( # Get lines that have violations and replace nan values with 0 crit_lines_score = relative_i_res[relative_i_res > 1].fillna(0) - # weight line violations with expansion costs - costs_lines = ( - line_expansion_costs(edisgo_obj).drop(columns="voltage_level").sum(axis=1) - ) - costs_trafos_lv = pd.Series( - index=[ - str(lv_grid) + "_station" - for lv_grid in list(edisgo_obj.topology.mv_grid.lv_grids) - ], - data=edisgo_obj.config["costs_transformers"]["lv"], - ) - costs_trafos_mv = pd.Series( - index=["MVGrid_" + str(edisgo_obj.topology.id) + "_station"], - data=edisgo_obj.config["costs_transformers"]["mv"], - ) - costs = pd.concat([costs_lines, costs_trafos_lv, costs_trafos_mv]) - crit_lines_cost = crit_lines_score * costs + if weigh_by_costs: + # weigh line violations with expansion costs + costs_lines = ( + line_expansion_costs(edisgo_obj).drop(columns="voltage_level").sum(axis=1) + ) + costs_trafos_lv = pd.Series( + index=[ + str(lv_grid) + "_station" + for lv_grid in list(edisgo_obj.topology.mv_grid.lv_grids) + ], + data=edisgo_obj.config["costs_transformers"]["lv"], + ) + costs_trafos_mv = pd.Series( + index=["MVGrid_" + str(edisgo_obj.topology.id) + "_station"], + data=edisgo_obj.config["costs_transformers"]["mv"], + ) + costs = pd.concat([costs_lines, costs_trafos_lv, costs_trafos_mv]) + crit_lines_weighed = crit_lines_score * costs + else: + crit_lines_weighed = crit_lines_score.copy() time_intervals_df = _most_critical_time_interval( - costs_per_time_step=crit_lines_cost, + costs_per_time_step=crit_lines_weighed, grid_issues_magnitude_df=crit_lines_score, which="overloading", deviation_factor=overloading_factor, @@ -190,12 +198,13 @@ def _scored_most_critical_voltage_issues_time_interval( time_steps_per_day=24, time_step_day_start=0, voltage_deviation_factor=0.95, + weigh_by_costs=True, ): """ Get time intervals sorted by severity of voltage issues. 
- The voltage issues are weighed by the estimated expansion costs in each respective - feeder. + The voltage issues can be weighed by the estimated expansion costs in each + respective feeder. See parameter `weigh_by_costs` for more information. The length of the time intervals and hour of day at which the time intervals should begin can be set through the parameters `time_steps_per_time_interval` and `time_step_day_start`. @@ -226,6 +235,11 @@ def _scored_most_critical_voltage_issues_time_interval( `voltage_deviation_factor` in :func:`~get_most_critical_time_intervals` for more information. Default: 0.95. + weigh_by_costs : bool + Defines whether voltage issues should be weighed by estimated grid expansion + costs or not. See parameter `weigh_by_costs` in + :func:`~get_most_critical_time_intervals` for more information. + Default: True. Returns -------- @@ -242,60 +256,61 @@ def _scored_most_critical_voltage_issues_time_interval( """ - # Get voltage deviation from allowed voltage limits + # get voltage deviation from allowed voltage limits voltage_diff = check_tech_constraints.voltage_deviation_from_allowed_voltage_limits( edisgo_obj ) - voltage_diff = voltage_diff[voltage_diff != 0.0].abs() - - # determine costs per feeder - lv_station_buses = [ - lv_grid.station.index[0] for lv_grid in edisgo_obj.topology.mv_grid.lv_grids - ] - costs_lines = ( - line_expansion_costs(edisgo_obj).drop(columns="voltage_level").sum(axis=1) - ) - costs_trafos_lv = pd.Series( - index=lv_station_buses, - data=edisgo_obj.config._data["costs_transformers"]["lv"], - ) - costs = pd.concat([costs_lines, costs_trafos_lv]) + voltage_diff = voltage_diff[voltage_diff != 0.0].abs().fillna(0) # set feeder using MV feeder for MV components and LV feeder for LV components edisgo_obj.topology.assign_feeders(mode="grid_feeder") # feeders of buses at MV/LV station's secondary sides are set to the name of the # station bus to have them as separate feeders + lv_station_buses = [ + 
lv_grid.station.index[0] for lv_grid in edisgo_obj.topology.mv_grid.lv_grids + ] edisgo_obj.topology.buses_df.loc[lv_station_buses, "grid_feeder"] = lv_station_buses - feeder_lines = edisgo_obj.topology.lines_df.grid_feeder - feeder_trafos_lv = pd.Series( - index=lv_station_buses, - data=lv_station_buses, - ) - feeder = pd.concat([feeder_lines, feeder_trafos_lv]) - costs_per_feeder = ( - pd.concat([costs.rename("costs"), feeder.rename("feeder")], axis=1) - .groupby(by="feeder")[["costs"]] - .sum() - ) - # check for every feeder if any of the buses within violate the allowed voltage # deviation, by grouping voltage_diff per feeder feeder_buses = edisgo_obj.topology.buses_df.grid_feeder columns = [feeder_buses.loc[col] for col in voltage_diff.columns] - voltage_diff_copy = deepcopy(voltage_diff).fillna(0) - voltage_diff.columns = columns + voltage_diff_feeder = voltage_diff.copy() + voltage_diff_feeder.columns = columns voltage_diff_feeder = ( - voltage_diff.transpose().reset_index().groupby(by="index").sum().transpose() + voltage_diff.transpose().reset_index().groupby(by="Bus").sum().transpose() ) voltage_diff_feeder[voltage_diff_feeder != 0] = 1 - # weigh feeder voltage violation with costs per feeder - voltage_diff_feeder = voltage_diff_feeder * costs_per_feeder.squeeze() + if weigh_by_costs: + # determine costs per feeder + costs_lines = ( + line_expansion_costs(edisgo_obj).drop(columns="voltage_level").sum(axis=1) + ) + costs_trafos_lv = pd.Series( + index=lv_station_buses, + data=edisgo_obj.config._data["costs_transformers"]["lv"], + ) + costs = pd.concat([costs_lines, costs_trafos_lv]) + + feeder_lines = edisgo_obj.topology.lines_df.grid_feeder + feeder_trafos_lv = pd.Series( + index=lv_station_buses, + data=lv_station_buses, + ) + feeder = pd.concat([feeder_lines, feeder_trafos_lv]) + costs_per_feeder = ( + pd.concat([costs.rename("costs"), feeder.rename("feeder")], axis=1) + .groupby(by="feeder")[["costs"]] + .sum() + ) + + # weigh feeder voltage violation 
with costs per feeder + voltage_diff_feeder = voltage_diff_feeder * costs_per_feeder.squeeze() time_intervals_df = _most_critical_time_interval( costs_per_time_step=voltage_diff_feeder, - grid_issues_magnitude_df=voltage_diff_copy, + grid_issues_magnitude_df=voltage_diff, which="voltage", deviation_factor=voltage_deviation_factor, time_steps_per_time_interval=time_steps_per_time_interval, @@ -524,11 +539,12 @@ def get_most_critical_time_intervals( use_troubleshooting_mode=True, overloading_factor=0.95, voltage_deviation_factor=0.95, + weigh_by_costs=True, ): """ Get time intervals sorted by severity of overloadings as well as voltage issues. - The overloading and voltage issues are weighed by the estimated expansion costs + The overloading and voltage issues can be weighed by the estimated expansion costs solving the issue would require. The length of the time intervals and hour of day at which the time intervals should begin can be set through the parameters `time_steps_per_time_interval` and @@ -585,6 +601,33 @@ def get_most_critical_time_intervals( of buses that reach their maximum voltage deviation in a certain time interval at a voltage deviation of higher or equal to 0.2*0.95. Default: 0.95. + weigh_by_costs : bool + Defines whether overloading and voltage issues should be weighed by estimated + grid expansion costs or not. This can be done in order to take into account that + some grid issues are more relevant, as reinforcing a certain line or feeder will + be more expensive than another one. + + In case of voltage issues: + If True, the costs for each MV and LV feeder, as well as MV/LV station are + determined using the costs for earth work and new lines over the full length of + the feeder respectively for a new MV/LV station. In each time interval, the + estimated costs are only taken into account, in case there is a voltage issue + somewhere in the feeder. 
+ The costs don't convey the actual costs but are an estimation, as + the real number of parallel lines needed is not determined and the whole feeder + length is used instead of the length over two-thirds of the feeder. + If False, the severity of each feeder's voltage issue is set to be the same. + + In case of overloading issues: + If True, the overloading of each line is multiplied by + the respective grid expansion costs of that line including costs for earth work + and one new line. + The costs don't convey the actual costs but are an estimation, as + the discrete needed number of parallel lines is not considered. + If False, only the relative overloading is used to determine the most relevant + time intervals. + + Default: True. Returns -------- @@ -626,6 +669,7 @@ def get_most_critical_time_intervals( time_steps_per_time_interval, time_step_day_start=time_step_day_start, overloading_factor=overloading_factor, + weigh_by_costs=weigh_by_costs, ) if num_time_intervals is None: num_time_intervals = int(np.ceil(len(loading_scores) * percentage)) @@ -646,6 +690,7 @@ def get_most_critical_time_intervals( time_steps_per_time_interval, time_step_day_start=time_step_day_start, voltage_deviation_factor=voltage_deviation_factor, + weigh_by_costs=weigh_by_costs, ) if num_time_intervals is None: num_time_intervals = int(np.ceil(len(voltage_scores) * percentage)) From 8d046b30e17d26aa72b3ea9d7053a258792ca512 Mon Sep 17 00:00:00 2001 From: birgits Date: Tue, 23 Jan 2024 19:36:17 -0800 Subject: [PATCH 022/141] Fix spelling --- edisgo/tools/temporal_complexity_reduction.py | 50 +++++++++---------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index abf743ba1..242731170 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -96,13 +96,13 @@ def _scored_most_critical_loading_time_interval( 
time_steps_per_day=24, time_step_day_start=0, overloading_factor=0.95, - weigh_by_costs=True, + weight_by_costs=True, ): """ Get time intervals sorted by severity of overloadings. - The overloading can weighed by the estimated expansion costs of each respective line - and transformer. See parameter `weigh_by_costs` for more information. + The overloading can weighted by the estimated expansion costs of each respective + line and transformer. See parameter `weight_by_costs` for more information. The length of the time intervals and hour of day at which the time intervals should begin can be set through the parameters `time_steps_per_time_interval` and `time_step_day_start`. @@ -133,9 +133,9 @@ def _scored_most_critical_loading_time_interval( `overloading_factor` in :func:`~get_most_critical_time_intervals` for more information. Default: 0.95. - weigh_by_costs : bool - Defines whether overloading issues should be weighed by estimated grid expansion - costs or not. See parameter `weigh_by_costs` in + weight_by_costs : bool + Defines whether overloading issues should be weighted by estimated grid + expansion costs or not. See parameter `weight_by_costs` in :func:`~get_most_critical_time_intervals` for more information. Default: True. 
@@ -158,8 +158,8 @@ def _scored_most_critical_loading_time_interval( # Get lines that have violations and replace nan values with 0 crit_lines_score = relative_i_res[relative_i_res > 1].fillna(0) - if weigh_by_costs: - # weigh line violations with expansion costs + if weight_by_costs: + # weight line violations with expansion costs costs_lines = ( line_expansion_costs(edisgo_obj).drop(columns="voltage_level").sum(axis=1) ) @@ -175,12 +175,12 @@ def _scored_most_critical_loading_time_interval( data=edisgo_obj.config["costs_transformers"]["mv"], ) costs = pd.concat([costs_lines, costs_trafos_lv, costs_trafos_mv]) - crit_lines_weighed = crit_lines_score * costs + crit_lines_weighted = crit_lines_score * costs else: - crit_lines_weighed = crit_lines_score.copy() + crit_lines_weighted = crit_lines_score.copy() time_intervals_df = _most_critical_time_interval( - costs_per_time_step=crit_lines_weighed, + costs_per_time_step=crit_lines_weighted, grid_issues_magnitude_df=crit_lines_score, which="overloading", deviation_factor=overloading_factor, @@ -198,13 +198,13 @@ def _scored_most_critical_voltage_issues_time_interval( time_steps_per_day=24, time_step_day_start=0, voltage_deviation_factor=0.95, - weigh_by_costs=True, + weight_by_costs=True, ): """ Get time intervals sorted by severity of voltage issues. - The voltage issues can be weighed by the estimated expansion costs in each - respective feeder. See parameter `weigh_by_costs` for more information. + The voltage issues can be weighted by the estimated expansion costs in each + respective feeder. See parameter `weight_by_costs` for more information. The length of the time intervals and hour of day at which the time intervals should begin can be set through the parameters `time_steps_per_time_interval` and `time_step_day_start`. @@ -235,9 +235,9 @@ def _scored_most_critical_voltage_issues_time_interval( `voltage_deviation_factor` in :func:`~get_most_critical_time_intervals` for more information. Default: 0.95. 
- weigh_by_costs : bool - Defines whether voltage issues should be weighed by estimated grid expansion - costs or not. See parameter `weigh_by_costs` in + weight_by_costs : bool + Defines whether voltage issues should be weighted by estimated grid expansion + costs or not. See parameter `weight_by_costs` in :func:`~get_most_critical_time_intervals` for more information. Default: True. @@ -282,7 +282,7 @@ def _scored_most_critical_voltage_issues_time_interval( ) voltage_diff_feeder[voltage_diff_feeder != 0] = 1 - if weigh_by_costs: + if weight_by_costs: # determine costs per feeder costs_lines = ( line_expansion_costs(edisgo_obj).drop(columns="voltage_level").sum(axis=1) @@ -305,7 +305,7 @@ def _scored_most_critical_voltage_issues_time_interval( .sum() ) - # weigh feeder voltage violation with costs per feeder + # weight feeder voltage violation with costs per feeder voltage_diff_feeder = voltage_diff_feeder * costs_per_feeder.squeeze() time_intervals_df = _most_critical_time_interval( @@ -539,12 +539,12 @@ def get_most_critical_time_intervals( use_troubleshooting_mode=True, overloading_factor=0.95, voltage_deviation_factor=0.95, - weigh_by_costs=True, + weight_by_costs=True, ): """ Get time intervals sorted by severity of overloadings as well as voltage issues. - The overloading and voltage issues can be weighed by the estimated expansion costs + The overloading and voltage issues can be weighted by the estimated expansion costs solving the issue would require. The length of the time intervals and hour of day at which the time intervals should begin can be set through the parameters `time_steps_per_time_interval` and @@ -601,8 +601,8 @@ def get_most_critical_time_intervals( of buses that reach their maximum voltage deviation in a certain time interval at a voltage deviation of higher or equal to 0.2*0.95. Default: 0.95. 
- weigh_by_costs : bool - Defines whether overloading and voltage issues should be weighed by estimated + weight_by_costs : bool + Defines whether overloading and voltage issues should be weighted by estimated grid expansion costs or not. This can be done in order to take into account that some grid issues are more relevant, as reinforcing a certain line or feeder will be more expensive than another one. @@ -669,7 +669,7 @@ def get_most_critical_time_intervals( time_steps_per_time_interval, time_step_day_start=time_step_day_start, overloading_factor=overloading_factor, - weigh_by_costs=weigh_by_costs, + weight_by_costs=weight_by_costs, ) if num_time_intervals is None: num_time_intervals = int(np.ceil(len(loading_scores) * percentage)) @@ -690,7 +690,7 @@ def get_most_critical_time_intervals( time_steps_per_time_interval, time_step_day_start=time_step_day_start, voltage_deviation_factor=voltage_deviation_factor, - weigh_by_costs=weigh_by_costs, + weight_by_costs=weight_by_costs, ) if num_time_intervals is None: num_time_intervals = int(np.ceil(len(voltage_scores) * percentage)) From c2e45ad9982c6de278798093e649dce3c7d64936 Mon Sep 17 00:00:00 2001 From: birgits Date: Tue, 23 Jan 2024 19:36:50 -0800 Subject: [PATCH 023/141] Bugfix and remove time intervals without grid issues --- edisgo/tools/temporal_complexity_reduction.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index 242731170..3453bf692 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -405,17 +405,18 @@ def _most_critical_time_interval( # needs to be adapted to index based on time index instead of iloc crit_timesteps = ( crit_timesteps.iloc[int(time_steps_per_time_interval) - 1 :] - .iloc[time_step_day_start + 1 :: time_steps_per_day] + .iloc[time_step_day_start::time_steps_per_day] .sort_values(ascending=False) ) 
- timesteps = crit_timesteps.index - pd.DateOffset( - hours=int(time_steps_per_time_interval) - ) + # get time steps in each time interval - these are set up setting the given time + # step to be the end of the respective time interval, as rolling() function gives + # the time step at the end of the considered time interval; further, only time + # intervals with a sum greater than zero are considered, as zero values mean, that + # there is no grid issue in the respective time interval time_intervals = [ - pd.date_range( - start=timestep, periods=int(time_steps_per_time_interval), freq="h" - ) - for timestep in timesteps + pd.date_range(end=timestep, periods=int(time_steps_per_time_interval), freq="h") + for timestep in crit_timesteps.index + if crit_timesteps[timestep] != 0.0 ] # make dataframe with time steps in each time interval and the percentage of From edd4ba3db93c10d7c74dd012f4ee01d3a0a3fe7a Mon Sep 17 00:00:00 2001 From: birgits Date: Tue, 23 Jan 2024 19:37:40 -0800 Subject: [PATCH 024/141] Adapt tests to bugfix and new parameter --- .../test_temporal_complexity_reduction.py | 30 +++++++++++++------ 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/tests/tools/test_temporal_complexity_reduction.py b/tests/tools/test_temporal_complexity_reduction.py index 9b760da7d..91fb18feb 100644 --- a/tests/tools/test_temporal_complexity_reduction.py +++ b/tests/tools/test_temporal_complexity_reduction.py @@ -62,10 +62,10 @@ def test_get_most_critical_time_steps(self): def test__scored_most_critical_loading_time_interval(self): # test with default values ts_crit = temp_red._scored_most_critical_loading_time_interval(self.edisgo, 24) - assert len(ts_crit) == 9 + assert len(ts_crit) == 10 assert ( ts_crit.loc[0, "time_steps"] - == pd.date_range("1/5/2018", periods=24, freq="H") + == pd.date_range("1/8/2018", periods=24, freq="H") ).all() assert np.isclose( ts_crit.loc[0, "percentage_max_overloaded_components"], 0.96479 @@ -85,29 +85,41 @@ def 
test__scored_most_critical_loading_time_interval(self): ).all() assert ts_crit.loc[0, "percentage_max_overloaded_components"] == 1 + # test without weighting by costs + ts_crit = temp_red._scored_most_critical_loading_time_interval( + self.edisgo, + 48, + weight_by_costs=False, + ) + assert len(ts_crit) == 9 + assert ( + ts_crit.loc[0, "time_steps"] + == pd.date_range("1/5/2018 0:00", periods=48, freq="H") + ).all() + def test__scored_most_critical_voltage_issues_time_interval(self): # test with default values ts_crit = temp_red._scored_most_critical_voltage_issues_time_interval( self.edisgo, 24 ) - assert len(ts_crit) == 9 + assert len(ts_crit) == 5 assert ( ts_crit.loc[0, "time_steps"] == pd.date_range("1/1/2018", periods=24, freq="H") ).all() - assert np.isclose(ts_crit.loc[0, "percentage_buses_max_voltage_deviation"], 1.0) - assert np.isclose(ts_crit.loc[1, "percentage_buses_max_voltage_deviation"], 1.0) + assert ( + ts_crit.loc[:, "percentage_buses_max_voltage_deviation"].values == 1.0 + ).all() # test with non-default values ts_crit = temp_red._scored_most_critical_voltage_issues_time_interval( - self.edisgo, 24, time_step_day_start=4, voltage_deviation_factor=0.5 + self.edisgo, 72, time_step_day_start=4, weight_by_costs=False ) - assert len(ts_crit) == 9 + assert len(ts_crit) == 5 assert ( ts_crit.loc[0, "time_steps"] - == pd.date_range("1/1/2018 4:00", periods=24, freq="H") + == pd.date_range("1/1/2018 4:00", periods=72, freq="H") ).all() - assert np.isclose(ts_crit.loc[0, "percentage_buses_max_voltage_deviation"], 1.0) def test_get_most_critical_time_intervals(self): self.edisgo.timeseries.timeindex = self.edisgo.timeseries.timeindex[:25] From 0595d573156d9a0861d9bee796d369e4451a5d9e Mon Sep 17 00:00:00 2001 From: birgits Date: Tue, 23 Jan 2024 19:51:56 -0800 Subject: [PATCH 025/141] Adapt test --- .../tools/test_temporal_complexity_reduction.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git 
a/tests/tools/test_temporal_complexity_reduction.py b/tests/tools/test_temporal_complexity_reduction.py index 91fb18feb..03a41b005 100644 --- a/tests/tools/test_temporal_complexity_reduction.py +++ b/tests/tools/test_temporal_complexity_reduction.py @@ -122,11 +122,17 @@ def test__scored_most_critical_voltage_issues_time_interval(self): ).all() def test_get_most_critical_time_intervals(self): - self.edisgo.timeseries.timeindex = self.edisgo.timeseries.timeindex[:25] - self.edisgo.timeseries.scale_timeseries(p_scaling_factor=5, q_scaling_factor=5) + self.edisgo.timeseries.scale_timeseries(p_scaling_factor=2, q_scaling_factor=2) steps = temp_red.get_most_critical_time_intervals( - self.edisgo, time_steps_per_time_interval=24 + self.edisgo, time_steps_per_time_interval=24, percentage=0.5 ) - assert len(steps) == 1 - assert len(steps.columns) == 4 + assert len(steps) == 5 + assert ( + steps.loc[0, "time_steps_overloading"] + == pd.date_range("1/8/2018", periods=24, freq="H") + ).all() + assert ( + steps.loc[0, "time_steps_voltage_issues"] + == pd.date_range("1/1/2018", periods=24, freq="H") + ).all() From 4979e3bb8370dce0d8716f155b87b4f4526d9258 Mon Sep 17 00:00:00 2001 From: birgits Date: Mon, 29 Jan 2024 14:19:28 -0800 Subject: [PATCH 026/141] Move approximation of costs to separate functions and add tests --- edisgo/tools/temporal_complexity_reduction.py | 129 ++++++++++++------ .../test_temporal_complexity_reduction.py | 12 ++ 2 files changed, 103 insertions(+), 38 deletions(-) diff --git a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index 3453bf692..e445bdcaa 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -160,21 +160,7 @@ def _scored_most_critical_loading_time_interval( if weight_by_costs: # weight line violations with expansion costs - costs_lines = ( - line_expansion_costs(edisgo_obj).drop(columns="voltage_level").sum(axis=1) - ) - 
costs_trafos_lv = pd.Series( - index=[ - str(lv_grid) + "_station" - for lv_grid in list(edisgo_obj.topology.mv_grid.lv_grids) - ], - data=edisgo_obj.config["costs_transformers"]["lv"], - ) - costs_trafos_mv = pd.Series( - index=["MVGrid_" + str(edisgo_obj.topology.id) + "_station"], - data=edisgo_obj.config["costs_transformers"]["mv"], - ) - costs = pd.concat([costs_lines, costs_trafos_lv, costs_trafos_mv]) + costs = _costs_per_line_and_transformer(edisgo_obj) crit_lines_weighted = crit_lines_score * costs else: crit_lines_weighted = crit_lines_score.copy() @@ -283,30 +269,10 @@ def _scored_most_critical_voltage_issues_time_interval( voltage_diff_feeder[voltage_diff_feeder != 0] = 1 if weight_by_costs: - # determine costs per feeder - costs_lines = ( - line_expansion_costs(edisgo_obj).drop(columns="voltage_level").sum(axis=1) - ) - costs_trafos_lv = pd.Series( - index=lv_station_buses, - data=edisgo_obj.config._data["costs_transformers"]["lv"], - ) - costs = pd.concat([costs_lines, costs_trafos_lv]) - - feeder_lines = edisgo_obj.topology.lines_df.grid_feeder - feeder_trafos_lv = pd.Series( - index=lv_station_buses, - data=lv_station_buses, - ) - feeder = pd.concat([feeder_lines, feeder_trafos_lv]) - costs_per_feeder = ( - pd.concat([costs.rename("costs"), feeder.rename("feeder")], axis=1) - .groupby(by="feeder")[["costs"]] - .sum() - ) - + # get costs per feeder + costs_per_feeder = _costs_per_feeder(edisgo_obj, lv_station_buses) # weight feeder voltage violation with costs per feeder - voltage_diff_feeder = voltage_diff_feeder * costs_per_feeder.squeeze() + voltage_diff_feeder = voltage_diff_feeder * costs_per_feeder time_intervals_df = _most_critical_time_interval( costs_per_time_step=voltage_diff_feeder, @@ -321,6 +287,93 @@ def _scored_most_critical_voltage_issues_time_interval( return time_intervals_df +def _costs_per_line_and_transformer(edisgo_obj): + """ + Helper function to get costs per line (including earthwork and costs for one new + line) and per 
transformer. + + Transformers are named after the grid at the lower voltage level and with the + expansion "_station", e.g. "LVGrid_0_station". + + Returns + ------- + :pandas:`pandas.Series` + Series with component name in index and costs in kEUR as values. + + """ + costs_lines = ( + line_expansion_costs(edisgo_obj).drop(columns="voltage_level").sum(axis=1) + ) + costs_trafos_lv = pd.Series( + index=[ + str(lv_grid) + "_station" + for lv_grid in list(edisgo_obj.topology.mv_grid.lv_grids) + ], + data=edisgo_obj.config["costs_transformers"]["lv"], + ) + costs_trafos_mv = pd.Series( + index=["MVGrid_" + str(edisgo_obj.topology.id) + "_station"], + data=edisgo_obj.config["costs_transformers"]["mv"], + ) + return pd.concat([costs_lines, costs_trafos_lv, costs_trafos_mv]) + + +def _costs_per_feeder(edisgo_obj, lv_station_buses=None): + """ + Helper function to get costs per MV and LV feeder (including earthwork and costs for + one new line) and per MV/LV transformer (as they are considered as feeders). + + Transformers are named after the bus at the MV/LV station's secondary side. + + Parameters + ----------- + edisgo_obj : :class:`~.EDisGo` + lv_station_buses : list(str) or None + List of bus names of buses at the secondary side of the MV/LV transformers. + If None, list is generated. + + Returns + ------- + :pandas:`pandas.Series` + Series with feeder names in index and costs in kEUR as values. 
+ + """ + if lv_station_buses is None: + lv_station_buses = [ + lv_grid.station.index[0] for lv_grid in edisgo_obj.topology.mv_grid.lv_grids + ] + if "grid_feeder" not in edisgo_obj.topology.buses_df.columns: + # set feeder using MV feeder for MV components and LV feeder for LV components + edisgo_obj.topology.assign_feeders(mode="grid_feeder") + + # feeders of buses at MV/LV station's secondary sides are set to the name of the + # station bus to have them as separate feeders + edisgo_obj.topology.buses_df.loc[lv_station_buses, "grid_feeder"] = lv_station_buses + + costs_lines = ( + line_expansion_costs(edisgo_obj).drop(columns="voltage_level").sum(axis=1) + ) + costs_trafos_lv = pd.Series( + index=lv_station_buses, + data=edisgo_obj.config._data["costs_transformers"]["lv"], + ) + costs = pd.concat([costs_lines, costs_trafos_lv]) + + feeder_lines = edisgo_obj.topology.lines_df.grid_feeder + feeder_trafos_lv = pd.Series( + index=lv_station_buses, + data=lv_station_buses, + ) + feeder = pd.concat([feeder_lines, feeder_trafos_lv]) + costs_per_feeder = ( + pd.concat([costs.rename("costs"), feeder.rename("feeder")], axis=1) + .groupby(by="feeder")[["costs"]] + .sum() + ) + + return costs_per_feeder.squeeze() + + def _most_critical_time_interval( costs_per_time_step, grid_issues_magnitude_df, diff --git a/tests/tools/test_temporal_complexity_reduction.py b/tests/tools/test_temporal_complexity_reduction.py index 03a41b005..32d46770e 100644 --- a/tests/tools/test_temporal_complexity_reduction.py +++ b/tests/tools/test_temporal_complexity_reduction.py @@ -121,6 +121,18 @@ def test__scored_most_critical_voltage_issues_time_interval(self): == pd.date_range("1/1/2018 4:00", periods=72, freq="H") ).all() + def test__costs_per_line_and_transformer(self): + costs = temp_red._costs_per_line_and_transformer(self.edisgo) + assert len(costs) == 131 + 11 + assert np.isclose(costs["Line_10007"], 0.722445826838636 * 80) + assert np.isclose(costs["LVGrid_1_station"], 10) + + def 
test__costs_per_feeder(self): + costs = temp_red._costs_per_feeder(self.edisgo) + assert len(costs) == 37 + assert np.isclose(costs["Bus_BranchTee_MVGrid_1_1"], 295.34795) + assert np.isclose(costs["BusBar_MVGrid_1_LVGrid_1_LV"], 10) + def test_get_most_critical_time_intervals(self): self.edisgo.timeseries.scale_timeseries(p_scaling_factor=2, q_scaling_factor=2) steps = temp_red.get_most_critical_time_intervals( From 70b8da5b553202d7dec87e3001edadc7521689b2 Mon Sep 17 00:00:00 2001 From: birgits Date: Mon, 29 Jan 2024 18:12:23 -0800 Subject: [PATCH 027/141] Add weighting by costs in time steps selection --- edisgo/edisgo.py | 6 + edisgo/flex_opt/reinforce_grid.py | 7 + edisgo/tools/temporal_complexity_reduction.py | 131 ++++++++++++++---- .../test_temporal_complexity_reduction.py | 21 ++- 4 files changed, 136 insertions(+), 29 deletions(-) diff --git a/edisgo/edisgo.py b/edisgo/edisgo.py index e41ba99c8..577f3ec1e 100755 --- a/edisgo/edisgo.py +++ b/edisgo/edisgo.py @@ -1328,6 +1328,12 @@ def reinforce( used to specify whether to run an initial analyze to determine most critical time steps or to use existing results. If set to False, `use_troubleshooting_mode` is ignored. Default: True. + weight_by_costs : bool + In case `reduced_analysis` is set to True, this parameter can be used + to specify whether to weight time steps by estimated grid expansion costs. + See parameter `weight_by_costs` in + :func:`~.tools.temporal_complexity_reduction.get_most_critical_time_steps` + for more information. Default: False. Returns -------- diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index a916f9aa7..a8be78421 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -111,6 +111,12 @@ def reinforce_grid( used to specify whether to run an initial analyze to determine most critical time steps or to use existing results. If set to False, `use_troubleshooting_mode` is ignored. Default: True. 
+ weight_by_costs : bool + In case `reduced_analysis` is set to True, this parameter can be + used to specify whether to weight time steps by estimated grid expansion costs. + See parameter `weight_by_costs` in + :func:`~.tools.temporal_complexity_reduction.get_most_critical_time_steps` + for more information. Default: False. Returns ------- @@ -184,6 +190,7 @@ def reinforce_grid( percentage=kwargs.get("percentage", 1.0), use_troubleshooting_mode=kwargs.get("use_troubleshooting_mode", True), run_initial_analyze=kwargs.get("run_initial_analyze", True), + weight_by_costs=kwargs.get("weight_by_costs", False), ) if timesteps_pfa is not None and len(timesteps_pfa) == 0: logger.debug("Zero time steps for grid reinforcement.") diff --git a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index e445bdcaa..2544fdc68 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -17,21 +17,31 @@ logger = logging.getLogger(__name__) -def _scored_most_critical_loading(edisgo_obj: EDisGo) -> pd.Series: +def _scored_most_critical_loading( + edisgo_obj: EDisGo, weight_by_costs: bool = True +) -> pd.Series: """ - Method to get time steps where at least one component shows its highest overloading. + Get time steps sorted by severity of overloadings. + + The overloading can be weighted by the estimated expansion costs of each respective + line and transformer. See parameter `weight_by_costs` for more information. Parameters ----------- edisgo_obj : :class:`~.EDisGo` + weight_by_costs : bool + Defines whether overloading issues should be weighted by estimated grid + expansion costs or not. See parameter `weight_by_costs` in + :func:`~get_most_critical_time_steps` for more information. + Default: True. Returns -------- :pandas:`pandas.Series` Series with time index and corresponding sum of maximum relative overloadings - of lines and transformers. 
The series only contains time steps, where at least - one component is maximally overloaded, and is sorted descending by the - sum of maximum relative overloadings. + of lines and transformers (weighted by estimated reinforcement costs, in case + `weight_by_costs` is True). The series only contains time steps, where at least + one component is maximally overloaded, and is sorted descending order. """ @@ -42,30 +52,47 @@ def _scored_most_critical_loading(edisgo_obj: EDisGo) -> pd.Series: crit_lines_score = relative_i_res[relative_i_res > 1] # Get most critical time steps per component + crit_lines_score = crit_lines_score[crit_lines_score == crit_lines_score.max()] + + if weight_by_costs: + # weight line violations with expansion costs + costs = _costs_per_line_and_transformer(edisgo_obj) + crit_lines_score = crit_lines_score * costs + else: + crit_lines_score = crit_lines_score - 1 + + # drop components and time steps without violations crit_lines_score = ( - (crit_lines_score[crit_lines_score == crit_lines_score.max()]) - .dropna(how="all") - .dropna(how="all", axis=1) + crit_lines_score.dropna(how="all").dropna(how="all", axis=1).fillna(0) ) - - # Sort according to highest cumulated relative overloading - crit_lines_score = (crit_lines_score - 1).sum(axis=1) - return crit_lines_score.sort_values(ascending=False) + # sort sum in descending order + return crit_lines_score.sum(axis=1).sort_values(ascending=False) -def _scored_most_critical_voltage_issues(edisgo_obj: EDisGo) -> pd.Series: +def _scored_most_critical_voltage_issues( + edisgo_obj: EDisGo, weight_by_costs: bool = True +) -> pd.Series: """ Method to get time steps where at least one bus shows its highest deviation from allowed voltage boundaries. + The voltage issues can be weighted by the estimated expansion costs in each + respective feeder. See parameter `weight_by_costs` for more information. 
+ Parameters ----------- edisgo_obj : :class:`~.EDisGo` + weight_by_costs : bool + Defines whether voltage issues should be weighted by estimated grid expansion + costs or not. See parameter `weight_by_costs` in + :func:`~get_most_critical_time_steps` for more information. + Default: True. Returns -------- :pandas:`pandas.Series` - Series with time index and corresponding sum of maximum voltage deviations. + Series with time index and corresponding sum of maximum voltage deviations + (weighted by estimated reinforcement costs, in case `weight_by_costs` is True). The series only contains time steps, where at least one bus has its highest deviation from the allowed voltage limits, and is sorted descending by the sum of maximum voltage deviations. @@ -76,18 +103,42 @@ def _scored_most_critical_voltage_issues(edisgo_obj: EDisGo) -> pd.Series: ) # Get score for nodes that are over or under the allowed deviations - voltage_diff = voltage_diff.abs()[voltage_diff.abs() > 0] + voltage_diff = voltage_diff[voltage_diff != 0.0].abs() # get only most critical events for component # Todo: should there be different ones for over and undervoltage? 
- voltage_diff = ( - (voltage_diff[voltage_diff.abs() == voltage_diff.abs().max()]) - .dropna(how="all") - .dropna(how="all", axis=1) - ) + voltage_diff = voltage_diff[voltage_diff == voltage_diff.max()] - voltage_diff = voltage_diff.sum(axis=1) + if weight_by_costs: + # set feeder using MV feeder for MV components and LV feeder for LV components + edisgo_obj.topology.assign_feeders(mode="grid_feeder") + # feeders of buses at MV/LV station's secondary sides are set to the name of the + # station bus to have them as separate feeders + lv_station_buses = [ + lv_grid.station.index[0] for lv_grid in edisgo_obj.topology.mv_grid.lv_grids + ] + edisgo_obj.topology.buses_df.loc[ + lv_station_buses, "grid_feeder" + ] = lv_station_buses + # weight voltage violations with expansion costs + costs = _costs_per_feeder(edisgo_obj, lv_station_buses=lv_station_buses) + # map feeder costs to buses + feeder_buses = edisgo_obj.topology.buses_df.grid_feeder + costs_buses = pd.Series( + { + bus_name: ( + costs[feeder_buses[bus_name]] + if feeder_buses[bus_name] != "station_node" + else 0 + ) + for bus_name in feeder_buses.index + } + ) + voltage_diff = voltage_diff * costs_buses - return voltage_diff.sort_values(ascending=False) + # drop components and time steps without violations + voltage_diff = voltage_diff.dropna(how="all").dropna(how="all", axis=1).fillna(0) + # sort sum in descending order + return voltage_diff.sum(axis=1).sort_values(ascending=False) def _scored_most_critical_loading_time_interval( @@ -101,7 +152,7 @@ def _scored_most_critical_loading_time_interval( """ Get time intervals sorted by severity of overloadings. - The overloading can weighted by the estimated expansion costs of each respective + The overloading can be weighted by the estimated expansion costs of each respective line and transformer. See parameter `weight_by_costs` for more information. 
The length of the time intervals and hour of day at which the time intervals should begin can be set through the parameters `time_steps_per_time_interval` and @@ -791,6 +842,7 @@ def get_most_critical_time_steps( percentage: float = 1.0, use_troubleshooting_mode=True, run_initial_analyze=True, + weight_by_costs=True, ) -> pd.DatetimeIndex: """ Get the time steps with the most critical overloading and voltage issues. @@ -835,6 +887,31 @@ def get_most_critical_time_steps( This parameter can be used to specify whether to run an initial analyze to determine most critical time steps or to use existing results. If set to False, `use_troubleshooting_mode` is ignored. Default: True. + weight_by_costs : bool + Defines whether overloading and voltage issues should be weighted by estimated + grid expansion costs or not. This can be done in order to take into account that + some grid issues are more relevant, as reinforcing a certain line or feeder will + be more expensive than another one. + + In case of voltage issues: + If True, the voltage issues at each bus are weighted by the estimated grid + expansion costs for the MV or LV feeder the bus is in or in case of MV/LV + stations by the costs for a new transformer. Feeder costs are determined using + the costs for earth work and new lines over the full length of the feeder. + The costs don't convey the actual costs but are an estimation, as + the real number of parallel lines needed is not determined and the whole feeder + length is used instead of the length over two-thirds of the feeder. + If False, the severity of each feeder's voltage issue is set to be the same. + + In case of overloading issues: + If True, the overloading of each line is multiplied by + the respective grid expansion costs of that line including costs for earth work + and one new line. + The costs don't convey the actual costs but are an estimation, as + the discrete needed number of parallel lines is not considered. 
+ If False, only the relative overloading is used. + + Default: True. Returns -------- @@ -865,7 +942,9 @@ def get_most_critical_time_steps( ) # Select most critical steps based on current violations - loading_scores = _scored_most_critical_loading(edisgo_obj) + loading_scores = _scored_most_critical_loading( + edisgo_obj, weight_by_costs=weight_by_costs + ) if num_steps_loading is None: num_steps_loading = int(len(loading_scores) * percentage) else: @@ -880,7 +959,9 @@ def get_most_critical_time_steps( steps = loading_scores[:num_steps_loading].index # Select most critical steps based on voltage violations - voltage_scores = _scored_most_critical_voltage_issues(edisgo_obj) + voltage_scores = _scored_most_critical_voltage_issues( + edisgo_obj, weight_by_costs=weight_by_costs + ) if num_steps_voltage is None: num_steps_voltage = int(len(voltage_scores) * percentage) else: diff --git a/tests/tools/test_temporal_complexity_reduction.py b/tests/tools/test_temporal_complexity_reduction.py index 32d46770e..c80caf231 100644 --- a/tests/tools/test_temporal_complexity_reduction.py +++ b/tests/tools/test_temporal_complexity_reduction.py @@ -32,19 +32,32 @@ def setup_class(self): self.edisgo.analyze() def test__scored_most_critical_loading(self): - ts_crit = temp_red._scored_most_critical_loading(self.edisgo) - + ts_crit = temp_red._scored_most_critical_loading( + self.edisgo, weight_by_costs=False + ) assert len(ts_crit) == 180 assert np.isclose(ts_crit.iloc[0], 1.45613) assert np.isclose(ts_crit.iloc[-1], 1.14647) - def test__scored_most_critical_voltage_issues(self): - ts_crit = temp_red._scored_most_critical_voltage_issues(self.edisgo) + ts_crit = temp_red._scored_most_critical_loading(self.edisgo) + assert len(ts_crit) == 180 + assert np.isclose(ts_crit.iloc[0], 190.63611) + assert np.isclose(ts_crit.iloc[-1], 48.13501) + + def test__scored_most_critical_voltage_issues(self): + ts_crit = temp_red._scored_most_critical_voltage_issues( + self.edisgo, weight_by_costs=False 
+ ) assert len(ts_crit) == 120 assert np.isclose(ts_crit.iloc[0], 0.01062258) assert np.isclose(ts_crit.iloc[-1], 0.01062258) + ts_crit = temp_red._scored_most_critical_voltage_issues(self.edisgo) + assert len(ts_crit) == 120 + assert np.isclose(ts_crit.iloc[0], 0.1062258) + assert np.isclose(ts_crit.iloc[-1], 0.1062258) + def test_get_most_critical_time_steps(self): ts_crit = temp_red.get_most_critical_time_steps( self.edisgo, num_steps_loading=2, num_steps_voltage=2 From cf0c41220666c18074f4d37ce5fa535ad97bba69 Mon Sep 17 00:00:00 2001 From: birgits Date: Mon, 29 Jan 2024 18:35:58 -0800 Subject: [PATCH 028/141] Add tests --- edisgo/tools/temporal_complexity_reduction.py | 4 ++-- .../test_temporal_complexity_reduction.py | 24 ++++++++++++++++++- 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index 2544fdc68..7938dbaf0 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -57,7 +57,7 @@ def _scored_most_critical_loading( if weight_by_costs: # weight line violations with expansion costs costs = _costs_per_line_and_transformer(edisgo_obj) - crit_lines_score = crit_lines_score * costs + crit_lines_score = crit_lines_score * costs.loc[crit_lines_score.columns] else: crit_lines_score = crit_lines_score - 1 @@ -133,7 +133,7 @@ def _scored_most_critical_voltage_issues( for bus_name in feeder_buses.index } ) - voltage_diff = voltage_diff * costs_buses + voltage_diff = voltage_diff * costs_buses.loc[voltage_diff.columns] # drop components and time steps without violations voltage_diff = voltage_diff.dropna(how="all").dropna(how="all", axis=1).fillna(0) diff --git a/tests/tools/test_temporal_complexity_reduction.py b/tests/tools/test_temporal_complexity_reduction.py index c80caf231..0fcbc7356 100644 --- a/tests/tools/test_temporal_complexity_reduction.py +++ 
b/tests/tools/test_temporal_complexity_reduction.py @@ -60,7 +60,11 @@ def test__scored_most_critical_voltage_issues(self): def test_get_most_critical_time_steps(self): ts_crit = temp_red.get_most_critical_time_steps( - self.edisgo, num_steps_loading=2, num_steps_voltage=2 + self.edisgo, + num_steps_loading=2, + num_steps_voltage=2, + weight_by_costs=False, + run_initial_analyze=False, ) assert len(ts_crit) == 3 @@ -72,6 +76,24 @@ def test_get_most_critical_time_steps(self): ) assert len(ts_crit) == 2 + ts_crit = temp_red.get_most_critical_time_steps( + self.edisgo, + mode="lv", + lv_grid_id=2, + percentage=0.5, + num_steps_voltage=2, + ) + assert len(ts_crit) == 0 + + ts_crit = temp_red.get_most_critical_time_steps( + self.edisgo, + mode="lv", + lv_grid_id=6, + percentage=0.5, + num_steps_voltage=2, + ) + assert len(ts_crit) == 60 + def test__scored_most_critical_loading_time_interval(self): # test with default values ts_crit = temp_red._scored_most_critical_loading_time_interval(self.edisgo, 24) From 139fd7f9d203dd3dd880f53fa4f707092ebf5e49 Mon Sep 17 00:00:00 2001 From: birgits Date: Tue, 30 Jan 2024 16:25:02 -0800 Subject: [PATCH 029/141] Add test --- tests/flex_opt/test_reinforce_grid.py | 6 ++---- tests/test_edisgo.py | 11 +++++++++++ 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/tests/flex_opt/test_reinforce_grid.py b/tests/flex_opt/test_reinforce_grid.py index ad1b296a7..cb6074310 100644 --- a/tests/flex_opt/test_reinforce_grid.py +++ b/tests/flex_opt/test_reinforce_grid.py @@ -59,11 +59,9 @@ def test_reinforce_grid(self): res_reduced = reinforce_grid( edisgo=copy.deepcopy(self.edisgo), reduced_analysis=True, - num_steps_loading=4, - ) - assert_frame_equal( - res_reduced.equipment_changes, results_dict[None].equipment_changes + num_steps_loading=2, ) + assert len(res_reduced.i_res) == 2 def test_run_separate_lv_grids(self): edisgo = copy.deepcopy(self.edisgo) diff --git a/tests/test_edisgo.py b/tests/test_edisgo.py index 
e2642bf9a..bac0789cb 100755 --- a/tests/test_edisgo.py +++ b/tests/test_edisgo.py @@ -546,6 +546,17 @@ def test_enhanced_reinforce_grid(self): assert len(results.equipment_changes) == 892 assert results.v_res.shape == (4, 148) + edisgo_obj = copy.deepcopy(self.edisgo) + edisgo_obj = enhanced_reinforce_grid( + edisgo_obj, + reduced_analysis=True, + is_worst_case=False, + separate_lv_grids=True, + num_steps_loading=1, + num_steps_voltage=1, + ) + assert edisgo_obj.results.v_res.shape == (2, 162) + def test_add_component(self, caplog): self.setup_worst_case_time_series() index = self.edisgo.timeseries.timeindex From b6d2f9edde954e64e317e88548934112d78bf620 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 31 Jan 2024 17:13:44 -0800 Subject: [PATCH 030/141] Limit pandas version --- setup.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index e28c14695..71e1618b6 100644 --- a/setup.py +++ b/setup.py @@ -44,7 +44,9 @@ def read(fname): "matplotlib >= 3.3.0", "multiprocess", "networkx >= 2.5.0", - "pandas >= 1.4.0", + # newer pandas versions don't work with specified sqlalchemy versions, but upgrading + # sqlalchemy leads to new errors.. 
should be fixed at some point + "pandas < 2.2.0", "plotly", "pydot", "pygeos", From 70c3264caa39bbe875231c9b88f8415b4c182d01 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 31 Jan 2024 17:39:50 -0800 Subject: [PATCH 031/141] Add changes to whatsnew --- doc/whatsnew/v0-3-0.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/whatsnew/v0-3-0.rst b/doc/whatsnew/v0-3-0.rst index 3eeccb3c2..7cb375542 100644 --- a/doc/whatsnew/v0-3-0.rst +++ b/doc/whatsnew/v0-3-0.rst @@ -22,6 +22,7 @@ Changes * Added method to aggregate LV grid buses to station bus secondary side `#353 `_ * Adapted codebase to work with pandas 2.0 `#373 `_ * Added option to run reinforcement with reduced number of time steps `#379 `_ + (adapted in `#395 `_) * Added optimization method to determine dispatch of flexibilities that lead to minimal network expansion costs `#376 `_ * Added a new reinforcement method that separate lv grids when the overloading is very high `#380 `_ * Move function to assign feeder to Topology class and add methods to the Grid class to get information on the feeders `#360 `_ From 85b49f4b8d78fcf5840988885d76afba4a00fa5e Mon Sep 17 00:00:00 2001 From: birgits Date: Fri, 16 Feb 2024 10:19:20 -0800 Subject: [PATCH 032/141] Add logging information --- edisgo/tools/temporal_complexity_reduction.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index 7938dbaf0..7d72d26b3 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -956,6 +956,12 @@ def get_most_critical_time_steps( f"{len(loading_scores)} time steps are exported." ) num_steps_loading = len(loading_scores) + elif num_steps_loading < len(loading_scores): + logger.info( + f"{num_steps_loading} of a total of {len(loading_scores)} relevant " + f"time steps for overloading issues are chosen for the selection " + f"of most critical time steps." 
+ ) steps = loading_scores[:num_steps_loading].index # Select most critical steps based on voltage violations @@ -973,6 +979,12 @@ def get_most_critical_time_steps( f"{len(voltage_scores)} time steps are exported." ) num_steps_voltage = len(voltage_scores) + elif num_steps_voltage < len(voltage_scores): + logger.info( + f"{num_steps_voltage} of a total of {len(voltage_scores)} relevant " + f"time steps for voltage issues are chosen for the selection " + f"of most critical time steps." + ) steps = steps.append(voltage_scores[:num_steps_voltage].index) if len(steps) == 0: From 9e0d85d068d9d69d482de069b029d817ec214868 Mon Sep 17 00:00:00 2001 From: birgits Date: Fri, 16 Feb 2024 11:03:35 -0800 Subject: [PATCH 033/141] Add ToDo for how function could be improved --- edisgo/tools/temporal_complexity_reduction.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index 7d72d26b3..977f49073 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -44,7 +44,12 @@ def _scored_most_critical_loading( one component is maximally overloaded, and is sorted descending order. """ - + # ToDo The relative loading is used in this function to determine most critical + # time steps. While this makes sense to determine which lines are overloaded, it + # is not the best indicator for the weighting as it does not convey the number + # of additional lines needed to solve a problem. For that the number of parallel + # standard lines and transformers needed would be better. However, for now + # using the relative overloading as an estimation is okay. 
# Get current relative to allowed current relative_i_res = check_tech_constraints.components_relative_load(edisgo_obj) From c272fdae55b0fabe310364cff2d24f7d12383233 Mon Sep 17 00:00:00 2001 From: birgits Date: Fri, 16 Feb 2024 13:51:55 -0800 Subject: [PATCH 034/141] Restrict pandas version --- eDisGo_env.yml | 2 +- eDisGo_env_dev.yml | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/eDisGo_env.yml b/eDisGo_env.yml index f35301247..45b797e16 100644 --- a/eDisGo_env.yml +++ b/eDisGo_env.yml @@ -5,7 +5,7 @@ channels: dependencies: - python >= 3.8, < 3.10 - pip - - pandas >= 1.4 + - pandas >= 1.4, < 2.2.0 - conda-forge::fiona - conda-forge::geopy - conda-forge::geopandas diff --git a/eDisGo_env_dev.yml b/eDisGo_env_dev.yml index a59866094..ae86632ac 100644 --- a/eDisGo_env_dev.yml +++ b/eDisGo_env_dev.yml @@ -5,7 +5,7 @@ channels: dependencies: - python >= 3.8, < 3.10 - pip - - pandas >= 1.4 + - pandas >= 1.4, < 2.2.0 - conda-forge::fiona - conda-forge::geopy - conda-forge::geopandas diff --git a/setup.py b/setup.py index 71e1618b6..4f570f745 100644 --- a/setup.py +++ b/setup.py @@ -46,7 +46,7 @@ def read(fname): "networkx >= 2.5.0", # newer pandas versions don't work with specified sqlalchemy versions, but upgrading # sqlalchemy leads to new errors.. 
should be fixed at some point - "pandas < 2.2.0", + "pandas >= 1.4.0, < 2.2.0", "plotly", "pydot", "pygeos", From dfb71aec4ccf6da6f0ab87a8bdc196e135558515 Mon Sep 17 00:00:00 2001 From: birgits Date: Fri, 16 Feb 2024 14:30:45 -0800 Subject: [PATCH 035/141] Change links that are now redirected --- doc/quickstart.rst | 4 ++-- edisgo/tools/tools.py | 2 +- examples/edisgo_simple_example.ipynb | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/quickstart.rst b/doc/quickstart.rst index 21d38303b..4ce437c05 100644 --- a/doc/quickstart.rst +++ b/doc/quickstart.rst @@ -91,7 +91,7 @@ The steps required to set up HSL are also described in the Here is a short version for reference: First, you need to obtain an academic license for HSL Solvers. -Under https://www.hsl.rl.ac.uk/ipopt/ download the sources for Coin-HSL Full (Stable). +Under https://licences.stfc.ac.uk/product/coin-hsl download the sources for Coin-HSL Full (Stable). You will need to provide an institutional e-mail to gain access. Unpack the tar.gz: @@ -163,7 +163,7 @@ Beyond a running and up-to-date installation of eDisGo you need **grid topology data**. Currently synthetic grid data generated with the python project `Ding0 `_ is the only supported data source. You can retrieve data from -`Zenodo `_ +`Zenodo `_ (make sure you choose latest data) or check out the `Ding0 documentation `_ on how to generate grids yourself. diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index ead8e08d2..d05fe1b86 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -1119,7 +1119,7 @@ def reduce_memory_usage(df: pd.DataFrame, show_reduction: bool = False) -> pd.Da be reduced to a smaller data type. 
Source: - https://www.mikulskibartosz.name/how-to-reduce-memory-usage-in-pandas/ + https://mikulskibartosz.name/how-to-reduce-memory-usage-in-pandas Parameters ---------- diff --git a/examples/edisgo_simple_example.ipynb b/examples/edisgo_simple_example.ipynb index b0a68fc63..c7ee79ce1 100644 --- a/examples/edisgo_simple_example.ipynb +++ b/examples/edisgo_simple_example.ipynb @@ -112,7 +112,7 @@ "Currently, synthetic grid data generated with the python project\n", "[ding0](https://github.com/openego/ding0)\n", "is the only supported data source for distribution grid data. ding0 provides the grid topology data in the form of csv files, with separate files for buses, lines, loads, generators, etc. You can retrieve ding0 data from\n", - "[Zenodo](https://zenodo.org/record/890479)\n", + "[Zenodo](https://zenodo.org/records/890479)\n", "(make sure you choose latest data) or check out the\n", "[Ding0 documentation](https://dingo.readthedocs.io/en/dev/usage_details.html#ding0-examples)\n", "on how to generate grids yourself. A ding0 example grid can be viewed [here](https://github.com/openego/eDisGo/tree/dev/tests/data/ding0_test_network_2). It is possible to provide your own grid data if it is in the same format as the ding0 grid data. \n", From 7ec2ec2e54d2409f0430001d5985aeb84ef7cedc Mon Sep 17 00:00:00 2001 From: birgits Date: Fri, 16 Feb 2024 14:36:17 -0800 Subject: [PATCH 036/141] Only conduct reinforcement for previously non-converging time steps if grid has beed reinforced in between --- edisgo/flex_opt/reinforce_grid.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index a8be78421..fd2896d31 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -685,7 +685,7 @@ def catch_convergence_reinforce_grid( Reinforcement strategy to reinforce grids with non-converging time steps. 
First, conducts a grid reinforcement with only converging time steps. - Afterwards, tries to run reinforcement with all time steps that did not converge + Afterward, tries to run reinforcement with all time steps that did not converge in the beginning. At last, if there are still time steps that do not converge, the feed-in and load time series are iteratively scaled and the grid reinforced, starting with a low grid load and scaling-up the time series until the original @@ -769,15 +769,17 @@ def reinforce(): troubleshooting_mode = troubleshooting_mode_set reinforce() - # Run reinforcement for time steps that did not converge after initial reinforcement - if not non_converging_timesteps.empty: - logger.info( - "Run reinforcement for time steps that did not converge after initial " - "reinforcement." - ) - selected_timesteps = non_converging_timesteps - troubleshooting_mode = False - converged = reinforce() + # Run reinforcement for time steps that did not converge after initial + # reinforcement (only needs to done, when grid was previously reinforced using + # converged time steps, wherefore it is within that if-statement) + if not non_converging_timesteps.empty: + logger.info( + "Run reinforcement for time steps that did not converge after initial " + "reinforcement." 
+ ) + selected_timesteps = non_converging_timesteps + troubleshooting_mode = False + converged = reinforce() if converged: return edisgo.results From a1ac8c8e74c6a6a89e75a57b3a7e113be3de816a Mon Sep 17 00:00:00 2001 From: birgits Date: Mon, 19 Feb 2024 13:08:25 -0800 Subject: [PATCH 037/141] Use maximum deviation in feeder rather than weighting all deviations equally --- edisgo/tools/temporal_complexity_reduction.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index 977f49073..973d23b36 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -320,9 +320,8 @@ def _scored_most_critical_voltage_issues_time_interval( voltage_diff_feeder = voltage_diff.copy() voltage_diff_feeder.columns = columns voltage_diff_feeder = ( - voltage_diff.transpose().reset_index().groupby(by="Bus").sum().transpose() + voltage_diff.transpose().reset_index().groupby(by="Bus").max().transpose() ) - voltage_diff_feeder[voltage_diff_feeder != 0] = 1 if weight_by_costs: # get costs per feeder @@ -726,14 +725,14 @@ def get_most_critical_time_intervals( The costs don't convey the actual costs but are an estimation, as the real number of parallel lines needed is not determined and the whole feeder length is used instead of the length over two-thirds of the feeder. - If False, the severity of each feeder's voltage issue is set to be the same. + If False, only the maximum voltage deviation in the feeder is used to determine + the most relevant time intervals. In case of overloading issues: - If True, the overloading of each line is multiplied by - the respective grid expansion costs of that line including costs for earth work - and one new line. + If True, the overloading of each line is multiplied by the respective grid + expansion costs of that line including costs for earth work and one new line. 
The costs don't convey the actual costs but are an estimation, as - the discrete needed number of parallel lines is not considered. + the discrete number of needed parallel lines is not considered. If False, only the relative overloading is used to determine the most relevant time intervals. From b99abba45eb1f514d7fba62877e873720c04be3b Mon Sep 17 00:00:00 2001 From: birgits Date: Mon, 19 Feb 2024 15:25:25 -0800 Subject: [PATCH 038/141] Add ToDo --- tests/test_edisgo.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_edisgo.py b/tests/test_edisgo.py index bac0789cb..1f09e62d9 100755 --- a/tests/test_edisgo.py +++ b/tests/test_edisgo.py @@ -430,6 +430,7 @@ def test_analyze(self, caplog): assert "Current fraction in iterative process: 1.0." in caplog.text def test_reinforce(self): + # ToDo add tests to check content of equipment_changes # ###################### test with default settings ########################## self.setup_worst_case_time_series() results = self.edisgo.reinforce() From f04bc2fa18f153b46d2a6a6a8e78ddb56dfe8494 Mon Sep 17 00:00:00 2001 From: birgits Date: Mon, 19 Feb 2024 15:38:23 -0800 Subject: [PATCH 039/141] Change default for use_troubleshotting_mode as True is not useful in grid reinforcement --- edisgo/edisgo.py | 14 ++++++++++---- edisgo/flex_opt/reinforce_grid.py | 11 ++--------- 2 files changed, 12 insertions(+), 13 deletions(-) diff --git a/edisgo/edisgo.py b/edisgo/edisgo.py index 577f3ec1e..dca1660b2 100755 --- a/edisgo/edisgo.py +++ b/edisgo/edisgo.py @@ -1316,13 +1316,19 @@ def reinforce( default is 1.0, in which case all most critical time steps are selected. Default: 1.0. use_troubleshooting_mode : bool - In case `reduced_analysis` is set to True, this parameter can be - used to specify how to handle non-convergence issues in the power flow - analysis. 
If set to True, non-convergence issues are tried to be + In case `reduced_analysis` is set to True, this parameter can be used to + specify how to handle non-convergence issues when determining the most + critical time steps. If set to True, non-convergence issues are tried to be circumvented by reducing load and feed-in until the power flow converges. The most critical time steps are then determined based on the power flow results with the reduced load and feed-in. If False, an error will be - raised in case time steps do not converge. Default: True. + raised in case time steps do not converge. + Setting this to True doesn't make sense for the grid reinforcement as the + troubleshooting mode is only used when determining the most critical time + steps not when running a power flow analysis to determine grid reinforcement + needs. To handle non-convergence in the grid reinforcement set parameter + `catch_convergence_problems` to True. + Default: False. run_initial_analyze : bool In case `reduced_analysis` is set to True, this parameter can be used to specify whether to run an initial analyze to determine most diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index fd2896d31..e56eb58b4 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -105,7 +105,7 @@ def reinforce_grid( In case `reduced_analysis` is set to True, this parameter can be used to specify how to handle non-convergence issues in the power flow analysis. See parameter `use_troubleshooting_mode` in function :attr:`~.EDisGo.reinforce` - for more information. Default: True. + for more information. Default: False. 
run_initial_analyze : bool In case `reduced_analysis` is set to True, this parameter can be used to specify whether to run an initial analyze to determine most @@ -188,7 +188,7 @@ def reinforce_grid( num_steps_loading=kwargs.get("num_steps_loading", None), num_steps_voltage=kwargs.get("num_steps_voltage", None), percentage=kwargs.get("percentage", 1.0), - use_troubleshooting_mode=kwargs.get("use_troubleshooting_mode", True), + use_troubleshooting_mode=kwargs.get("use_troubleshooting_mode", False), run_initial_analyze=kwargs.get("run_initial_analyze", True), weight_by_costs=kwargs.get("weight_by_costs", False), ) @@ -712,7 +712,6 @@ def reinforce(): edisgo, timesteps_pfa=selected_timesteps, scale_timeseries=set_scaling_factor, - use_troubleshooting_mode=troubleshooting_mode, **kwargs, ) converged = True @@ -728,13 +727,11 @@ def reinforce(): # Get the timesteps from kwargs and then remove it to set it later manually timesteps_pfa = kwargs.pop("timesteps_pfa", None) selected_timesteps = timesteps_pfa - troubleshooting_mode_set = kwargs.pop("troubleshooting_mode", True) # Initial try logger.info("Run initial reinforcement.") set_scaling_factor = 1.0 iteration = 0 - troubleshooting_mode = False converged = reinforce() if converged is False: logger.info("Initial reinforcement did not succeed.") @@ -766,7 +763,6 @@ def reinforce(): "reinforcement." ) selected_timesteps = converging_timesteps - troubleshooting_mode = troubleshooting_mode_set reinforce() # Run reinforcement for time steps that did not converge after initial @@ -778,7 +774,6 @@ def reinforce(): "reinforcement." 
) selected_timesteps = non_converging_timesteps - troubleshooting_mode = False converged = reinforce() if converged: @@ -812,7 +807,6 @@ def reinforce(): ) + highest_converged_scaling_factor logger.info(f"Try reinforcement with {set_scaling_factor=} at {iteration=}") - troubleshooting_mode = False converged = reinforce() if converged: logger.info( @@ -833,7 +827,6 @@ def reinforce(): if set_scaling_factor != 1: logger.info("Run final reinforcement.") selected_timesteps = timesteps_pfa - troubleshooting_mode = False reinforce() return edisgo.results From 0657cd10a7c0c4e2d2ca105d396f39ab54077079 Mon Sep 17 00:00:00 2001 From: birgits Date: Mon, 19 Feb 2024 15:54:55 -0800 Subject: [PATCH 040/141] Try fixing failing link check github action --- doc/conf.py | 2 +- edisgo/io/db.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index 32f385cbc..17724e9dc 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -116,7 +116,7 @@ def setup(sphinx): "networkx.%s", ), "sqlalchemy": ( - "http://docs.sqlalchemy.org/en/latest/core/connections.html#%s", + "https://docs.sqlalchemy.org/en/20/core/connections.html#%s", "sqlalchemy.%s", ), "numpy": ( diff --git a/edisgo/io/db.py b/edisgo/io/db.py index 02d1320e8..19dfabcde 100644 --- a/edisgo/io/db.py +++ b/edisgo/io/db.py @@ -163,7 +163,7 @@ def engine(path: Path | str, ssh: bool = False) -> Engine: Returns ------- - sqlalchemy.engine.base.Engine + :sqlalchemy:`sqlalchemy.Engine` Database engine """ From 79b5a312eeffb5f16596d474e14ba1b5a987c5c8 Mon Sep 17 00:00:00 2001 From: birgits Date: Mon, 19 Feb 2024 16:21:28 -0800 Subject: [PATCH 041/141] Ignore stackoverflow links when checking links Stackoverflow links for some reason now fail when checked with github actions, even though the link is correct. They are therefore for now ignored. 
--- doc/conf.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/doc/conf.py b/doc/conf.py index 17724e9dc..851a3fb6d 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -116,7 +116,7 @@ def setup(sphinx): "networkx.%s", ), "sqlalchemy": ( - "https://docs.sqlalchemy.org/en/20/core/connections.html#%s", + "https://docs.sqlalchemy.org/en/latest/core/connections.html#%s", "sqlalchemy.%s", ), "numpy": ( @@ -134,6 +134,11 @@ def setup(sphinx): "plotly.%s", ), } +# ignore the following external links when checking the links +# stackoverflow is listed here because for some reason the link check fails for these +# in the github action, even though the link is correct +linkcheck_ignore = [r"https://stackoverflow.com*"] + # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] From 4087d342b66b71d3ba10134d729b4e425290c9f6 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 24 Apr 2024 13:45:02 +0200 Subject: [PATCH 042/141] Fix logger directory creation and add tests --- edisgo/tools/logger.py | 4 +- tests/tools/test_logger.py | 82 ++++++++++++++++++++++++-------------- 2 files changed, 53 insertions(+), 33 deletions(-) diff --git a/edisgo/tools/logger.py b/edisgo/tools/logger.py index 5b1a62a55..20514f947 100644 --- a/edisgo/tools/logger.py +++ b/edisgo/tools/logger.py @@ -119,7 +119,7 @@ def setup_logger( def create_dir(dir_path): if not os.path.isdir(dir_path): - os.mkdir(dir_path) + os.makedirs(dir_path) def get_default_root_dir(): dir_path = str(cfg_edisgo.get("user_dirs", "root_dir")) @@ -140,9 +140,9 @@ def create_home_dir(): log_dir = os.path.join( get_default_root_dir(), cfg_edisgo.get("user_dirs", "log_dir") ) - create_dir(log_dir) if log_dir is not None: + create_dir(log_dir) file_name = os.path.join(log_dir, file_name) if reset_loggers: diff --git a/tests/tools/test_logger.py b/tests/tools/test_logger.py index 80e372eef..eee071df6 100644 --- a/tests/tools/test_logger.py +++ 
b/tests/tools/test_logger.py @@ -1,65 +1,85 @@ import logging import os +import pytest + from edisgo.tools.logger import setup_logger +def check_file_output(filename, output): + with open(filename) as file: + last_line = file.readlines()[-1].split(" ")[3:] + last_line = " ".join(last_line) + assert last_line == output + + +def reset_loggers(filename): + logger = logging.getLogger("edisgo") + logger.handlers.clear() + logger.propagate = True + # try removing file - when run on github for Windows removing the file leads + # to a PermissionError + try: + os.remove(filename) + except PermissionError: + pass + + class TestClass: def test_setup_logger(self): - def check_file_output(output): - with open("edisgo.log") as file: - last_line = file.readlines()[-1].split(" ")[3:] - last_line = " ".join(last_line) - assert last_line == output - - def reset_loggers(): - logger = logging.getLogger("edisgo") - logger.propagate = True - logger.handlers.clear() - logger = logging.getLogger() - logger.handlers.clear() - - if os.path.exists("edisgo.log"): - os.remove("edisgo.log") + filename = os.path.join( + os.path.expanduser("~"), ".edisgo", "log", "test_log.log" + ) + if os.path.exists(filename): + os.remove(filename) setup_logger( loggers=[ {"name": "root", "file_level": "debug", "stream_level": "debug"}, {"name": "edisgo", "file_level": "debug", "stream_level": "debug"}, ], - file_name="edisgo.log", + file_name="test_log.log", + log_dir="default", ) logger = logging.getLogger("edisgo") # Test that edisgo logger writes to file. logger.debug("root") - check_file_output("edisgo - DEBUG: root\n") + check_file_output(filename, "edisgo - DEBUG: root\n") # Test that root logger writes to file. 
logging.debug("root") - check_file_output("root - DEBUG: root\n") + check_file_output(filename, "root - DEBUG: root\n") + + reset_loggers(filename) - # reset_loggers() + @pytest.mark.runonlinux + def test_setup_logger_2(self): + """ + This test is only run on linux, as the log file is written to the user + home directory, which is not allowed when tests are run on github. + + """ + + # delete any existing log files + log_files = [_ for _ in os.listdir(os.getcwd()) if ".log" in _] + for log_file in log_files: + os.remove(log_file) setup_logger( loggers=[ {"name": "edisgo", "file_level": "debug", "stream_level": "debug"}, ], - file_name="edisgo.log", reset_loggers=True, debug_message=True, ) logger = logging.getLogger("edisgo") + + filename = [_ for _ in os.listdir(os.getcwd()) if ".log" in _][0] # Test that edisgo logger writes to file. logger.debug("edisgo") - check_file_output("edisgo - DEBUG: edisgo\n") - # Test that root logger doesn't writes to file. - logging.debug("edisgo") - check_file_output("edisgo - DEBUG: edisgo\n") - - @classmethod - def teardown_class(cls): - logger = logging.getLogger("edisgo") - logger.handlers.clear() - logger.propagate = True + check_file_output(filename, "edisgo - DEBUG: edisgo\n") + # Test that root logger doesn't write to file. 
+ logging.debug("root") + check_file_output(filename, "edisgo - DEBUG: edisgo\n") - os.remove("edisgo.log") + reset_loggers(filename) From 5f25525c9a9dc8d96692894b562ae69dd6bb6ed6 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 24 Apr 2024 13:46:18 +0200 Subject: [PATCH 043/141] Use number of parallel lines not 1 --- edisgo/network/results.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/edisgo/network/results.py b/edisgo/network/results.py index 8f6e8b044..151fbd480 100755 --- a/edisgo/network/results.py +++ b/edisgo/network/results.py @@ -628,7 +628,7 @@ def _add_line_to_equipment_changes(self, line): "iteration_step": [0], "change": ["added"], "equipment": [line.type_info], - "quantity": [1], + "quantity": [line.num_parallel], }, index=[line.name], ), From f77fd611d789ddf27eb1444f28474a1131981069 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 24 Apr 2024 13:47:17 +0200 Subject: [PATCH 044/141] Enable getting electricity profiles for status quo --- edisgo/io/timeseries_import.py | 47 +++++++++++++----------------- tests/io/test_timeseries_import.py | 15 ++++++++++ 2 files changed, 35 insertions(+), 27 deletions(-) diff --git a/edisgo/io/timeseries_import.py b/edisgo/io/timeseries_import.py index 99204b1c2..83e4afc65 100644 --- a/edisgo/io/timeseries_import.py +++ b/edisgo/io/timeseries_import.py @@ -1191,7 +1191,7 @@ def get_residential_electricity_profiles_per_building(building_ids, scenario, en List of building IDs to retrieve electricity demand profiles for. scenario : str Scenario for which to retrieve demand data. Possible options - are 'eGon2035' and 'eGon100RE'. + are 'eGon2021', 'eGon2035' and 'eGon100RE'. engine : :sqlalchemy:`sqlalchemy.Engine` Database engine. @@ -1220,30 +1220,21 @@ def _get_scaling_factors_of_zensus_cells(zensus_ids): column factor. 
""" - with session_scope_egon_data(engine) as session: - if scenario == "eGon2035": - query = session.query( - egon_household_electricity_profile_in_census_cell.cell_id, - egon_household_electricity_profile_in_census_cell.factor_2035.label( - "factor" - ), - ).filter( - egon_household_electricity_profile_in_census_cell.cell_id.in_( - zensus_ids - ) - ) - else: - query = session.query( - egon_household_electricity_profile_in_census_cell.cell_id, - egon_household_electricity_profile_in_census_cell.factor_2050.label( - "factor" - ), - ).filter( - egon_household_electricity_profile_in_census_cell.cell_id.in_( - zensus_ids - ) - ) - return pd.read_sql(query.statement, engine, index_col="cell_id") + if scenario == "eGon2021": + return pd.DataFrame(index=zensus_ids, data={"factor": 1.0}) + else: + with session_scope_egon_data(engine) as session: + if scenario == "eGon2035": + query = session.query( + hh_profile.cell_id, + hh_profile.factor_2035.label("factor"), + ).filter(hh_profile.cell_id.in_(zensus_ids)) + else: + query = session.query( + hh_profile.cell_id, + hh_profile.factor_2050.label("factor"), + ).filter(hh_profile.cell_id.in_(zensus_ids)) + return pd.read_sql(query.statement, engine, index_col="cell_id") def _get_profile_ids_of_buildings(building_ids): """ @@ -1302,7 +1293,9 @@ def _get_profiles(profile_ids): saio.register_schema("demand", engine) from saio.demand import ( - egon_household_electricity_profile_in_census_cell, + egon_household_electricity_profile_in_census_cell as hh_profile, + ) + from saio.demand import ( egon_household_electricity_profile_of_buildings, iee_household_load_profiles, ) @@ -1350,7 +1343,7 @@ def get_industrial_electricity_profiles_per_site(site_ids, scenario, engine): List of industrial site and OSM IDs to retrieve electricity demand profiles for. scenario : str Scenario for which to retrieve demand data. Possible options - are 'eGon2035' and 'eGon100RE'. + are 'eGon2021', 'eGon2035' and 'eGon100RE'. 
engine : :sqlalchemy:`sqlalchemy.Engine` Database engine. diff --git a/tests/io/test_timeseries_import.py b/tests/io/test_timeseries_import.py index 6c88f54b4..93825e9ac 100644 --- a/tests/io/test_timeseries_import.py +++ b/tests/io/test_timeseries_import.py @@ -268,6 +268,13 @@ def test_get_residential_electricity_profiles_per_building(self): assert df.shape == (8760, 1) assert np.isclose(df.loc[:, 442081].sum(), 3.20688, atol=1e-3) + # test with status quo + df = timeseries_import.get_residential_electricity_profiles_per_building( + [-1, 442081], "eGon2021", pytest.engine + ) + assert df.shape == (8760, 1) + assert np.isclose(df.loc[:, 442081].sum(), 4.288845, atol=1e-3) + @pytest.mark.local def test_get_industrial_electricity_profiles_per_site(self): # test with one site and one OSM area @@ -283,3 +290,11 @@ def test_get_industrial_electricity_profiles_per_site(self): [541658], "eGon2035", pytest.engine ) assert df.shape == (8760, 1) + + # test with status quo + df = timeseries_import.get_industrial_electricity_profiles_per_site( + [1, 541658], "eGon2021", pytest.engine + ) + assert df.shape == (8760, 2) + assert np.isclose(df.loc[:, 1].sum(), 31655.640, atol=1e-3) + assert np.isclose(df.loc[:, 541658].sum(), 2910.816, atol=1e-3) From 767931008d04edd60f1838257fb46cce224c4b7f Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 24 Apr 2024 13:47:53 +0200 Subject: [PATCH 045/141] Minor fix --- edisgo/io/powermodels_io.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/edisgo/io/powermodels_io.py b/edisgo/io/powermodels_io.py index 16449bd17..b0fb22781 100644 --- a/edisgo/io/powermodels_io.py +++ b/edisgo/io/powermodels_io.py @@ -957,10 +957,16 @@ def _build_load( ) pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage") p_d = -min( - [psa_net.storage_units_t.p_set[inflexible_storage_units[stor_i]][0], 0] + [ + psa_net.storage_units_t.p_set[inflexible_storage_units[stor_i]][0], + np.float64(0.0), + ] ) q_d = -max( - 
[psa_net.storage_units_t.q_set[inflexible_storage_units[stor_i]][0], 0] + [ + psa_net.storage_units_t.q_set[inflexible_storage_units[stor_i]][0], + np.float64(0.0), + ] ) pm["load"][str(stor_i + len(loads_df.index) + 1)] = { "pd": p_d.round(20) / s_base, From 3983c4917844ef6d889574f82675dd473c110d72 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 24 Apr 2024 13:50:18 +0200 Subject: [PATCH 046/141] Change workflow --- .github/workflows/tests-coverage.yml | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/.github/workflows/tests-coverage.yml b/.github/workflows/tests-coverage.yml index e0a72b1cd..951972d4d 100644 --- a/.github/workflows/tests-coverage.yml +++ b/.github/workflows/tests-coverage.yml @@ -38,7 +38,7 @@ jobs: python-version: ${{ matrix.python-version }} - name: Set up julia - if: runner.os == 'Linux' && matrix.python-version == 3.8 && matrix.name-suffix == 'coverage' + if: runner.os == 'Linux' uses: julia-actions/setup-julia@v1 with: version: "1.6" @@ -58,8 +58,14 @@ jobs: environment-file: eDisGo_env_dev.yml python-version: ${{ matrix.python-version }} - - name: Run tests - if: ${{ !(runner.os == 'Linux' && matrix.python-version == 3.8 && matrix.name-suffix == 'coverage') }} + - name: Run tests Linux + if: runner.os == 'Linux' && matrix.name-suffix != 'coverage' + run: | + python -m pip install pytest pytest-notebook + python -m pytest --runslow --runonlinux --disable-warnings --color=yes -v + + - name: Run tests Windows + if: runner.os == 'Windows' run: | python -m pip install pytest pytest-notebook python -m pytest --runslow --disable-warnings --color=yes -v From be098336a5451295c4571d554d2f7714a8ca94a7 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 24 Apr 2024 13:55:38 +0200 Subject: [PATCH 047/141] Fixes in costs calculation --- edisgo/flex_opt/costs.py | 104 +++++++++++++++++++++++--- tests/flex_opt/test_costs.py | 41 ++++++++-- tests/flex_opt/test_reinforce_grid.py | 2 +- tests/test_edisgo.py | 4 +- 4 files changed, 
131 insertions(+), 20 deletions(-) diff --git a/edisgo/flex_opt/costs.py b/edisgo/flex_opt/costs.py index 8bd63d9b7..7cbe359b1 100644 --- a/edisgo/flex_opt/costs.py +++ b/edisgo/flex_opt/costs.py @@ -1,3 +1,4 @@ +import logging import os import pandas as pd @@ -7,6 +8,8 @@ from edisgo.tools.geo import proj2equidistant +logger = logging.getLogger(__name__) + def grid_expansion_costs(edisgo_obj, without_generator_import=False): """ @@ -67,7 +70,8 @@ def _get_transformer_costs(trafos): costs_trafos = pd.DataFrame( { "costs_transformers": len(hvmv_trafos) - * [float(edisgo_obj.config["costs_transformers"]["mv"])] + * [float(edisgo_obj.config["costs_transformers"]["mv"])], + "voltage_level": len(hvmv_trafos) * ["hv/mv"], }, index=hvmv_trafos, ) @@ -77,13 +81,14 @@ def _get_transformer_costs(trafos): pd.DataFrame( { "costs_transformers": len(mvlv_trafos) - * [float(edisgo_obj.config["costs_transformers"]["lv"])] + * [float(edisgo_obj.config["costs_transformers"]["lv"])], + "voltage_level": len(mvlv_trafos) * ["mv/lv"], }, index=mvlv_trafos, ), ] ) - return costs_trafos.loc[trafos.index, "costs_transformers"].values + return costs_trafos.loc[trafos.index, :] def _get_line_costs(lines_added): costs_lines = line_expansion_costs(edisgo_obj, lines_added.index) @@ -107,9 +112,8 @@ def _get_line_costs(lines_added): # costs for transformers if not equipment_changes.empty: transformers = equipment_changes[ - equipment_changes.index.isin( - [f"{_}_station" for _ in edisgo_obj.topology._grids_repr] - ) + equipment_changes.equipment.str.contains("Transformer") + | equipment_changes.equipment.str.contains("transformer") ] added_transformers = transformers[transformers["change"] == "added"] removed_transformers = transformers[transformers["change"] == "removed"] @@ -129,15 +133,16 @@ def _get_line_costs(lines_added): ) trafos = all_trafos.loc[added_transformers["equipment"]] # calculate costs for each transformer + transformer_costs = _get_transformer_costs(trafos) costs = pd.concat( 
[ costs, pd.DataFrame( { "type": trafos.type_info.values, - "total_costs": _get_transformer_costs(trafos), + "total_costs": transformer_costs.costs_transformers, "quantity": len(trafos) * [1], - "voltage_level": len(trafos) * ["mv/lv"], + "voltage_level": transformer_costs.voltage_level, }, index=trafos.index, ), @@ -161,6 +166,19 @@ def _get_line_costs(lines_added): .sum() .loc[lines_added_unique, ["quantity"]] ) + # use the minimum of quantity and num_parallel, as sometimes lines are added + # and in a next reinforcement step removed again, e.g. when feeder is split + # at 2/3 and a new single standard line is added + lines_added = pd.merge( + lines_added, + edisgo_obj.topology.lines_df.loc[:, ["num_parallel"]], + how="left", + left_index=True, + right_index=True, + ) + lines_added["quantity_added"] = lines_added.loc[ + :, ["quantity", "num_parallel"] + ].min(axis=1) lines_added["length"] = edisgo_obj.topology.lines_df.loc[ lines_added.index, "length" ] @@ -176,9 +194,9 @@ def _get_line_costs(lines_added): ].values, "total_costs": line_costs.costs.values, "length": ( - lines_added.quantity * lines_added.length + lines_added.quantity_added * lines_added.length ).values, - "quantity": lines_added.quantity.values, + "quantity": lines_added.quantity_added.values, "voltage_level": line_costs.voltage_level.values, }, index=lines_added.index, @@ -288,3 +306,69 @@ def line_expansion_costs(edisgo_obj, lines_names=None): ] ) return costs_lines.loc[lines_df.index] + + +def transformer_expansion_costs(edisgo_obj, transformer_names=None): + """ + Returns costs per transformer in kEUR as well as voltage level they are in. + + Parameters + ----------- + edisgo_obj : :class:`~.EDisGo` + eDisGo object + transformer_names: None or list(str) + List of names of transformers to return cost information for. If None, it is + returned for all transformers in + :attr:`~.network.topology.Topology.transformers_df` and + :attr:`~.network.topology.Topology.transformers_hvmv_df`. 
+ + Returns + ------- + costs: :pandas:`pandas.DataFrame` + Dataframe with names of transformers in index and columns 'costs' with + costs per transformer in kEUR and 'voltage_level' with information on voltage + level the transformer is in. + + """ + transformers_df = pd.concat( + [ + edisgo_obj.topology.transformers_df.copy(), + edisgo_obj.topology.transformers_hvmv_df.copy(), + ] + ) + if transformer_names is not None: + transformers_df = transformers_df.loc[transformer_names, ["type_info"]] + + if len(transformers_df) == 0: + return pd.DataFrame(columns=["costs", "voltage_level"]) + + hvmv_transformers = transformers_df[ + transformers_df.index.isin(edisgo_obj.topology.transformers_hvmv_df.index) + ].index + mvlv_transformers = transformers_df[ + transformers_df.index.isin(edisgo_obj.topology.transformers_df.index) + ].index + + costs_hvmv = float(edisgo_obj.config["costs_transformers"]["mv"]) + costs_mvlv = float(edisgo_obj.config["costs_transformers"]["lv"]) + + costs_df = pd.DataFrame( + { + "costs": costs_hvmv, + "voltage_level": "hv/mv", + }, + index=hvmv_transformers, + ) + costs_df = pd.concat( + [ + costs_df, + pd.DataFrame( + { + "costs": costs_mvlv, + "voltage_level": "mv/lv", + }, + index=mvlv_transformers, + ), + ] + ) + return costs_df diff --git a/tests/flex_opt/test_costs.py b/tests/flex_opt/test_costs.py index 813714aa8..308f9e7e6 100644 --- a/tests/flex_opt/test_costs.py +++ b/tests/flex_opt/test_costs.py @@ -3,7 +3,7 @@ import pytest from edisgo import EDisGo -from edisgo.flex_opt.costs import grid_expansion_costs, line_expansion_costs +from edisgo.flex_opt import costs as costs_mod class TestCosts: @@ -76,12 +76,12 @@ def test_costs(self): ], ) - costs = grid_expansion_costs(self.edisgo) + costs = costs_mod.grid_expansion_costs(self.edisgo) assert len(costs) == 4 assert ( costs.loc["MVStation_1_transformer_reinforced_2", "voltage_level"] - == "mv/lv" + == "hv/mv" ) assert costs.loc["MVStation_1_transformer_reinforced_2", "quantity"] == 1 
assert costs.loc["MVStation_1_transformer_reinforced_2", "total_costs"] == 1000 @@ -97,13 +97,13 @@ def test_costs(self): assert costs.loc["Line_10019", "type"] == "48-AL1/8-ST1A" assert costs.loc["Line_10019", "voltage_level"] == "mv" assert np.isclose(costs.loc["Line_50000002", "total_costs"], 2.34) - assert np.isclose(costs.loc["Line_50000002", "length"], 0.09) - assert costs.loc["Line_50000002", "quantity"] == 3 + assert np.isclose(costs.loc["Line_50000002", "length"], 0.03) + assert costs.loc["Line_50000002", "quantity"] == 1 assert costs.loc["Line_50000002", "type"] == "NAYY 4x1x35" assert costs.loc["Line_50000002", "voltage_level"] == "lv" def test_line_expansion_costs(self): - costs = line_expansion_costs(self.edisgo) + costs = costs_mod.line_expansion_costs(self.edisgo) assert len(costs) == len(self.edisgo.topology.lines_df) assert (costs.index == self.edisgo.topology.lines_df.index).all() assert len(costs[costs.voltage_level == "mv"]) == len( @@ -116,7 +116,9 @@ def test_line_expansion_costs(self): assert np.isclose(costs.at["Line_10000015", "costs_cable"], 0.27) assert costs.at["Line_10000015", "voltage_level"] == "lv" - costs = line_expansion_costs(self.edisgo, ["Line_10003", "Line_10000015"]) + costs = costs_mod.line_expansion_costs( + self.edisgo, ["Line_10003", "Line_10000015"] + ) assert len(costs) == 2 assert (costs.index.values == ["Line_10003", "Line_10000015"]).all() assert np.isclose(costs.at["Line_10003", "costs_earthworks"], 0.083904 * 60) @@ -125,3 +127,28 @@ def test_line_expansion_costs(self): assert np.isclose(costs.at["Line_10000015", "costs_earthworks"], 1.53) assert np.isclose(costs.at["Line_10000015", "costs_cable"], 0.27) assert costs.at["Line_10000015", "voltage_level"] == "lv" + + def test_transformer_expansion_costs(self): + costs = costs_mod.transformer_expansion_costs(self.edisgo) + transformers_df = pd.concat( + [ + self.edisgo.topology.transformers_df, + self.edisgo.topology.transformers_hvmv_df, + ] + ) + assert len(costs) == 
len(transformers_df) + assert sorted(costs.index) == sorted(transformers_df.index) + assert len(costs[costs.voltage_level == "hv/mv"]) == len( + self.edisgo.topology.transformers_hvmv_df + ) + assert np.isclose(costs.at["MVStation_1_transformer_1", "costs"], 1000) + assert costs.at["MVStation_1_transformer_1", "voltage_level"] == "hv/mv" + assert np.isclose(costs.at["LVStation_4_transformer_2", "costs"], 10) + assert costs.at["LVStation_4_transformer_2", "voltage_level"] == "mv/lv" + + costs = costs_mod.transformer_expansion_costs( + self.edisgo, ["LVStation_5_transformer_1"] + ) + assert len(costs) == 1 + assert np.isclose(costs.at["LVStation_5_transformer_1", "costs"], 10) + assert costs.at["LVStation_5_transformer_1", "voltage_level"] == "mv/lv" diff --git a/tests/flex_opt/test_reinforce_grid.py b/tests/flex_opt/test_reinforce_grid.py index cb6074310..e073b2326 100644 --- a/tests/flex_opt/test_reinforce_grid.py +++ b/tests/flex_opt/test_reinforce_grid.py @@ -81,7 +81,7 @@ def test_run_separate_lv_grids(self): assert len(g.buses_df) > 1 assert len(lv_grids_new) == 26 - assert np.isclose(edisgo.results.grid_expansion_costs.total_costs.sum(), 280.06) + assert np.isclose(edisgo.results.grid_expansion_costs.total_costs.sum(), 440.06) # check if all generators are still present assert np.isclose( diff --git a/tests/test_edisgo.py b/tests/test_edisgo.py index 1f09e62d9..e0379bd59 100755 --- a/tests/test_edisgo.py +++ b/tests/test_edisgo.py @@ -521,7 +521,7 @@ def test_reinforce_catch_convergence(self): ) results = self.edisgo.reinforce(catch_convergence_problems=True) assert results.unresolved_issues.empty - assert len(results.grid_expansion_costs) == 132 + assert len(results.grid_expansion_costs) == 134 assert len(results.equipment_changes) == 218 assert results.v_res.shape == (4, 142) @@ -543,7 +543,7 @@ def test_enhanced_reinforce_grid(self): results = edisgo_obj.results - assert len(results.grid_expansion_costs) == 445 + assert len(results.grid_expansion_costs) == 
454 assert len(results.equipment_changes) == 892 assert results.v_res.shape == (4, 148) From fd75b510ef9491844c6702fcf5c51d516678ae85 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 24 Apr 2024 15:03:07 +0200 Subject: [PATCH 048/141] Fix download of simbev and tracbev files --- examples/electromobility_example.ipynb | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/examples/electromobility_example.ipynb b/examples/electromobility_example.ipynb index 5bc23ea1d..bfc97335f 100644 --- a/examples/electromobility_example.ipynb +++ b/examples/electromobility_example.ipynb @@ -430,8 +430,7 @@ " url = (f\"https://github.com/openego/eDisGo/tree/dev/\" +\n", " f\"tests/data/simbev_example_scenario/{ags}/\")\n", " page = requests.get(url).text\n", - " items = json.loads(page)[\"payload\"][\"tree\"][\"items\"]\n", - " filenames = [f[\"name\"] for f in items if \"csv\" in f[\"name\"]]\n", + " filenames = [_ for _ in page.split('\"') if \".csv\" in _ and \"/\" not in _]\n", "\n", " for file in filenames:\n", " req = requests.get(f\"{raw_url}/{ags}/{file}\")\n", @@ -470,8 +469,7 @@ " url = (\"https://github.com/openego/eDisGo/tree/dev/\" +\n", " \"tests/data/tracbev_example_scenario/\")\n", " page = requests.get(url).text\n", - " items = json.loads(page)[\"payload\"][\"tree\"][\"items\"]\n", - " filenames = [f[\"name\"] for f in items if \"gpkg\" in f[\"name\"]]\n", + " filenames = [_ for _ in page.split('\"') if \".gpkg\" in _ and \"/\" not in _]\n", "\n", " for file in filenames:\n", " req = requests.get(\n", From 0de7d2314c6e4aa6a291f5704751268fbb8d4346 Mon Sep 17 00:00:00 2001 From: Jonas Danke Date: Wed, 3 Jul 2024 13:14:39 +0200 Subject: [PATCH 049/141] Fix: Correct Matplotlib Attribute Error --- edisgo/tools/plots.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/edisgo/tools/plots.py b/edisgo/tools/plots.py index 381c73754..3a1207788 100644 --- a/edisgo/tools/plots.py +++ b/edisgo/tools/plots.py @@ -6,7 +6,6 @@ from typing 
import TYPE_CHECKING import matplotlib -import matplotlib.cm as cm import numpy as np import pandas as pd import plotly.graph_objects as go @@ -604,7 +603,7 @@ def nodes_by_costs(buses, grid_expansion_costs, edisgo_obj): bus_sizes, bus_colors = nodes_by_costs( pypsa_plot.buses.index, grid_expansion_costs, edisgo_obj ) - bus_cmap = plt.cm.get_cmap(lines_cmap) + bus_cmap = matplotlib.pyplot.colormaps.get_cmap(lines_cmap) elif node_color == "curtailment": bus_sizes = nodes_curtailment(pypsa_plot.buses.index, curtailment_df) bus_colors = "orangered" @@ -681,7 +680,8 @@ def nodes_by_costs(buses, grid_expansion_costs, edisgo_obj): line_width = pypsa_plot.lines.s_nom * scaling_factor_line_width else: line_width = 2 - cmap = plt.cm.get_cmap(lines_cmap) + cmap = matplotlib.pyplot.colormaps.get_cmap(lines_cmap) + ll = pypsa_plot.plot( line_colors=line_colors, line_cmap=cmap, @@ -888,7 +888,7 @@ def color_map_color( """ norm = matplotlib.colors.Normalize(vmin=vmin, vmax=vmax) if isinstance(cmap_name, str): - cmap = cm.get_cmap(cmap_name) + cmap = matplotlib.pyplot.colormaps.get_cmap(cmap_name) else: cmap = matplotlib.colors.LinearSegmentedColormap.from_list("mycmap", cmap_name) rgb = cmap(norm(abs(value)))[:3] From ea61d12eb43502e59894e1fc073615c373593e91 Mon Sep 17 00:00:00 2001 From: Jonas Danke Date: Wed, 3 Jul 2024 14:27:02 +0200 Subject: [PATCH 050/141] Add check for missing annual demand in loads - Raise AttributeError if loads contain missing annual consumption data. 
--- edisgo/network/timeseries.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/edisgo/network/timeseries.py b/edisgo/network/timeseries.py index 8da353ff2..d2059d2d6 100644 --- a/edisgo/network/timeseries.py +++ b/edisgo/network/timeseries.py @@ -1448,6 +1448,13 @@ def predefined_conventional_loads_by_sector( load_names = self._check_if_components_exist(edisgo_object, load_names, "loads") loads_df = edisgo_object.topology.loads_df.loc[load_names, :] + # check if loads contain annual demand + if not all(loads_df.annual_consumption.notnull()): + raise AttributeError( + "The following loads have missing information on annual consumption: " + f"{loads_df[~loads_df.annual_consumption.notnull()].index.values}." + ) + # scale time series by annual consumption ts_scaled = loads_df.apply( lambda x: ts_loads[x.sector] * x.annual_consumption, From 56f32d50bf2506c40a0ebe32225adf4c53575906 Mon Sep 17 00:00:00 2001 From: Jonas Danke Date: Mon, 8 Jul 2024 09:11:57 +0200 Subject: [PATCH 051/141] change AttributeError to Warning for missing annual consumption data --- edisgo/network/timeseries.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/edisgo/network/timeseries.py b/edisgo/network/timeseries.py index d2059d2d6..0b7f39999 100644 --- a/edisgo/network/timeseries.py +++ b/edisgo/network/timeseries.py @@ -1450,9 +1450,9 @@ def predefined_conventional_loads_by_sector( # check if loads contain annual demand if not all(loads_df.annual_consumption.notnull()): - raise AttributeError( - "The following loads have missing information on annual consumption: " - f"{loads_df[~loads_df.annual_consumption.notnull()].index.values}." + raise Warning( + "Not all affected loads have information on annual consumption. Please " + "check and adapt if necessary." 
) # scale time series by annual consumption From 591141a74b8ed4435dd8987f50527c417a7537f5 Mon Sep 17 00:00:00 2001 From: Jonas Danke Date: Mon, 8 Jul 2024 11:42:50 +0200 Subject: [PATCH 052/141] Fix docs link check and build issues --- doc/conf.py | 5 ++++- doc/index.rst | 2 +- doc/quickstart.rst | 4 ++-- doc/usage_details.rst | 2 +- edisgo/edisgo.py | 16 ++++++++-------- edisgo/io/dsm_import.py | 2 +- edisgo/io/generators_import.py | 4 ++-- edisgo/io/heat_pump_import.py | 2 +- edisgo/io/timeseries_import.py | 10 +++++----- edisgo/network/heat.py | 8 ++++---- edisgo/network/timeseries.py | 2 +- 11 files changed, 30 insertions(+), 27 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index 851a3fb6d..ad1be2546 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -128,7 +128,10 @@ def setup(sphinx): "shapely.%s", ), "ding0": ("https://dingo.readthedocs.io/en/dev/api/ding0.html#%s", "ding0.%s"), - "pypsa": ("https://pypsa.readthedocs.io/en/latest/components.html#%s", "pypsa.%s"), + "pypsa": ( + "https://pypsa.readthedocs.io/en/latest/user-guide/components.html#%s", + "pypsa.%s", + ), "plotly": ( "https://plotly.com/python-api-reference/generated/%s.html", "plotly.%s", diff --git a/doc/index.rst b/doc/index.rst index dfebb4369..444e35f04 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -15,7 +15,7 @@ The toolbox currently includes: * `ding0 `_ tool for synthetic medium and low voltage grid topologies for the whole of Germany - * `OpenEnergy DataBase (oedb) `_ for + * `OpenEnergy DataBase (oedb) `_ for feed-in time series of fluctuating renewables and scenarios for future power plant park of Germany * `demandlib `_ for electrical load time series diff --git a/doc/quickstart.rst b/doc/quickstart.rst index 4ce437c05..a4af052f7 100644 --- a/doc/quickstart.rst +++ b/doc/quickstart.rst @@ -188,8 +188,8 @@ Aside from grid topology data you may eventually need a dataset on future installation of power plants. 
You may therefore use the scenarios developed in the `open_eGo `_ project that are available in the -`OpenEnergy DataBase (oedb) `_ -hosted on the `OpenEnergy Platform (OEP) `_. +`OpenEnergy DataBase (oedb) `_ +hosted on the `OpenEnergy Platform (OEP) `_. eDisGo provides an interface to the oedb using the package `ego.io `_. ego.io gives you a python SQL-Alchemy representations of the oedb and access to it by using the diff --git a/doc/usage_details.rst b/doc/usage_details.rst index aac09982f..a35ba1e20 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -241,7 +241,7 @@ This mode can be invoked as follows: For the following components you can use existing time series: * Fluctuating generators: Feed-in time series for solar and wind power plants can be - retrieved from the `OpenEnergy DataBase `_. + retrieved from the `OpenEnergy DataBase `_. * Conventional loads: Standard load profiles for the different sectors residential, commercial, agricultural and industrial are generated using the oemof `demandlib `_. diff --git a/edisgo/edisgo.py b/edisgo/edisgo.py index dca1660b2..52e708b2d 100755 --- a/edisgo/edisgo.py +++ b/edisgo/edisgo.py @@ -417,7 +417,7 @@ def set_time_series_active_power_predefined( Technology- and weather cell-specific hourly feed-in time series are obtained from the `OpenEnergy DataBase - `_. See + `_. See :func:`edisgo.io.timeseries_import.feedin_oedb` for more information. This option requires that the parameter `engine` is provided in case @@ -478,7 +478,7 @@ def set_time_series_active_power_predefined( Sets active power demand time series using individual hourly electricity load time series for one year obtained from the `OpenEnergy DataBase - `_. + `_. This option requires that the parameters `engine` and `scenario` are provided. 
For further settings, the parameter `timeindex` can also be @@ -933,7 +933,7 @@ def import_generators(self, generator_scenario=None, **kwargs): Gets generator park for specified scenario and integrates generators into grid. The generator data is retrieved from the - `open energy platform `_. Decommissioned + `open energy platform `_. Decommissioned generators are removed from the grid, generators with changed capacity updated and new generators newly integrated into the grid. @@ -1928,7 +1928,7 @@ def import_electromobility( Imports electromobility data and integrates charging points into grid. Electromobility data can be obtained from the `OpenEnergy DataBase - `_ or from self-provided + `_ or from self-provided data. In case you want to use self-provided data, it needs to be generated using the tools `SimBEV `_ (required version: @@ -1960,7 +1960,7 @@ def import_electromobility( * "oedb" Electromobility data is obtained from the `OpenEnergy DataBase - `_. + `_. This option requires that the parameters `scenario` and `engine` are provided. @@ -2143,7 +2143,7 @@ def import_heat_pumps(self, scenario, engine, timeindex=None, import_types=None) between two scenarios: 'eGon2035' and 'eGon100RE'. The data is retrieved from the - `open energy platform `_. + `open energy platform `_. # ToDo Add information on scenarios and from which tables data is retrieved. @@ -2305,7 +2305,7 @@ def apply_heat_pump_operating_strategy( def import_dsm(self, scenario: str, engine: Engine, timeindex=None): """ Gets industrial and CTS DSM profiles from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Profiles comprise minimum and maximum load increase in MW as well as maximum energy pre- and postponing in MWh. The data is written to the @@ -2356,7 +2356,7 @@ def import_home_batteries( between two scenarios: 'eGon2035' and 'eGon100RE'. The data is retrieved from the - `open energy platform `_. + `open energy platform `_. 
The batteries are integrated into the grid (added to :attr:`~.network.topology.Topology.storage_units_df`) based on their building diff --git a/edisgo/io/dsm_import.py b/edisgo/io/dsm_import.py index fac856d9f..648ae5039 100644 --- a/edisgo/io/dsm_import.py +++ b/edisgo/io/dsm_import.py @@ -28,7 +28,7 @@ def oedb( ): """ Gets industrial and CTS DSM profiles from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Profiles comprise minimum and maximum load increase in MW as well as maximum energy pre- and postponing in MWh. diff --git a/edisgo/io/generators_import.py b/edisgo/io/generators_import.py index e9628c6d9..281e78a04 100755 --- a/edisgo/io/generators_import.py +++ b/edisgo/io/generators_import.py @@ -42,9 +42,9 @@ def oedb_legacy(edisgo_object, generator_scenario, **kwargs): The importer uses SQLAlchemy ORM objects. These are defined in `ego.io `_. The data is imported from the tables - `conventional power plants `_ and - `renewable power plants `_. When the generator data is retrieved, the following steps are conducted: diff --git a/edisgo/io/heat_pump_import.py b/edisgo/io/heat_pump_import.py index bb80064b7..69133ca11 100644 --- a/edisgo/io/heat_pump_import.py +++ b/edisgo/io/heat_pump_import.py @@ -583,7 +583,7 @@ def _grid_integration( def efficiency_resistive_heaters_oedb(scenario, engine): """ Get efficiency of resistive heaters from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Parameters ---------- diff --git a/edisgo/io/timeseries_import.py b/edisgo/io/timeseries_import.py index 83e4afc65..5d154b965 100644 --- a/edisgo/io/timeseries_import.py +++ b/edisgo/io/timeseries_import.py @@ -74,7 +74,7 @@ def _timeindex_helper_func( def feedin_oedb_legacy(edisgo_object, timeindex=None): """ Import feed-in time series data for wind and solar power plants from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. 
Parameters ---------- @@ -158,7 +158,7 @@ def feedin_oedb( ): """ Import feed-in time series data for wind and solar power plants from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Parameters ---------- @@ -319,7 +319,7 @@ def load_time_series_demandlib(edisgo_obj, timeindex=None): def cop_oedb(edisgo_object, engine, weather_cell_ids, timeindex=None): """ Get COP (coefficient of performance) time series data from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Parameters ---------- @@ -377,7 +377,7 @@ def cop_oedb(edisgo_object, engine, weather_cell_ids, timeindex=None): def heat_demand_oedb(edisgo_obj, scenario, engine, timeindex=None): """ Get heat demand profiles for heat pumps from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Heat demand data is returned for all heat pumps in the grid. For more information on how individual heat demand profiles are obtained see @@ -498,7 +498,7 @@ def electricity_demand_oedb( ): """ Get electricity demand profiles for all conventional loads from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Conventional loads comprise conventional electricity applications in the residential, CTS and industrial sector. diff --git a/edisgo/network/heat.py b/edisgo/network/heat.py index 22403d0f2..1762f56c7 100644 --- a/edisgo/network/heat.py +++ b/edisgo/network/heat.py @@ -131,7 +131,7 @@ def set_cop(self, edisgo_object, ts_cop, **kwargs): Write COP time series for heat pumps to py:attr:`~cop_df`. COP time series can either be given to this function or be obtained from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. In case they are obtained from the OpenEnergy DataBase the heat pumps need to already be integrated into the grid, i.e. given in :attr:`~.network.topology.Topology.loads_df`. @@ -150,7 +150,7 @@ def set_cop(self, edisgo_object, ts_cop, **kwargs): * 'oedb' COP / efficiency data are obtained from the `OpenEnergy DataBase - `_. + `_. 
In case of heat pumps weather cell specific hourly COP time series are obtained (see :func:`edisgo.io.timeseries_import.cop_oedb` for more information). Using information on which weather cell each heat pump @@ -317,7 +317,7 @@ def set_heat_demand(self, edisgo_object, ts_heat_demand, **kwargs): Write heat demand time series of heat pumps to py:attr:`~heat_demand_df`. Heat demand time series can either be given to this function or be obtained from - the `OpenEnergy DataBase `_. + the `OpenEnergy DataBase `_. In case they are obtained from the OpenEnergy DataBase the heat pumps need to already be integrated into the grid, i.e. given in :attr:`~.network.topology.Topology.loads_df`. @@ -336,7 +336,7 @@ def set_heat_demand(self, edisgo_object, ts_heat_demand, **kwargs): * 'oedb' Heat demand time series are obtained from the `OpenEnergy DataBase - `_ (see + `_ (see :func:`edisgo.io.timeseries_import.heat_demand_oedb` for more information). Time series are only obtained for heat pumps that are already integrated diff --git a/edisgo/network/timeseries.py b/edisgo/network/timeseries.py index 8da353ff2..585240e98 100644 --- a/edisgo/network/timeseries.py +++ b/edisgo/network/timeseries.py @@ -1204,7 +1204,7 @@ def predefined_fluctuating_generators_by_technology( Technology and weather cell specific hourly feed-in time series are obtained from the `OpenEnergy DataBase - `_. See + `_. See :func:`edisgo.io.timeseries_import.feedin_oedb` for more information. 
This option requires that the parameter `engine` is provided in case From 41888aa0bb1e2d705d303b5aefd55c790c833ed9 Mon Sep 17 00:00:00 2001 From: Jonas Danke Date: Mon, 8 Jul 2024 11:43:59 +0200 Subject: [PATCH 053/141] update installation instructions --- doc/dev_notes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/dev_notes.rst b/doc/dev_notes.rst index e012ce874..953ae282b 100644 --- a/doc/dev_notes.rst +++ b/doc/dev_notes.rst @@ -24,7 +24,7 @@ following commands within your eDisGo directory: .. code-block:: bash - python -m pip install -e .[full] # install eDisGo from source + python -m pip install -e .[dev] # install eDisGo from source pre-commit install # install pre-commit hooks From 64a4f7b7412e4f898e7997a3e8bf36c2a9e646be Mon Sep 17 00:00:00 2001 From: Jonas Danke Date: Mon, 8 Jul 2024 11:54:02 +0200 Subject: [PATCH 054/141] Update PyPSA power flow analysis link in EDisGo class --- edisgo/edisgo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/edisgo/edisgo.py b/edisgo/edisgo.py index 52e708b2d..3930a5659 100755 --- a/edisgo/edisgo.py +++ b/edisgo/edisgo.py @@ -1003,7 +1003,7 @@ def analyze( Conducts a static, non-linear power flow analysis. 
Conducts a static, non-linear power flow analysis using - `PyPSA `_ and writes results (active, reactive and apparent power as well as current on lines and voltages at buses) to :class:`~.network.results.Results` From 6ec035345f0633bd76dcebc1e66932a0dec14707 Mon Sep 17 00:00:00 2001 From: Jonas Danke Date: Mon, 8 Jul 2024 12:08:17 +0200 Subject: [PATCH 055/141] Update error handling for missing annual consumption data --- edisgo/network/timeseries.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/edisgo/network/timeseries.py b/edisgo/network/timeseries.py index 0b7f39999..2d8bb0b9e 100644 --- a/edisgo/network/timeseries.py +++ b/edisgo/network/timeseries.py @@ -1450,9 +1450,8 @@ def predefined_conventional_loads_by_sector( # check if loads contain annual demand if not all(loads_df.annual_consumption.notnull()): - raise Warning( - "Not all affected loads have information on annual consumption. Please " - "check and adapt if necessary." + raise AttributeError( + "The annual consumption of some loads is missing. Please provide " ) # scale time series by annual consumption From af1b79d2d7c78216c0f652649dd9fe2ef1d1fe64 Mon Sep 17 00:00:00 2001 From: Jonas Danke Date: Mon, 8 Jul 2024 13:49:24 +0200 Subject: [PATCH 056/141] Update installation instructions for edisgoOPF package --- doc/quickstart.rst | 44 ++++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/doc/quickstart.rst b/doc/quickstart.rst index a4af052f7..f0dad32d6 100644 --- a/doc/quickstart.rst +++ b/doc/quickstart.rst @@ -44,41 +44,41 @@ Installation using MacOS We don't have any experience with our package on MacOS yet! If you try eDisGo on MacOS we would be happy if you let us know about your experience! -Requirements for edisgoOPF package ----------------------------------- +edisgoOPF Package +----------------- -.. warning:: The non-linear optimal power flow is currently not maintained and might not work out of the box! 
+Introduction +^^^^^^^^^^^^^ +edisgoOPF is a Python package designed for optimal power flow (OPF) calculations in electrical distribution systems. This package leverages the power of Julia, a high-performance programming language for technical computing, to perform efficient OPF calculations. -To use the multiperiod optimal power flow that is provided in the julia package -edisgoOPF in eDisGo you additionally need to install julia version 1.1.1. -Download julia from -`julia download page `_ and -add it to your path (see -`platform specific instructions `_ -for more information). -Before using the edisgoOPF julia package for the first time you need to -instantiate it. Therefore, in a terminal change directory to the edisgoOPF -package located in eDisGo/edisgo/opf/edisgoOPF and call julia from there. -Change to package mode by typing -.. code-block:: bash +To use edisgoOPF effectively, you'll need to set up a specific environment that includes: - ] +1. **Julia**: Version 1.6.7 is required. +2. **Gurobi Optimizer**: A powerful optimization solver. -Then activate the package: +Installation Steps +^^^^^^^^^^^^^^^^^^^ + +1. Install Julia 1.6.7 -.. code-block:: bash - (v1.0) pkg> activate . +Download Julia 1.6.7 from the `Julia LTS releases page `_. -And finally instantiate it: +For Linux users, you can use the following commands: .. code-block:: bash - (SomeProject) pkg> instantiate + wget https://julialang-s3.julialang.org/bin/linux/x64/1.6/julia-1.6.7-linux-x86_64.tar.gz + tar zxvf julia-1.6.7-linux-x86_64.tar.gz + export PATH="$PATH:/julia-1.6.7/bin" + +2. Install Gurobi + + +Follow the `Gurobi installation guide `_ to install Gurobi and add it to your system path. -.. 
_prerequisites: Additional linear solver ^^^^^^^^^^^^^^^^^^^^^^^^^ From 7db9daf2012bd35224342cb014337aa18bd87036 Mon Sep 17 00:00:00 2001 From: joda9 Date: Mon, 8 Jul 2024 17:07:55 +0200 Subject: [PATCH 057/141] update setup.py to newest pypsa and pyomo versions --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 4f570f745..0d83f01f4 100644 --- a/setup.py +++ b/setup.py @@ -50,10 +50,10 @@ def read(fname): "plotly", "pydot", "pygeos", - "pyomo <= 6.4.2", # Problem with PyPSA 20.1 fixed in newest PyPSA release + # "pyomo <= 6.4.2", # Problem with PyPSA 20.1 fixed in newest PyPSA release "pypower", "pyproj >= 3.0.0", - "pypsa >= 0.17.0, <= 0.20.1", + "pypsa", "pyyaml", "saio", "scikit-learn <= 1.1.1", From 579eb5647c796c3e06e4db3311a871c781d820b2 Mon Sep 17 00:00:00 2001 From: joda9 Date: Mon, 8 Jul 2024 17:13:59 +0200 Subject: [PATCH 058/141] add linkcheck_ignore gurobi --- doc/conf.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index ad1be2546..134f16573 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -138,9 +138,14 @@ def setup(sphinx): ), } # ignore the following external links when checking the links -# stackoverflow is listed here because for some reason the link check fails for these +# stackoverflow and gurobi is listed here because for some reason +# the link check fails for these # in the github action, even though the link is correct -linkcheck_ignore = [r"https://stackoverflow.com*"] +linkcheck_ignore = [ + r"https://stackoverflow.com*", + r"https://support.gurobi.com/hc/en-us/articles\ + /14799677517585-Getting-Started-with-Gurobi-Optimizer", +] # Add any paths that contain templates here, relative to this directory. 
templates_path = ["_templates"] From a090f0d8c0a6a29d7b966e23a2095f80f0aceeb6 Mon Sep 17 00:00:00 2001 From: joda9 <66819219+joda9@users.noreply.github.com> Date: Mon, 8 Jul 2024 17:19:09 +0200 Subject: [PATCH 059/141] Update conf.py --- doc/conf.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index 134f16573..2b2867de2 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -143,8 +143,7 @@ def setup(sphinx): # in the github action, even though the link is correct linkcheck_ignore = [ r"https://stackoverflow.com*", - r"https://support.gurobi.com/hc/en-us/articles\ - /14799677517585-Getting-Started-with-Gurobi-Optimizer", + r"https://support.gurobi.com/*", ] # Add any paths that contain templates here, relative to this directory. From c400c804a791ef78d930349c35f42b37e34f2c79 Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 9 Jul 2024 11:33:04 +0200 Subject: [PATCH 060/141] add pytest.mark.oedbtest for failing tests due to oedb error --- tests/conftest.py | 10 ++++++++++ tests/io/test_generators_import.py | 4 ++++ tests/io/test_timeseries_import.py | 2 ++ tests/network/test_timeseries.py | 1 + tests/test_edisgo.py | 1 + tests/test_examples.py | 2 ++ tests/tools/test_tools.py | 1 + 7 files changed, 21 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 6809eb81a..8dedbc6cb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -57,6 +57,9 @@ def pytest_addoption(parser): default=False, help="run tests that only work locally", ) + parser.addoption( + "--runoedbtest", action="store_true", default=False, help="Run OEDB tests" + ) def pytest_collection_modifyitems(config, items): @@ -75,3 +78,10 @@ def pytest_collection_modifyitems(config, items): for item in items: if "runonlinux" in item.keywords: item.add_marker(skip_windows) + if config.getoption("--runoedbtest"): + # If the --runoedbtest option is specified, do not skip any tests + return + skip_oedbtest = pytest.mark.skip(reason="Need --runoedbtest option 
to run") + for item in items: + if "oedbtest" in item.keywords: + item.add_marker(skip_oedbtest) diff --git a/tests/io/test_generators_import.py b/tests/io/test_generators_import.py index 9adfdde1e..51ee4b5ee 100644 --- a/tests/io/test_generators_import.py +++ b/tests/io/test_generators_import.py @@ -484,6 +484,7 @@ class TestGeneratorsImportOEDB: """ @pytest.mark.slow + @pytest.mark.oedbtest def test_oedb_legacy_without_timeseries(self): edisgo = EDisGo( ding0_grid=pytest.ding0_test_network_2_path, @@ -497,6 +498,7 @@ def test_oedb_legacy_without_timeseries(self): assert np.isclose(edisgo.topology.generators_df.p_nom.sum(), 20.18783) @pytest.mark.slow + @pytest.mark.oedbtest def test_oedb_legacy_with_worst_case_timeseries(self): edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_2_path) edisgo.set_time_series_worst_case_analysis() @@ -568,6 +570,7 @@ def test_oedb_legacy_with_worst_case_timeseries(self): # :, new_solar_gen.name] / new_solar_gen.p_nom).all() @pytest.mark.slow + @pytest.mark.oedbtest def test_oedb_legacy_with_timeseries_by_technology(self): timeindex = pd.date_range("1/1/2012", periods=3, freq="H") ts_gen_dispatchable = pd.DataFrame( @@ -647,6 +650,7 @@ def test_oedb_legacy_with_timeseries_by_technology(self): # :, new_solar_gen.name] / new_solar_gen.p_nom).all() @pytest.mark.slow + @pytest.mark.oedbtest def test_target_capacity(self): edisgo = EDisGo( ding0_grid=pytest.ding0_test_network_2_path, diff --git a/tests/io/test_timeseries_import.py b/tests/io/test_timeseries_import.py index 93825e9ac..2bae3cf27 100644 --- a/tests/io/test_timeseries_import.py +++ b/tests/io/test_timeseries_import.py @@ -59,6 +59,7 @@ def test__timeindex_helper_func(self): assert_index_equal(ind, given_index) assert_index_equal(ind_full, timeindex) + @pytest.mark.oedbtest def test_feedin_oedb_legacy(self): edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_path) timeindex = pd.date_range("1/1/2010", periods=3000, freq="H") @@ -86,6 +87,7 @@ def test_feedin_oedb(self): 
assert feedin_df.shape == (6, 4) assert_index_equal(feedin_df.index, timeindex) + @pytest.mark.oedbtest def test_load_time_series_demandlib(self): edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_path) timeindex = pd.date_range("1/1/2018", periods=7000, freq="H") diff --git a/tests/network/test_timeseries.py b/tests/network/test_timeseries.py index 2c05eda49..3818aaf3b 100644 --- a/tests/network/test_timeseries.py +++ b/tests/network/test_timeseries.py @@ -1235,6 +1235,7 @@ def test_worst_case_storage_units(self): ) @pytest.mark.slow + @pytest.mark.oedbtest def test_predefined_fluctuating_generators_by_technology(self): timeindex = pd.date_range("1/1/2011 12:00", periods=2, freq="H") self.edisgo.timeseries.timeindex = timeindex diff --git a/tests/test_edisgo.py b/tests/test_edisgo.py index e0379bd59..dbfa03a33 100755 --- a/tests/test_edisgo.py +++ b/tests/test_edisgo.py @@ -382,6 +382,7 @@ def test_to_graph(self): ) @pytest.mark.slow + @pytest.mark.oedbtest def test_generator_import(self): edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_2_path) edisgo.import_generators("nep2035") diff --git a/tests/test_examples.py b/tests/test_examples.py index 8e31d88d9..9f0a862eb 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -26,6 +26,7 @@ def test_plot_example_ipynb(self): assert result.exec_error is None @pytest.mark.slow + @pytest.mark.oedbtest def test_electromobility_example_ipynb(self): path = os.path.join(self.examples_dir_path, "electromobility_example.ipynb") notebook = pytest_notebook.notebook.load_notebook(path=path) @@ -39,6 +40,7 @@ def test_electromobility_example_ipynb(self): assert result.exec_error is None @pytest.mark.slow + @pytest.mark.oedbtest def test_edisgo_simple_example_ipynb(self): path = os.path.join(self.examples_dir_path, "edisgo_simple_example.ipynb") notebook = pytest_notebook.notebook.load_notebook(path=path) diff --git a/tests/tools/test_tools.py b/tests/tools/test_tools.py index 606c60d81..40c34a63b 100644 --- 
a/tests/tools/test_tools.py +++ b/tests/tools/test_tools.py @@ -168,6 +168,7 @@ def test_determine_bus_voltage_level(self): assert tools.determine_bus_voltage_level(self.edisgo, bus_voltage_level_6) == 6 assert tools.determine_bus_voltage_level(self.edisgo, bus_voltage_level_7) == 7 + @pytest.mark.oedbtest def test_get_weather_cells_intersecting_with_grid_district(self): weather_cells = tools.get_weather_cells_intersecting_with_grid_district( self.edisgo From 3faa30bb3e4a355824ec7f6030d7f497e420c0a5 Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 9 Jul 2024 11:38:35 +0200 Subject: [PATCH 061/141] Add pytest.mark.oedbtest for failing tests due to oedb error --- tests/network/test_timeseries.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/network/test_timeseries.py b/tests/network/test_timeseries.py index 3818aaf3b..8cef16dc9 100644 --- a/tests/network/test_timeseries.py +++ b/tests/network/test_timeseries.py @@ -1550,6 +1550,7 @@ def test_predefined_dispatchable_generators_by_technology(self): ) # fmt: on + @pytest.mark.oedbtest # WARNING: This is NO oedb error, but a demandlib error def test_predefined_conventional_loads_by_sector(self, caplog): index = pd.date_range("1/1/2018", periods=3, freq="H") self.edisgo.timeseries.timeindex = index From 22450da42d78ebfe5d8f3fad96975e32c3932fcb Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 9 Jul 2024 11:54:30 +0200 Subject: [PATCH 062/141] update demandlib dependency to version < 0.2.0 due to demandlib error --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 0d83f01f4..17e87f85a 100644 --- a/setup.py +++ b/setup.py @@ -33,7 +33,7 @@ def read(fname): requirements = [ "contextily", "dash < 2.9.0", - "demandlib", + "demandlib < 0.2.0", "descartes", "egoio >= 0.4.7", "geoalchemy2 < 0.7.0", From 90dc28b307967561fd4b71b8381b530b78621418 Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 9 Jul 2024 12:03:19 +0200 Subject: [PATCH 063/141] Remove pytest.mark.oedbtest for 
demandlib error --- tests/network/test_timeseries.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/network/test_timeseries.py b/tests/network/test_timeseries.py index 8cef16dc9..3818aaf3b 100644 --- a/tests/network/test_timeseries.py +++ b/tests/network/test_timeseries.py @@ -1550,7 +1550,6 @@ def test_predefined_dispatchable_generators_by_technology(self): ) # fmt: on - @pytest.mark.oedbtest # WARNING: This is NO oedb error, but a demandlib error def test_predefined_conventional_loads_by_sector(self, caplog): index = pd.date_range("1/1/2018", periods=3, freq="H") self.edisgo.timeseries.timeindex = index From 4dcc0eeedcd70cf8e6a391c88d470748ee4ce4ec Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 9 Jul 2024 12:11:44 +0200 Subject: [PATCH 064/141] Revert "update setup.py to newest pypsa and pyomo versions" This reverts commit 7db9daf2012bd35224342cb014337aa18bd87036. --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 17e87f85a..b77dba80f 100644 --- a/setup.py +++ b/setup.py @@ -50,10 +50,10 @@ def read(fname): "plotly", "pydot", "pygeos", - # "pyomo <= 6.4.2", # Problem with PyPSA 20.1 fixed in newest PyPSA release + "pyomo <= 6.4.2", # Problem with PyPSA 20.1 fixed in newest PyPSA release "pypower", "pyproj >= 3.0.0", - "pypsa", + "pypsa >= 0.17.0, <= 0.20.1", "pyyaml", "saio", "scikit-learn <= 1.1.1", From 7c4cbbbdc8771673cae09f8452adc029271fa0bc Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 9 Jul 2024 17:12:52 +0200 Subject: [PATCH 065/141] Remove pytest.mark.oedbtest for demandlib error --- tests/io/test_timeseries_import.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/io/test_timeseries_import.py b/tests/io/test_timeseries_import.py index 2bae3cf27..17340163b 100644 --- a/tests/io/test_timeseries_import.py +++ b/tests/io/test_timeseries_import.py @@ -87,7 +87,6 @@ def test_feedin_oedb(self): assert feedin_df.shape == (6, 4) assert_index_equal(feedin_df.index, timeindex) - 
@pytest.mark.oedbtest def test_load_time_series_demandlib(self): edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_path) timeindex = pd.date_range("1/1/2018", periods=7000, freq="H") From 47abe91ccadb56e4f04105f150a38a5696fd1e48 Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 9 Jul 2024 17:13:38 +0200 Subject: [PATCH 066/141] Skip oedbtest in pytest unless --runoedbtest option is specified --- tests/conftest.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 8dedbc6cb..998adaebb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -78,10 +78,8 @@ def pytest_collection_modifyitems(config, items): for item in items: if "runonlinux" in item.keywords: item.add_marker(skip_windows) - if config.getoption("--runoedbtest"): - # If the --runoedbtest option is specified, do not skip any tests - return - skip_oedbtest = pytest.mark.skip(reason="Need --runoedbtest option to run") - for item in items: - if "oedbtest" in item.keywords: - item.add_marker(skip_oedbtest) + if not config.getoption("--runoedbtest"): + skip_oedbtest = pytest.mark.skip(reason="need --runoedbtest option to run") + for item in items: + if "oedbtest" in item.keywords: + item.add_marker(skip_oedbtest) From 04878dd2854342e68c6e587adec95c99e429593c Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 10 Jul 2024 16:08:58 +0200 Subject: [PATCH 067/141] Adapt installation documentation --- doc/quickstart.rst | 95 +++------------------------------------------- 1 file changed, 5 insertions(+), 90 deletions(-) diff --git a/doc/quickstart.rst b/doc/quickstart.rst index f0dad32d6..2d64df52f 100644 --- a/doc/quickstart.rst +++ b/doc/quickstart.rst @@ -3,11 +3,11 @@ Getting started ================ +.. warning:: Make sure to use python 3.8 or higher! + Installation using Linux ------------------------- -.. warning:: Make sure to use python 3.8 or higher! - Install latest eDisGo version through pip. 
Therefore, we highly recommend using a virtual environment and its pip. @@ -21,8 +21,6 @@ You may also consider installing a developer version as detailed in Installation using Windows -------------------------- -.. warning:: Make sure to use python 3.8 or higher! - For Windows users we recommend using Anaconda and to install the geo stack using the conda-forge channel prior to installing eDisGo. You may use the provided `eDisGo_env.yml file `_ @@ -44,16 +42,10 @@ Installation using MacOS We don't have any experience with our package on MacOS yet! If you try eDisGo on MacOS we would be happy if you let us know about your experience! -edisgoOPF Package ------------------ +Additional requirements for Optimal Power Flow +--------------------------------------------------- -Introduction -^^^^^^^^^^^^^ -edisgoOPF is a Python package designed for optimal power flow (OPF) calculations in electrical distribution systems. This package leverages the power of Julia, a high-performance programming language for technical computing, to perform efficient OPF calculations. - - - -To use edisgoOPF effectively, you'll need to set up a specific environment that includes: +In order to use the optimal power flow, you additionally need: 1. **Julia**: Version 1.6.7 is required. 2. **Gurobi Optimizer**: A powerful optimization solver. @@ -79,83 +71,6 @@ For Linux users, you can use the following commands: Follow the `Gurobi installation guide `_ to install Gurobi and add it to your system path. - -Additional linear solver -^^^^^^^^^^^^^^^^^^^^^^^^^ - -As with the default linear solver in Ipopt (local solver used in the OPF) -the limit for prolem sizes is reached quite quickly, you may want to instead use -the solver HSL_MA97. -The steps required to set up HSL are also described in the -`Ipopt Documentation `_. -Here is a short version for reference: - -First, you need to obtain an academic license for HSL Solvers. 
-Under https://licences.stfc.ac.uk/product/coin-hsl download the sources for Coin-HSL Full (Stable). -You will need to provide an institutional e-mail to gain access. - -Unpack the tar.gz: - -.. code-block:: bash - - tar -xvzf coinhsl-2014.01.10.tar.gz - -To install the solver, clone the Ipopt Third Party HSL tools: - -.. code-block:: bash - - git clone https://github.com/coin-or-tools/ThirdParty-HSL.git - cd ThirdParty-HSL - - -Under `ThirdParty-HSL`, create a folder for the HSL sources named `coinhsl` and -copy the contents of the HSL archive into it. -Under Ubuntu, you'll need BLAS, LAPACK and GCC for Fortran. If you don't have them, install them via: - -.. code-block:: bash - - sudo apt-get install libblas-dev liblapack-dev gfortran - -You can then configure and install your HSL Solvers: - -.. code-block:: bash - - ./configure - make - sudo make install - -To make Ipopt pick up the solver, you need to add it to your path. -During install, there will be an output that tells you where the libraries have -been put. Usually like this: - -.. code-block:: bash - - Libraries have been installed in: - /usr/local/lib - - -Add this path to the variable `LD_LIBRARY_PATH`: - -.. code-block:: bash - - export LD_LIBRARY="/usr/local/bin":$LD_LIBRARY_PATH - -You might also want to add this to your .bashrc to make it persistent. - -For some reason, Ipopt looks for a library named `libhsl.so`, which is not what -the file is named, so we'll also need to provide a symlink: - -.. code-block:: bash - - cd /usr/local/lib - ln -s libcoinhsl.so libhsl.so - -MA97 should now work and can be called from Julia with: - -.. 
code-block:: julia - - JuMP.setsolver(pm.model,IpoptSolver(linear_solver="ma97")) - Prerequisites ------------- From 2f06b51bd3b0b4b614223f6d905eac50eccd22c2 Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 10 Jul 2024 17:17:57 +0200 Subject: [PATCH 068/141] Update installation instructions for Julia --- doc/quickstart.rst | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/doc/quickstart.rst b/doc/quickstart.rst index 2d64df52f..04f12fbaf 100644 --- a/doc/quickstart.rst +++ b/doc/quickstart.rst @@ -58,13 +58,8 @@ Installation Steps Download Julia 1.6.7 from the `Julia LTS releases page `_. -For Linux users, you can use the following commands: +Install Julia by following the instructions in the `Julia installation guide `_. Make sure to add Julia to your system path. -.. code-block:: bash - - wget https://julialang-s3.julialang.org/bin/linux/x64/1.6/julia-1.6.7-linux-x86_64.tar.gz - tar zxvf julia-1.6.7-linux-x86_64.tar.gz - export PATH="$PATH:/julia-1.6.7/bin" 2. Install Gurobi From 840af44d8e6b84fc4101760e63a171fbfe53ed1d Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 10 Jul 2024 19:15:44 +0200 Subject: [PATCH 069/141] Fix error message typo by removing extra space --- edisgo/network/timeseries.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/edisgo/network/timeseries.py b/edisgo/network/timeseries.py index 2d8bb0b9e..a2039702c 100644 --- a/edisgo/network/timeseries.py +++ b/edisgo/network/timeseries.py @@ -1451,7 +1451,7 @@ def predefined_conventional_loads_by_sector( # check if loads contain annual demand if not all(loads_df.annual_consumption.notnull()): raise AttributeError( - "The annual consumption of some loads is missing. Please provide " + "The annual consumption of some loads is missing. 
Please provide" ) # scale time series by annual consumption From b704d2ca30335245cf256dc9554ca6b74bc6b95e Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 10 Jul 2024 19:17:02 +0200 Subject: [PATCH 070/141] Add test for missing annual demand --- tests/network/test_timeseries.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/network/test_timeseries.py b/tests/network/test_timeseries.py index 2c05eda49..b06fc945f 100644 --- a/tests/network/test_timeseries.py +++ b/tests/network/test_timeseries.py @@ -1793,6 +1793,26 @@ def test_predefined_conventional_loads_by_sector(self, caplog): ).values, ).all() + # test Error if 'annual_consumption' is missing + # Save the original 'annual_consumption' values + original_annual_consumption = self.edisgo.topology.loads_df[ + "annual_consumption" + ].copy() + # Set 'annual_consumption' to None for the test + self.edisgo.topology.loads_df["annual_consumption"] = None + with pytest.raises(AttributeError) as exc_info: + self.edisgo.timeseries.predefined_conventional_loads_by_sector( + self.edisgo, "demandlib" + ) + assert ( + exc_info.value.args[0] + == "The annual consumption of some loads is missing. 
Please provide" + ) + # Restore the original 'annual_consumption' values + self.edisgo.topology.loads_df[ + "annual_consumption" + ] = original_annual_consumption + def test_predefined_charging_points_by_use_case(self, caplog): index = pd.date_range("1/1/2018", periods=3, freq="H") self.edisgo.timeseries.timeindex = index From 35249d75172af56157e6bed24eb02b8cda0af2b9 Mon Sep 17 00:00:00 2001 From: joda9 Date: Mon, 15 Jul 2024 13:15:42 +0200 Subject: [PATCH 071/141] Update Python versions in tests-coverage workflow --- .github/workflows/tests-coverage.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/tests-coverage.yml b/.github/workflows/tests-coverage.yml index 951972d4d..062ccd035 100644 --- a/.github/workflows/tests-coverage.yml +++ b/.github/workflows/tests-coverage.yml @@ -20,13 +20,13 @@ jobs: include: - name-suffix: "coverage" os: ubuntu-latest - python-version: 3.8 + python-version: 3.9 - name-suffix: "basic" os: ubuntu-latest - python-version: 3.9 + python-version: 3.10 - name-suffix: "basic" os: windows-latest - python-version: 3.8 + python-version: 3.9 steps: - name: Checkout repo @@ -71,7 +71,7 @@ jobs: python -m pytest --runslow --disable-warnings --color=yes -v - name: Run tests, coverage and send to coveralls - if: runner.os == 'Linux' && matrix.python-version == 3.8 && matrix.name-suffix == 'coverage' + if: runner.os == 'Linux' && matrix.python-version == 3.9 && matrix.name-suffix == 'coverage' run: | pip install pytest pytest-notebook coveralls coverage run --source=edisgo -m pytest --runslow --runonlinux --disable-warnings --color=yes -v From 44fbc075b3e8dafe7b80c686491a6fae63a67c9d Mon Sep 17 00:00:00 2001 From: joda9 Date: Mon, 15 Jul 2024 13:18:46 +0200 Subject: [PATCH 072/141] Update Python versions in tests-coverage workflow --- .github/workflows/tests-coverage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests-coverage.yml 
b/.github/workflows/tests-coverage.yml index 062ccd035..0005070a1 100644 --- a/.github/workflows/tests-coverage.yml +++ b/.github/workflows/tests-coverage.yml @@ -23,7 +23,7 @@ jobs: python-version: 3.9 - name-suffix: "basic" os: ubuntu-latest - python-version: 3.10 + python-version: "3.10" - name-suffix: "basic" os: windows-latest python-version: 3.9 From a8c7c2e4f41eb734fc42726d0077a88ae7dfe815 Mon Sep 17 00:00:00 2001 From: joda9 <66819219+joda9@users.noreply.github.com> Date: Mon, 15 Jul 2024 16:31:49 +0200 Subject: [PATCH 073/141] Update dev_notes to python>=3.9 --- doc/dev_notes.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/doc/dev_notes.rst b/doc/dev_notes.rst index 953ae282b..5d7764e48 100644 --- a/doc/dev_notes.rst +++ b/doc/dev_notes.rst @@ -18,8 +18,7 @@ Installation using Linux ~~~~~~~~~~~~~~~~~~~~~~~~ To set up a source installation using linux simply use a virtual environment and install -the source code with pip. Make sure to use python3.7 or higher (recommended -python3.8). **After** setting up your virtual environment and activating it run the +the source code with pip. Make sure to use python3.9 or higher. **After** setting up your virtual environment and activating it run the following commands within your eDisGo directory: .. code-block:: bash From b909a1a7593b4a4fab39e098c5c5763ffe270ccc Mon Sep 17 00:00:00 2001 From: joda9 <66819219+joda9@users.noreply.github.com> Date: Mon, 15 Jul 2024 16:32:35 +0200 Subject: [PATCH 074/141] Update quickstart.rst to python>=3.9 --- doc/quickstart.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/quickstart.rst b/doc/quickstart.rst index 04f12fbaf..bc537cc44 100644 --- a/doc/quickstart.rst +++ b/doc/quickstart.rst @@ -3,7 +3,7 @@ Getting started ================ -.. warning:: Make sure to use python 3.8 or higher! +.. warning:: Make sure to use python 3.9 or higher! 
Installation using Linux ------------------------- From 88d2c03552a616c346e497086b42743c73fdab7c Mon Sep 17 00:00:00 2001 From: joda9 <66819219+joda9@users.noreply.github.com> Date: Mon, 15 Jul 2024 16:41:53 +0200 Subject: [PATCH 075/141] Update eDisGo_env.yml Updated the python version to >=3.9<3.11 --- eDisGo_env.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eDisGo_env.yml b/eDisGo_env.yml index 45b797e16..06e2b03e6 100644 --- a/eDisGo_env.yml +++ b/eDisGo_env.yml @@ -3,7 +3,7 @@ channels: - conda-forge - defaults dependencies: - - python >= 3.8, < 3.10 + - python >= 3.9, < 3.11 - pip - pandas >= 1.4, < 2.2.0 - conda-forge::fiona From 9b7f53c7604a83ea5263566e5cf9d543d2513de6 Mon Sep 17 00:00:00 2001 From: joda9 <66819219+joda9@users.noreply.github.com> Date: Mon, 15 Jul 2024 16:42:31 +0200 Subject: [PATCH 076/141] Update eDisGo_env_dev.yml Updated the python version to >=3.9<3.11 --- eDisGo_env_dev.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eDisGo_env_dev.yml b/eDisGo_env_dev.yml index ae86632ac..eb9cb30df 100644 --- a/eDisGo_env_dev.yml +++ b/eDisGo_env_dev.yml @@ -3,7 +3,7 @@ channels: - conda-forge - defaults dependencies: - - python >= 3.8, < 3.10 + - python >= 3.9, < 3.11 - pip - pandas >= 1.4, < 2.2.0 - conda-forge::fiona From fe017545d436c04db500be784f4798a383d04154 Mon Sep 17 00:00:00 2001 From: joda9 <66819219+joda9@users.noreply.github.com> Date: Mon, 15 Jul 2024 16:43:22 +0200 Subject: [PATCH 077/141] Update setup.py Updated the python version to >=3.9 --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index b77dba80f..0ccb320b3 100644 --- a/setup.py +++ b/setup.py @@ -4,9 +4,9 @@ from setuptools import find_packages, setup -if sys.version_info[:2] < (3, 8): +if sys.version_info[:2] < (3, 9): error = ( - "eDisGo requires Python 3.8 or later (%d.%d detected)." % sys.version_info[:2] + "eDisGo requires Python 3.9 or later (%d.%d detected)." 
% sys.version_info[:2] ) sys.stderr.write(error + "\n") sys.exit(1) From eb510223c9be28c802344f4751f5f54570261da1 Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 16 Jul 2024 12:42:00 +0200 Subject: [PATCH 078/141] improve python3.11 compatibility --- .github/workflows/tests-coverage.yml | 5 ++++- edisgo/network/topology.py | 2 +- setup.py | 2 +- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/tests-coverage.yml b/.github/workflows/tests-coverage.yml index 0005070a1..5468cf565 100644 --- a/.github/workflows/tests-coverage.yml +++ b/.github/workflows/tests-coverage.yml @@ -23,7 +23,10 @@ jobs: python-version: 3.9 - name-suffix: "basic" os: ubuntu-latest - python-version: "3.10" + python-version: 3.10 + - name-suffix: "basic" + os: ubuntu-latest + python-version: 3.11 - name-suffix: "basic" os: windows-latest python-version: 3.9 diff --git a/edisgo/network/topology.py b/edisgo/network/topology.py index f411bf312..2462314f0 100755 --- a/edisgo/network/topology.py +++ b/edisgo/network/topology.py @@ -2123,7 +2123,7 @@ def _choose_random_substation_id(): """ if comp_type == "generator": - random.seed(a=comp_data["generator_id"]) + random.seed(a=int(comp_data["generator_id"])) elif comp_type == "storage_unit": random.seed(a=len(self.storage_units_df)) else: diff --git a/setup.py b/setup.py index 0ccb320b3..efe327250 100644 --- a/setup.py +++ b/setup.py @@ -56,7 +56,7 @@ def read(fname): "pypsa >= 0.17.0, <= 0.20.1", "pyyaml", "saio", - "scikit-learn <= 1.1.1", + "scikit-learn < 1.3.0", "shapely >= 1.7.0", "sqlalchemy < 1.4.0", "sshtunnel", From 55c54d3a8d5800e2adac1fcecf0d1c69d7f20d4e Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 16 Jul 2024 12:16:26 +0200 Subject: [PATCH 079/141] improve python3.11 compatibility --- .github/workflows/tests-coverage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests-coverage.yml b/.github/workflows/tests-coverage.yml index 5468cf565..6bcc81f6d 100644 --- 
a/.github/workflows/tests-coverage.yml +++ b/.github/workflows/tests-coverage.yml @@ -23,7 +23,7 @@ jobs: python-version: 3.9 - name-suffix: "basic" os: ubuntu-latest - python-version: 3.10 + python-version: "3.10" - name-suffix: "basic" os: ubuntu-latest python-version: 3.11 From 6c1ed8034636e21947dbaa7050e76225f81dff17 Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 16 Jul 2024 13:20:58 +0200 Subject: [PATCH 080/141] Update pypsa dependency to version 0.26.2 --- setup.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setup.py b/setup.py index efe327250..f04a4b97c 100644 --- a/setup.py +++ b/setup.py @@ -50,10 +50,9 @@ def read(fname): "plotly", "pydot", "pygeos", - "pyomo <= 6.4.2", # Problem with PyPSA 20.1 fixed in newest PyPSA release "pypower", "pyproj >= 3.0.0", - "pypsa >= 0.17.0, <= 0.20.1", + "pypsa == 0.26.2", "pyyaml", "saio", "scikit-learn < 1.3.0", From 5b0df1b904bd9a9104e7d64b22422c881de0f7fb Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 16 Jul 2024 14:14:44 +0200 Subject: [PATCH 081/141] improve python3.11 compatibility --- eDisGo_env.yml | 5 +++-- eDisGo_env_dev.yml | 5 +++-- rtd_requirements.txt | 7 +++---- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/eDisGo_env.yml b/eDisGo_env.yml index 06e2b03e6..bb82cfdac 100644 --- a/eDisGo_env.yml +++ b/eDisGo_env.yml @@ -3,7 +3,7 @@ channels: - conda-forge - defaults dependencies: - - python >= 3.9, < 3.11 + - python >= 3.9, <= 3.11 - pip - pandas >= 1.4, < 2.2.0 - conda-forge::fiona @@ -16,6 +16,7 @@ dependencies: - conda-forge::pygeos - conda-forge::contextily - conda-forge::descartes - - conda-forge::pypsa >= 0.17.0, <= 0.20.1 + - conda-forge::pypsa == 0.26.2 + - conda-forge::scikit-learn < 1.3.0 - pip: - eDisGo diff --git a/eDisGo_env_dev.yml b/eDisGo_env_dev.yml index eb9cb30df..988743029 100644 --- a/eDisGo_env_dev.yml +++ b/eDisGo_env_dev.yml @@ -3,7 +3,7 @@ channels: - conda-forge - defaults dependencies: - - python >= 3.9, < 3.11 + - python >= 3.9, <= 3.11 - 
pip - pandas >= 1.4, < 2.2.0 - conda-forge::fiona @@ -16,6 +16,7 @@ dependencies: - conda-forge::pygeos - conda-forge::contextily - conda-forge::descartes - - conda-forge::pypsa >= 0.17.0, <= 0.20.1 + - conda-forge::pypsa == 0.26.2 + - conda-forge::scikit-learn < 1.3.0 - pip: - -e .[dev] diff --git a/rtd_requirements.txt b/rtd_requirements.txt index dd3c393d5..d614ff5c0 100644 --- a/rtd_requirements.txt +++ b/rtd_requirements.txt @@ -1,5 +1,5 @@ dash < 2.9.0 -demandlib +demandlib < 0.2.0 egoio >= 0.4.7 geopy >= 2.0.0 jupyter_dash @@ -8,13 +8,12 @@ multiprocess networkx >= 2.5.0 pandas >= 1.4.0 plotly -pyomo >= 6.0 pypower pyproj >= 3.0.0 -pypsa >=0.17.0, <=0.20.1 +pypsa == 0.26.2 pyyaml saio -scikit-learn +scikit-learn < 1.3.0 sphinx sphinx_rtd_theme >=0.5.2 sphinx-autodoc-typehints From 7f2430aff10126b77814045f509e579211923a2f Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 16 Jul 2024 16:14:27 +0200 Subject: [PATCH 082/141] delete unused curtailment.py --- edisgo/flex_opt/curtailment.py | 782 --------------------------------- 1 file changed, 782 deletions(-) delete mode 100644 edisgo/flex_opt/curtailment.py diff --git a/edisgo/flex_opt/curtailment.py b/edisgo/flex_opt/curtailment.py deleted file mode 100644 index 484df444d..000000000 --- a/edisgo/flex_opt/curtailment.py +++ /dev/null @@ -1,782 +0,0 @@ -import logging - -import pandas as pd - -from pyomo.environ import ( - ConcreteModel, - Constraint, - Objective, - Param, - Set, - Var, - minimize, -) -from pyomo.opt import SolverFactory - -from edisgo.io import pypsa_io - - -def voltage_based( - feedin, generators, curtailment_timeseries, edisgo, curtailment_key, **kwargs -): - """ - Implements curtailment methodology 'voltage-based'. - - ToDo: adapt to refactored code! - - The curtailment that has to be met in each time step is allocated depending - on the exceedance of the allowed voltage deviation at the nodes of the - generators. The higher the exceedance, the higher the curtailment. 
- - The optional parameter `voltage_threshold` specifies the threshold for the - exceedance of the allowed voltage deviation above which a generator is - curtailed. By default it is set to zero, meaning that all generators at - nodes with voltage deviations that exceed the allowed voltage deviation are - curtailed. Generators at nodes where the allowed voltage deviation is not - exceeded are not curtailed. In the case that the required curtailment - exceeds the weather-dependent availability of all generators with voltage - deviations above the specified threshold, the voltage threshold is lowered - in steps of 0.01 p.u. until the curtailment target can be met. - - Above the threshold, the curtailment is proportional to the exceedance of - the allowed voltage deviation. In order to find the linear relation between - the curtailment and the voltage difference a linear problem is formulated - and solved using the python package pyomo. See documentation for further - information. - - Parameters - ---------- - feedin : :pandas:`pandas.DataFrame` - Dataframe holding the feed-in of each generator in kW for the - technology (and weather cell) specified in `curtailment_key` parameter. - Index of the dataframe is a - :pandas:`pandas.DatetimeIndex`. Columns are the - representatives of the fluctuating generators. - generators : :pandas:`pandas.DataFrame` - Dataframe with all generators of the type (and in weather cell) - specified in `curtailment_key` parameter. See return value of - :func:`edisgo.network.tools.get_gen_info` for more information. - curtailment_timeseries : :pandas:`pandas.Series` - The curtailment in kW to be distributed amongst the generators in - `generators` parameter. Index of the series is a - :pandas:`pandas.DatetimeIndex`. - edisgo : :class:`~.edisgo.EDisGo` - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment is specified for. 
- voltage_threshold: :obj:`float` - The node voltage below which no curtailment is assigned to the - respective generator if not necessary. Default: 0.0. - solver: :obj:`str` - The solver used to optimize the curtailment assigned to the generator. - Possible options are: - - * 'cbc' - coin-or branch and cut solver - * 'glpk' - gnu linear programming kit solver - * any other available compatible with 'pyomo' like 'gurobi' - or 'cplex' - - Default: 'cbc' - - """ - - raise NotImplementedError - - voltage_threshold = pd.Series( - kwargs.get("voltage_threshold", 0.0), - index=curtailment_timeseries.index, - ) - solver = kwargs.get("solver", "cbc") - combined_analysis = kwargs.get("combined_analysis", False) - - # get the voltages at the generators - if not edisgo.network.pypsa.edisgo_mode: - voltages_lv_gens = edisgo.network.results.v_res( - nodes_df=generators.loc[(generators.voltage_level == "lv")].index, - level="lv", - ) - else: - # if only MV topology was analyzed (edisgo_mode = 'mv') all LV - # generators are assigned the voltage at the corresponding station's - # primary side - lv_gens = generators[generators.voltage_level == "lv"] - voltages_lv_stations = edisgo.network.results.v_res( - nodes_df=[_.station for _ in lv_gens.grid.unique()], level="mv" - ) - voltages_lv_gens = pd.DataFrame() - for lv_gen in lv_gens.index: - voltages_lv_gens[repr(lv_gen)] = voltages_lv_stations[ - repr(lv_gen.grid.station) - ] - voltages_mv_gens = edisgo.network.results.v_res( - nodes_df=generators.loc[(generators.voltage_level == "mv")].index, - level="mv", - ) - voltages_gens = voltages_lv_gens.join(voltages_mv_gens) - - # get allowed voltage deviations from config - if not combined_analysis: - allowed_voltage_dev_mv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["mv_feed-in_case_max_v_deviation"] - allowed_voltage_diff_lv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["lv_feed-in_case_max_v_deviation"] - else: - 
allowed_voltage_dev_mv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["mv_lv_feed-in_case_max_v_deviation"] - allowed_voltage_diff_lv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["mv_lv_feed-in_case_max_v_deviation"] - - # assign allowed voltage deviation to each generator - if not edisgo.network.pypsa.edisgo_mode: - # for edisgo_mode = None - - # get voltages at stations - grids = list(set(generators.grid)) - lv_stations = [_.station for _ in grids if "LVStation" in repr(_.station)] - voltage_lv_stations = edisgo.network.results.v_res( - nodes_df=lv_stations, level="lv" - ) - voltages_mv_station = edisgo.network.results.v_res( - nodes_df=[edisgo.network.mv_grid.station], level="mv" - ) - voltages_stations = voltage_lv_stations.join(voltages_mv_station) - - # assign allowed voltage deviation - generators["allowed_voltage_dev"] = generators.voltage_level.apply( - lambda _: allowed_voltage_diff_lv if _ == "lv" else allowed_voltage_dev_mv - ) - - # calculate voltage difference from generator node to station - voltage_gens_diff = pd.DataFrame() - for gen in voltages_gens.columns: - station = generators[generators.gen_repr == gen].grid.values[0].station - voltage_gens_diff[gen] = ( - voltages_gens.loc[:, gen] - - voltages_stations.loc[:, repr(station)] - - generators[generators.gen_repr == gen].allowed_voltage_dev.iloc[0] - ) - - else: - # for edisgo_mode = 'mv' - - station = edisgo.network.mv_grid.station - # get voltages at HV/MV station - voltages_station = edisgo.network.results.v_res(nodes_df=[station], level="mv") - - # assign allowed voltage deviation - generators["allowed_voltage_dev"] = allowed_voltage_dev_mv - - # calculate voltage difference from generator node to station - voltage_gens_diff = pd.DataFrame() - for gen in voltages_gens.columns: - voltage_gens_diff[gen] = ( - voltages_gens.loc[:, gen] - - voltages_station.loc[:, repr(station)] - - generators[generators.gen_repr == 
gen].allowed_voltage_dev.iloc[0] - ) - - # for every time step check if curtailment can be fulfilled, otherwise - # reduce voltage threshold; set feed-in of generators below voltage - # threshold to zero, so that they cannot be curtailed - for ts in curtailment_timeseries.index: - # get generators with voltage higher than threshold - gen_pool = voltage_gens_diff.loc[ - ts, voltage_gens_diff.loc[ts, :] > voltage_threshold.loc[ts] - ].index - # if curtailment cannot be fulfilled lower voltage threshold - while sum(feedin.loc[ts, gen_pool]) < curtailment_timeseries.loc[ts]: - voltage_threshold.loc[ts] = voltage_threshold.loc[ts] - 0.01 - gen_pool = voltage_gens_diff.loc[ - ts, voltage_gens_diff.loc[ts, :] > voltage_threshold.loc[ts] - ].index - # set feed-in of generators below voltage threshold to zero, so that - # they cannot be curtailed - gen_pool_out = voltage_gens_diff.loc[ - ts, voltage_gens_diff.loc[ts, :] <= voltage_threshold.loc[ts] - ].index - feedin.loc[ts, gen_pool_out] = 0 - - # only optimize for time steps where curtailment is greater than zero - timeindex = curtailment_timeseries[curtailment_timeseries > 0].index - if not timeindex.empty: - curtailment = _optimize_voltage_based_curtailment( - feedin, - voltage_gens_diff, - curtailment_timeseries, - voltage_threshold, - timeindex, - solver, - ) - else: - curtailment = pd.DataFrame() - - # set curtailment for other time steps to zero - curtailment = pd.concat( - [ - curtailment, - pd.DataFrame( - 0, - columns=feedin.columns, - index=curtailment_timeseries[curtailment_timeseries <= 0].index, - ), - ] - ) - - # check if curtailment target was met - _check_curtailment_target(curtailment, curtailment_timeseries, curtailment_key) - - # assign curtailment to individual generators - _assign_curtailment(curtailment, edisgo, generators, curtailment_key) - - -def _optimize_voltage_based_curtailment( - feedin, voltage_pu, total_curtailment, voltage_threshold, timeindex, solver -): - """ - Formulates and solves 
linear problem to find linear relation between - curtailment and node voltage. - - ToDo: adapt to refactored code! - - Parameters - ------------ - feedin : :pandas:`pandas.DataFrame` - See `feedin` parameter in - :func:`edisgo.flex_opt.curtailment.voltage_based` for more information. - voltage_pu : :pandas:`pandas.DataFrame - Dataframe containing voltages in p.u. at the generator nodes. Index - of the dataframe is a :pandas:`pandas.DatetimeIndex`, - columns are the generator representatives. - total_curtailment : :pandas:`pandas.Series` - Series containing the specific curtailment in kW to be allocated to the - generators. The index is a - :pandas:`pandas.DatetimeIndex`. - voltage_threshold : :pandas:`pandas.Series` - Series containing the voltage thresholds in p.u. below which no - generator curtailment will occur. The index is a - :pandas:`pandas.DatetimeIndex`. - solver : :obj:`str` - The solver used to optimize the linear problem. Default: 'cbc'. - - Returns - ------- - :pandas:`pandas.DataFrame` - Dataframe containing the curtailment in kW per generator and time step - feed-in was provided for in `feedin` parameter. Index is a - :pandas:`pandas.DatetimeIndex`, columns are the - generator representatives. 
- - """ - - raise NotImplementedError - - logging.debug("Start curtailment optimization.") - - v_max = voltage_pu.max(axis=1) - generators = feedin.columns - - # additional curtailment factors - cf_add = pd.DataFrame(index=timeindex) - for gen in generators: - cf_add[gen] = abs( - (voltage_pu.loc[timeindex, gen] - v_max[timeindex]) - / (voltage_threshold[timeindex] - v_max[timeindex]) - ) - - # curtailment factors - cf = pd.DataFrame(index=timeindex) - for gen in generators: - cf[gen] = abs( - (voltage_pu.loc[timeindex, gen] - voltage_threshold[timeindex]) - / (v_max[timeindex] - voltage_threshold[timeindex]) - ) - - # initialize model - model = ConcreteModel() - - # add sets - model.T = Set(initialize=timeindex) - model.G = Set(initialize=generators) - - # add parameters - def feedin_init(model, t, g): - return feedin.loc[t, g] - - model.feedin = Param(model.T, model.G, initialize=feedin_init) - - def voltage_pu_init(model, t, g): - return voltage_pu.loc[t, g] - - model.voltage_pu = Param(model.T, model.G, initialize=voltage_pu_init) - - def cf_add_init(model, t, g): - return cf_add.loc[t, g] - - model.cf_add = Param(model.T, model.G, initialize=cf_add_init) - - def cf_init(model, t, g): - return cf.loc[t, g] - - model.cf = Param(model.T, model.G, initialize=cf_init) - - def total_curtailment_init(model, t): - return total_curtailment.loc[t] - - model.total_curtailment = Param(model.T, initialize=total_curtailment_init) - - # add variables - model.offset = Var(model.T, bounds=(0, 1)) - model.cf_max = Var(model.T, bounds=(0, 1)) - - def curtailment_init(model, t, g): - return (0, feedin.loc[t, g]) - - model.c = Var(model.T, model.G, bounds=curtailment_init) - - # add objective - def obj_rule(model): - expr = sum(model.offset[t] * 100 for t in model.T) - return expr - - model.obj = Objective(rule=obj_rule, sense=minimize) - - # add constraints - # curtailment per generator constraints - def curtail(model, t, g): - return ( - model.cf[t, g] * model.cf_max[t] * 
model.feedin[t, g] - + model.cf_add[t, g] * model.offset[t] * model.feedin[t, g] - - model.c[t, g] - == 0 - ) - - model.curtailment = Constraint(model.T, model.G, rule=curtail) - - # total curtailment constraint - def total_curtailment(model, t): - return sum(model.c[t, g] for g in model.G) == model.total_curtailment[t] - - model.sum_curtailment = Constraint(model.T, rule=total_curtailment) - - # solve - solver = SolverFactory(solver) - results = solver.solve(model, tee=False) - - # load results back into model - model.solutions.load_from(results) - - return pd.DataFrame( - {g: [model.c[t, g].value for t in model.T] for g in model.G}, - index=model.T, - ) - - -def feedin_proportional( - feedin, generators, curtailment_timeseries, edisgo, curtailment_key, **kwargs -): - """ - Implements curtailment methodology 'feedin-proportional'. - - ToDo: adapt to refactored code! - - The curtailment that has to be met in each time step is allocated - equally to all generators depending on their share of total - feed-in in that time step. - - Parameters - ---------- - feedin : :pandas:`pandas.DataFrame` - Dataframe holding the feed-in of each generator in kW for the - technology (and weather cell) specified in `curtailment_key` parameter. - Index of the dataframe is a - :pandas:`pandas.DatetimeIndex`. Columns are the - representatives of the fluctuating generators. - generators : :pandas:`pandas.DataFrame` - Dataframe with all generators of the type (and in weather cell) - specified in `curtailment_key` parameter. See return value of - :func:`edisgo.network.tools.get_gen_info` for more information. - curtailment_timeseries : :pandas:`pandas.Series` - The curtailment in kW to be distributed amongst the generators in - `generators` parameter. Index of the series is a - :pandas:`pandas.DatetimeIndex`. 
- edisgo : :class:`~.edisgo.EDisGo` - curtailment_key::obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment is specified for. - - """ - raise NotImplementedError - - # calculate curtailment in each time step of each generator - curtailment = feedin.divide(feedin.sum(axis=1), axis=0).multiply( - curtailment_timeseries, axis=0 - ) - - # substitute NaNs from division with 0 by 0 - curtailment.fillna(0, inplace=True) - - # check if curtailment target was met - _check_curtailment_target(curtailment, curtailment_timeseries, curtailment_key) - - # assign curtailment to individual generators - _assign_curtailment(curtailment, edisgo, generators, curtailment_key) - - -def _check_curtailment_target(curtailment, curtailment_target, curtailment_key): - """ - Raises an error if curtailment target was not met in any time step. - - ToDo: adapt to refactored code! - - Parameters - ----------- - curtailment : :pandas:`pandas.DataFrame` - Dataframe containing the curtailment in kW per generator and time step. - Index is a :pandas:`pandas.DatetimeIndex`, columns are - the generator representatives. - curtailment_target : :pandas:`pandas.Series` - The curtailment in kW that was to be distributed amongst the - generators. Index of the series is a - :pandas:`pandas.DatetimeIndex`. - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment was specified for. - - """ - raise NotImplementedError - - if not (abs(curtailment.sum(axis=1) - curtailment_target) < 1e-1).all(): - message = "Curtailment target not met for {}.".format(curtailment_key) - logging.error(message) - raise TypeError(message) - - -def _assign_curtailment(curtailment, edisgo, generators, curtailment_key): - """ - Helper function to write curtailment time series to generator objects. - - ToDo: adapt to refactored code! 
- - This function also writes a list of the curtailed generators to curtailment - in :class:`edisgo.network.network.TimeSeries` and - :class:`edisgo.network.network.Results`. - - Parameters - ---------- - curtailment : :pandas:`pandas.DataFrame` - Dataframe containing the curtailment in kW per generator and time step - for all generators of the type (and in weather cell) specified in - `curtailment_key` parameter. Index is a - :pandas:`pandas.DatetimeIndex`, columns are the - generator representatives. - edisgo : :class:`~.edisgo.EDisGo` - generators : :pandas:`pandas.DataFrame` - Dataframe with all generators of the type (and in weather cell) - specified in `curtailment_key` parameter. See return value of - :func:`edisgo.network.tools.get_gen_info` for more information. - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment is specified for. - - """ - raise NotImplementedError - - gen_object_list = [] - for gen in curtailment.columns: - # get generator object from representative - gen_object = generators.loc[generators.gen_repr == gen].index[0] - # assign curtailment to individual generators - gen_object.curtailment = curtailment.loc[:, gen] - gen_object_list.append(gen_object) - - # set timeseries.curtailment - if edisgo.network.timeseries._curtailment: - edisgo.network.timeseries._curtailment.extend(gen_object_list) - edisgo.network.results._curtailment[curtailment_key] = gen_object_list - else: - edisgo.network.timeseries._curtailment = gen_object_list - # list needs to be copied, otherwise it will be extended every time - # a new key is added to results._curtailment - edisgo.network.results._curtailment = {curtailment_key: gen_object_list.copy()} - - -class CurtailmentControl: - """ - Allocates given curtailment targets to solar and wind generators. - - ToDo: adapt to refactored code! 
- - Parameters - ---------- - edisgo: :class:`edisgo.EDisGo` - The parent EDisGo object that this instance is a part of. - methodology : :obj:`str` - Defines the curtailment strategy. Possible options are: - - * 'feedin-proportional' - The curtailment that has to be met in each time step is allocated - equally to all generators depending on their share of total - feed-in in that time step. For more information see - :func:`edisgo.flex_opt.curtailment.feedin_proportional`. - * 'voltage-based' - The curtailment that has to be met in each time step is allocated - based on the voltages at the generator connection points and a - defined voltage threshold. Generators at higher voltages - are curtailed more. The default voltage threshold is 1.0 but - can be changed by providing the argument 'voltage_threshold'. This - method formulates the allocation of curtailment as a linear - optimization problem using :py:mod:`Pyomo` and requires a linear - programming solver like coin-or cbc (cbc) or gnu linear programming - kit (glpk). The solver can be specified through the parameter - 'solver'. For more information see - :func:`edisgo.flex_opt.curtailment.voltage_based`. - - curtailment_timeseries : :pandas:`pandas.Series` or \ - :pandas:`pandas.DataFrame`, optional - Series or DataFrame containing the curtailment time series in kW. Index - needs to be a :pandas:`pandas.DatetimeIndex`. - Provide a Series if the curtailment time series applies to wind and - solar generators. Provide a DataFrame if the curtailment time series - applies to a specific technology and optionally weather cell. In the - first case columns of the DataFrame are e.g. 'solar' and 'wind'; in the - second case columns need to be a - :pandas:`pandas.MultiIndex` with the first level containing - the type and the second level the weather cell ID. Default: None. - solver: :obj:`str` - The solver used to optimize the curtailment assigned to the generators - when 'voltage-based' curtailment methodology is chosen. 
- Possible options are: - - * 'cbc' - * 'glpk' - * any other available solver compatible with 'pyomo' such as 'gurobi' - or 'cplex' - - Default: 'cbc'. - voltage_threshold : :obj:`float` - Voltage below which no curtailment is assigned to the respective - generator if not necessary when 'voltage-based' curtailment methodology - is chosen. See :func:`edisgo.flex_opt.curtailment.voltage_based` for - more information. Default: 1.0. - mode : :obj:`str` - The `mode` is only relevant for curtailment method 'voltage-based'. - Possible options are None and 'mv'. Per default `mode` is None in which - case a power flow is conducted for both the MV and LV. In case `mode` - is set to 'mv' components in underlying LV grids are considered - aggregative. Default: None. - - """ - - # ToDo move some properties from topology here (e.g. peak_load, generators,...) - def __init__( - self, edisgo, methodology, curtailment_timeseries, mode=None, **kwargs - ): - raise NotImplementedError - - logging.info("Start curtailment methodology {}.".format(methodology)) - - self._check_timeindex(curtailment_timeseries, edisgo.topology) - - if methodology == "feedin-proportional": - curtailment_method = feedin_proportional - elif methodology == "voltage-based": - curtailment_method = voltage_based - else: - raise ValueError( - "{} is not a valid curtailment methodology.".format(methodology) - ) - - # check if provided mode is valid - if mode and mode != "mv": - raise ValueError("Provided mode {} is not a valid mode.") - - # get all fluctuating generators and their attributes (weather ID, - # type, etc.) 
- # TODO: Function get_gen_info does not exist - generators = get_gen_info( # noqa: F821 - edisgo.topology, "mvlv", fluctuating=True - ) - - # do analyze to get all voltages at generators and feed-in dataframe - edisgo.analyze(mode=mode) - - # get feed-in time series of all generators - if not mode: - feedin = edisgo.topology.pypsa.generators_t.p * 1000 - # drop dispatchable generators and slack generator - drop_labels = [ - _ for _ in feedin.columns if "GeneratorFluctuating" not in _ - ] + ["Generator_slack"] - else: - feedin = edisgo.topology.mv_grid.generators_timeseries() - for grid in edisgo.topology.mv_grid.lv_grids: - feedin = pd.concat([feedin, grid.generators_timeseries()], axis=1) - feedin.rename(columns=lambda _: repr(_), inplace=True) - # drop dispatchable generators - drop_labels = [_ for _ in feedin.columns if "GeneratorFluctuating" not in _] - feedin.drop(labels=drop_labels, axis=1, inplace=True) - - if isinstance(curtailment_timeseries, pd.Series): - # check if curtailment exceeds feed-in - self._precheck(curtailment_timeseries, feedin, "all_fluctuating_generators") - - # do curtailment - curtailment_method( - feedin, - generators, - curtailment_timeseries, - edisgo, - "all_fluctuating_generators", - **kwargs - ) - - elif isinstance(curtailment_timeseries, pd.DataFrame): - for col in curtailment_timeseries.columns: - logging.debug("Calculating curtailment for {}".format(col)) - - # filter generators - if isinstance(curtailment_timeseries.columns, pd.MultiIndex): - selected_generators = generators.loc[ - (generators.type == col[0]) - & (generators.weather_cell_id == col[1]) - ] - else: - selected_generators = generators.loc[(generators.type == col)] - - # check if curtailment exceeds feed-in - feedin_selected_generators = feedin.loc[ - :, selected_generators.gen_repr.values - ] - self._precheck( - curtailment_timeseries.loc[:, col], - feedin_selected_generators, - col, - ) - - # do curtailment - if not feedin_selected_generators.empty: - 
curtailment_method( - feedin_selected_generators, - selected_generators, - curtailment_timeseries.loc[:, col], - edisgo, - col, - **kwargs - ) - - # check if curtailment exceeds feed-in - self._postcheck(edisgo.topology, feedin) - - # update generator time series in pypsa topology - if edisgo.topology.pypsa is not None: - pypsa_io.update_pypsa_generator_timeseries(edisgo.topology) - - # add measure to Results object - edisgo.results.measures = "curtailment" - - def _check_timeindex(self, curtailment_timeseries, network): - """ - Raises an error if time index of curtailment time series does not - comply with the time index of load and feed-in time series. - - Parameters - ----------- - curtailment_timeseries : :pandas:`pandas.Series` or \ - :pandas:`pandas.DataFrame` - See parameter `curtailment_timeseries` in class definition for more - information. - - """ - raise NotImplementedError - - if curtailment_timeseries is None: - message = "No curtailment given." - logging.error(message) - raise KeyError(message) - try: - curtailment_timeseries.loc[network.timeseries.timeindex] - except Exception: - message = ( - "Time index of curtailment time series does not match " - "with load and feed-in time series." - ) - logging.error(message) - raise KeyError(message) - - def _precheck(self, curtailment_timeseries, feedin_df, curtailment_key): - """ - Raises an error if the curtailment at any time step exceeds the - total feed-in of all generators curtailment can be distributed among - at that time. - - Parameters - ----------- - curtailment_timeseries : :pandas:`pandas.Series` - Curtailment time series in kW for the technology (and weather - cell) specified in `curtailment_key`. - feedin_df : :pandas:`pandas.Series` - Feed-in time series in kW for all generators of type (and in - weather cell) specified in `curtailment_key`. - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - Technology (and weather cell) curtailment is given for. 
- - """ - raise NotImplementedError - - if not feedin_df.empty: - feedin_selected_sum = feedin_df.sum(axis=1) - diff = feedin_selected_sum - curtailment_timeseries - # add tolerance (set small negative values to zero) - diff[diff.between(-1, 0)] = 0 - if not (diff >= 0).all(): - bad_time_steps = [_ for _ in diff.index if diff[_] < 0] - message = ( - "Curtailment demand exceeds total feed-in in time " - "steps {}.".format(bad_time_steps) - ) - logging.error(message) - raise ValueError(message) - else: - bad_time_steps = [ - _ for _ in curtailment_timeseries.index if curtailment_timeseries[_] > 0 - ] - if bad_time_steps: - message = ( - "Curtailment given for time steps {} but there " - "are no generators to meet the curtailment target " - "for {}.".format(bad_time_steps, curtailment_key) - ) - logging.error(message) - raise ValueError(message) - - def _postcheck(self, network, feedin): - """ - Raises an error if the curtailment of a generator exceeds the - feed-in of that generator at any time step. - - Parameters - ----------- - network : :class:`~.network.topology.Topology` - feedin : :pandas:`pandas.DataFrame` - DataFrame with feed-in time series in kW. Columns of the dataframe - are :class:`~.network.components.GeneratorFluctuating`, index is - time index. - - """ - raise NotImplementedError - - curtailment = network.timeseries.curtailment - gen_repr = [repr(_) for _ in curtailment.columns] - feedin_repr = feedin.loc[:, gen_repr] - curtailment_repr = curtailment - curtailment_repr.columns = gen_repr - if not ((feedin_repr - curtailment_repr) > -1e-1).all().all(): - message = "Curtailment exceeds feed-in." 
- logging.error(message) - raise TypeError(message) From e18809ae8c0ced704eb19d6c4c12be1cd7fe230b Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 16 Jul 2024 16:14:50 +0200 Subject: [PATCH 083/141] delete unused storage_positioning.py --- edisgo/flex_opt/storage_positioning.py | 707 ------------------------- 1 file changed, 707 deletions(-) delete mode 100644 edisgo/flex_opt/storage_positioning.py diff --git a/edisgo/flex_opt/storage_positioning.py b/edisgo/flex_opt/storage_positioning.py deleted file mode 100644 index b0a7015da..000000000 --- a/edisgo/flex_opt/storage_positioning.py +++ /dev/null @@ -1,707 +0,0 @@ -import logging - -from math import ceil, sqrt - -import networkx as nx -import numpy as np -import pandas as pd - -from networkx.algorithms.shortest_paths.weighted import ( - _dijkstra as dijkstra_shortest_path_length, -) - -from edisgo.flex_opt import check_tech_constraints, costs -from edisgo.tools import plots, tools - -logger = logging.getLogger(__name__) - - -def one_storage_per_feeder( - edisgo, storage_timeseries, storage_nominal_power=None, **kwargs -): - """ - Allocates the given storage capacity to multiple smaller storages. - - ToDo: adapt to refactored code! - - For each feeder with load or voltage issues it is checked if integrating a - storage will reduce peaks in the feeder, starting with the feeder with - the highest theoretical network expansion costs. A heuristic approach is used - to estimate storage sizing and siting while storage operation is carried - over from the given storage operation. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - storage_timeseries : :pandas:`pandas.DataFrame` - Total active and reactive power time series that will be allocated to - the smaller storages in feeders with load or voltage issues. Columns of - the dataframe are 'p' containing active power time series in kW and 'q' - containing the reactive power time series in kvar. Index is a - :pandas:`pandas.DatetimeIndex`. 
- storage_nominal_power : :obj:`float` or None - Nominal power in kW that will be allocated to the smaller storages in - feeders with load or voltage issues. If no nominal power is provided - the maximum active power given in `storage_timeseries` is used. - Default: None. - debug : :obj:`Boolean`, optional - If dedug is True a dataframe with storage size and path to storage of - all installed and possibly discarded storages is saved to a csv file - and a plot with all storage positions is created and saved, both to the - current working directory with filename `storage_results_{MVgrid_id}`. - Default: False. - check_costs_reduction : :obj:`Boolean` or :obj:`str`, optional - This parameter specifies when and whether it should be checked if a - storage reduced network expansion costs or not. It can be used as a safety - check but can be quite time consuming. Possible options are: - - * 'each_feeder' - Costs reduction is checked for each feeder. If the storage did not - reduce network expansion costs it is discarded. - * 'once' - Costs reduction is checked after the total storage capacity is - allocated to the feeders. If the storages did not reduce network - expansion costs they are all discarded. - * False - Costs reduction is never checked. - - Default: False. - - """ - - def _feeder_ranking(grid_expansion_costs): - """ - Get feeder ranking from network expansion costs DataFrame. - - MV feeders are ranked descending by network expansion costs that are - attributed to that feeder. - - Parameters - ---------- - grid_expansion_costs : :pandas:`pandas.DataFrame` - grid_expansion_costs DataFrame from :class:`~.network.network.Results` - of the copied edisgo object. - - Returns - ------- - :pandas:`pandas.Series` - Series with ranked MV feeders (in the copied graph) of type - :class:`~.network.components.Line`. Feeders are ranked by total network - expansion costs of all measures conducted in the feeder. 
The - feeder with the highest costs is in the first row and the feeder - with the lowest costs in the last row. - - """ - return ( - grid_expansion_costs.groupby(["mv_feeder"], sort=False) - .sum() - .reset_index() - .sort_values(by=["total_costs"], ascending=False)["mv_feeder"] - ) - - def _shortest_path(node): - # TODO: LVStation class is not used anymore - # resolve this when storage positioning is refactored - if isinstance(node, LVStation): # noqa: F821 - return len(nx.shortest_path(node.mv_grid.graph, node.mv_grid.station, node)) - else: - return len(nx.shortest_path(node.grid.graph, node.grid.station, node)) - - def _find_battery_node(edisgo, critical_lines_feeder, critical_nodes_feeder): - """ - Evaluates where to install the storage. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. - critical_lines_feeder : :pandas:`pandas.DataFrame` - Dataframe containing over-loaded lines in MV feeder, their maximum - relative over-loading and the corresponding time step. See - :func:`edisgo.flex_opt.check_tech_constraints.mv_line_overload` for - more information. - critical_nodes_feeder : :obj:`list` - List with all nodes in MV feeder with voltage issues. - - Returns - ------- - :obj:`float` - Node where storage is installed. 
- - """ - - # if there are overloaded lines in the MV feeder the battery storage - # will be installed at the node farthest away from the MV station - if not critical_lines_feeder.empty: - logger.debug("Storage positioning due to overload.") - # dictionary with nodes and their corresponding path length to - # MV station - path_length_dict = {} - for line in critical_lines_feeder.index: - nodes = line.grid.graph.nodes_from_line(line) - for node in nodes: - path_length_dict[node] = _shortest_path(node) - # return node farthest away - return [ - _ - for _ in path_length_dict - if path_length_dict[_] == max(path_length_dict.values()) - ][0] - - # if there are voltage issues in the MV network the battery storage will - # be installed at the first node in path that exceeds 2/3 of the line - # length from station to critical node with highest voltage deviation - if critical_nodes_feeder: - logger.debug("Storage positioning due to voltage issues.") - node = critical_nodes_feeder[0] - - # get path length from station to critical node - get_weight = lambda u, v, data: data["line"].length # noqa: E731 - path_length = dijkstra_shortest_path_length( - edisgo.network.mv_grid.graph, - edisgo.network.mv_grid.station, - get_weight, - target=node, - ) - - # find first node in path that exceeds 2/3 of the line length - # from station to critical node farthest away from the station - path = nx.shortest_path( - edisgo.network.mv_grid.graph, - edisgo.network.mv_grid.station, - node, - ) - return next(j for j in path if path_length[j] >= path_length[node] * 2 / 3) - - return None - - def _calc_storage_size(edisgo, feeder, max_storage_size): - """ - Calculates storage size that reduces residual load. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. - feeder : :class:`~.network.components.Line` - MV feeder the storage will be connected to. The line object is an - object from the copied graph. 
- - Returns - ------- - :obj:`float` - Storage size that reduced the residual load in the feeder. - - """ - step_size = 200 - sizes = [0] + list( - np.arange(p_storage_min, max_storage_size + 0.5 * step_size, step_size) - ) - p_feeder = edisgo.network.results.pfa_p.loc[:, repr(feeder)] - q_feeder = edisgo.network.results.pfa_q.loc[:, repr(feeder)] - p_slack = edisgo.network.pypsa.generators_t.p.loc[:, "Generator_slack"] * 1e3 - - # get sign of p and q - lines = edisgo.network.pypsa.lines.loc[repr(feeder), :] - mv_station_bus = ( - "bus0" - if lines.loc["bus0"] == f"Bus_{repr(edisgo.network.mv_grid.station)}" - else "bus1" - ) - if mv_station_bus == "bus0": - diff = ( - edisgo.network.pypsa.lines_t.p1.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.p0.loc[:, repr(feeder)] - ) - diff_q = ( - edisgo.network.pypsa.lines_t.q1.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.q0.loc[:, repr(feeder)] - ) - else: - diff = ( - edisgo.network.pypsa.lines_t.p0.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.p1.loc[:, repr(feeder)] - ) - diff_q = ( - edisgo.network.pypsa.lines_t.q0.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.q1.loc[:, repr(feeder)] - ) - p_sign = pd.Series([-1 if _ < 0 else 1 for _ in diff], index=p_feeder.index) - q_sign = pd.Series([-1 if _ < 0 else 1 for _ in diff_q], index=p_feeder.index) - - # get allowed load factors per case - lf = { - "feed-in_case": edisgo.network.config["grid_expansion_load_factors"][ - "mv_feed-in_case_line" - ], - "load_case": network.config["grid_expansion_load_factors"][ - "mv_load_case_line" - ], - } - - # calculate maximum apparent power for each storage size to find - # storage size that minimizes apparent power in the feeder - p_feeder = p_feeder.multiply(p_sign) - q_feeder = q_feeder.multiply(q_sign) - s_max = [] - for size in sizes: - share = size / storage_nominal_power - p_storage = storage_timeseries.p * share - q_storage = storage_timeseries.q * share - p_total = p_feeder + p_storage - q_total 
= q_feeder + q_storage - p_hv_mv_station = p_slack - p_storage - lf_ts = p_hv_mv_station.apply( - lambda _: lf["feed-in_case"] if _ < 0 else lf["load_case"] - ) - s_max_ts = (p_total**2 + q_total**2).apply(sqrt).divide(lf_ts) - s_max.append(max(s_max_ts)) - - return sizes[pd.Series(s_max).idxmin()] - - def _critical_nodes_feeder(edisgo, feeder): - """ - Returns all nodes in MV feeder with voltage issues. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. - feeder : :class:`~.network.components.Line` - MV feeder the storage will be connected to. The line object is an - object from the copied graph. - - Returns - ------- - :obj:`list` - List with all nodes in MV feeder with voltage issues. - - """ - # get all nodes with voltage issues in MV network - critical_nodes = check_tech_constraints.voltage_issues( - edisgo.network, voltage_levels="mv" - ) - if critical_nodes: - critical_nodes = critical_nodes[edisgo.network.mv_grid] - else: - return [] - - return [n for n in critical_nodes.index if repr(n.mv_feeder) == repr(feeder)] - - def _critical_lines_feeder(edisgo, feeder): - """ - Returns all lines in MV feeder with overload issues. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. - feeder : :class:`~.network.components.Line` - MV feeder the storage will be connected to. The line object is an - object from the copied graph. - - Returns - ------- - :pandas:`pandas.DataFrame` - Dataframe containing over-loaded lines in MV feeder, their maximum - relative over-loading and the corresponding time step. See - :func:`edisgo.flex_opt.check_tech_constraints.mv_line_overload` for - more information. 
- - """ - # return grid_expansion_costs_feeder_ranking[ - # (grid_expansion_costs_feeder_ranking.mv_feeder == feeder) & - # (grid_expansion_costs_feeder_ranking.voltage_level == 'mv')] - # get all overloaded MV lines - critical_lines = check_tech_constraints.mv_line_overload(edisgo.network) - # filter overloaded lines in feeder - critical_lines_feeder = [ - line - for line in critical_lines.index - if repr(tools.get_mv_feeder_from_line(line)) == repr(feeder) - ] - - return critical_lines.loc[critical_lines_feeder, :] - - def _estimate_new_number_of_lines(critical_lines_feeder): - return sum( - ( - ceil( - critical_lines_feeder.loc[crit_line, "max_rel_overload"] - * crit_line.quantity - ) - - crit_line.quantity - ) - for crit_line in critical_lines_feeder.index - ) - - raise NotImplementedError - - debug = kwargs.get("debug", False) - check_costs_reduction = kwargs.get("check_costs_reduction", False) - - # global variables - # minimum and maximum storage power to be connected to the MV network - p_storage_min = 300 - p_storage_max = 4500 - - # remaining storage nominal power - if storage_nominal_power is None: - storage_nominal_power = max(abs(storage_timeseries.p)) - p_storage_remaining = storage_nominal_power - - if debug: - feeder_repr = [] - storage_path = [] - storage_repr = [] - storage_size = [] - - # rank MV feeders by network expansion costs - - # conduct network reinforcement on copied edisgo object on worst-case time - # steps - grid_expansion_results_init = edisgo.reinforce( - copy_graph=True, timesteps_pfa="snapshot_analysis", mode="mv" - ) - - # only analyse storage integration if there were any network expansion needs - if grid_expansion_results_init.equipment_changes.empty: - logger.debug( - "No storage integration necessary since there are no " - "network expansion needs." 
- ) - return - else: - equipment_changes_reinforcement_init = ( - grid_expansion_results_init.equipment_changes.loc[ - grid_expansion_results_init.equipment_changes.iteration_step > 0 - ] - ) - total_grid_expansion_costs = ( - grid_expansion_results_init.grid_expansion_costs.total_costs.sum() - ) - if equipment_changes_reinforcement_init.empty: - logger.debug( - "No storage integration necessary since there are no " - "network expansion needs." - ) - return - else: - network = equipment_changes_reinforcement_init.index[0].grid.network - - # calculate network expansion costs without costs for new generators - # to be used in feeder ranking - grid_expansion_costs_feeder_ranking = costs.grid_expansion_costs( - network, without_generator_import=True, mode="mv" - ) - - ranked_feeders = _feeder_ranking(grid_expansion_costs_feeder_ranking) - - count = 1 - storage_obj_list = [] - total_grid_expansion_costs_new = "not calculated" - for feeder in ranked_feeders.values: - logger.debug("Feeder: {}".format(count)) - count += 1 - - # first step: find node where storage will be installed - - critical_nodes_feeder = _critical_nodes_feeder(edisgo, feeder) - critical_lines_feeder = _critical_lines_feeder(edisgo, feeder) - - # get node the storage will be connected to (in original graph) - battery_node = _find_battery_node( - edisgo, critical_lines_feeder, critical_nodes_feeder - ) - - if battery_node: - # add to output lists - if debug: - feeder_repr.append(repr(feeder)) - storage_path.append( - nx.shortest_path( - edisgo.network.mv_grid.graph, - edisgo.network.mv_grid.station, - battery_node, - ) - ) - - # second step: calculate storage size - - max_storage_size = min(p_storage_remaining, p_storage_max) - p_storage = _calc_storage_size(edisgo, feeder, max_storage_size) - - # if p_storage is greater than or equal to the minimum storage - # power required, do storage integration - if p_storage >= p_storage_min: - # third step: integrate storage - - share = p_storage / 
storage_nominal_power - edisgo.integrate_storage( - timeseries=storage_timeseries.p * share, - position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q * share, - ) - tools.assign_mv_feeder_to_nodes(edisgo.network.mv_grid) - - # get new storage object - storage_obj = [ - _ - for _ in edisgo.network.mv_grid.graph.nodes_by_attribute("storage") - if _ in list(edisgo.network.mv_grid.graph.neighbors(battery_node)) - ][0] - storage_obj_list.append(storage_obj) - - logger.debug( - "Storage with nominal power of {} kW connected to " - "node {} (path to HV/MV station {}).".format( - p_storage, - battery_node, - nx.shortest_path( - battery_node.grid.graph, - battery_node.grid.station, - battery_node, - ), - ) - ) - - # fourth step: check if storage integration reduced network - # reinforcement costs or number of issues - - if check_costs_reduction == "each_feeder": - # calculate new network expansion costs - - grid_expansion_results_new = edisgo.reinforce( - copy_graph=True, timesteps_pfa="snapshot_analysis" - ) - - # fmt: off - total_grid_expansion_costs_new = ( - grid_expansion_results_new.grid_expansion_costs.total_costs.sum( - ) - ) - # fmt: on - - costs_diff = ( - total_grid_expansion_costs - total_grid_expansion_costs_new - ) - - if costs_diff > 0: - logger.debug( - "Storage integration in feeder {} reduced network " - "expansion costs by {} kEuro.".format(feeder, costs_diff) - ) - - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - - total_grid_expansion_costs = total_grid_expansion_costs_new - - else: - logger.debug( - "Storage integration in feeder {} did not reduce " - "network expansion costs (costs increased by {} " - "kEuro).".format(feeder, -costs_diff) - ) - - tools.disconnect_storage(edisgo.network, storage_obj) - p_storage = 0 - - if debug: - storage_repr.append(None) - storage_size.append(0) - - edisgo.integrate_storage( - timeseries=storage_timeseries.p * 0, - 
position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q * 0, - ) - tools.assign_mv_feeder_to_nodes(edisgo.network.mv_grid) - - else: - number_parallel_lines_before = _estimate_new_number_of_lines( - critical_lines_feeder - ) - edisgo.analyze() - critical_lines_feeder_new = _critical_lines_feeder(edisgo, feeder) - critical_nodes_feeder_new = _critical_nodes_feeder(edisgo, feeder) - number_parallel_lines = _estimate_new_number_of_lines( - critical_lines_feeder_new - ) - - # if there are critical lines check if number of parallel - # lines was reduced - if not critical_lines_feeder.empty: - diff_lines = ( - number_parallel_lines_before - number_parallel_lines - ) - # if it was not reduced check if there are critical - # nodes and if the number was reduced - if diff_lines <= 0: - # if there are no critical nodes remove storage - if not critical_nodes_feeder: - logger.debug( - "Storage integration in feeder {} did not " - "reduce number of critical lines (number " - "increased by {}), storage " - "is therefore removed.".format(feeder, -diff_lines) - ) - - tools.disconnect_storage(edisgo.network, storage_obj) - p_storage = 0 - - if debug: - storage_repr.append(None) - storage_size.append(0) - - edisgo.integrate_storage( - timeseries=storage_timeseries.p * 0, - position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q - * 0, - ) - tools.assign_mv_feeder_to_nodes( - edisgo.network.mv_grid - ) - else: - logger.debug( - "Critical nodes in feeder {} " - "before and after storage integration: " - "{} vs. 
{}".format( - feeder, - critical_nodes_feeder, - critical_nodes_feeder_new, - ) - ) - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - else: - logger.debug( - "Storage integration in feeder {} reduced " - "number of critical lines.".format(feeder) - ) - - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - - # if there are no critical lines - else: - logger.debug( - "Critical nodes in feeder {} " - "before and after storage integration: " - "{} vs. {}".format( - feeder, - critical_nodes_feeder, - critical_nodes_feeder_new, - ) - ) - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - - # fifth step: if there is storage capacity left, rerun - # the past steps for the next feeder in the ranking - # list - p_storage_remaining = p_storage_remaining - p_storage - if not p_storage_remaining > p_storage_min: - break - - else: - logger.debug("No storage integration in feeder {}.".format(feeder)) - - if debug: - storage_repr.append(None) - storage_size.append(0) - - edisgo.integrate_storage( - timeseries=storage_timeseries.p * 0, - position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q * 0, - ) - tools.assign_mv_feeder_to_nodes(edisgo.network.mv_grid) - else: - logger.debug( - "No storage integration in feeder {} because there " - "are neither overloading nor voltage issues.".format(feeder) - ) - - if debug: - storage_repr.append(None) - storage_size.append(0) - feeder_repr.append(repr(feeder)) - storage_path.append([]) - - if check_costs_reduction == "once": - # check costs reduction and discard all storages if costs were not - # reduced - grid_expansion_results_new = edisgo.reinforce( - copy_graph=True, timesteps_pfa="snapshot_analysis" - ) - - total_grid_expansion_costs_new = ( - grid_expansion_results_new.grid_expansion_costs.total_costs.sum() - ) - - costs_diff = 
total_grid_expansion_costs - total_grid_expansion_costs_new - - if costs_diff > 0: - logger.info( - "Storage integration in network {} reduced network " - "expansion costs by {} kEuro.".format(edisgo.network.id, costs_diff) - ) - else: - logger.info( - "Storage integration in network {} did not reduce " - "network expansion costs (costs increased by {} " - "kEuro).".format(edisgo.network.id, -costs_diff) - ) - - for storage in storage_obj_list: - tools.disconnect_storage(edisgo.network, storage) - elif check_costs_reduction == "each_feeder": - # if costs redcution was checked after each storage only give out - # total costs reduction - if total_grid_expansion_costs_new == "not calculated": - costs_diff = 0 - else: - total_grid_expansion_costs = ( - grid_expansion_results_init.grid_expansion_costs.total_costs.sum() - ) - costs_diff = total_grid_expansion_costs - total_grid_expansion_costs_new - - logger.info( - "Storage integration in network {} reduced network " - "expansion costs by {} kEuro.".format(edisgo.network.id, costs_diff) - ) - - if debug: - plots.storage_size( - edisgo.network.mv_grid, - edisgo.network.pypsa, - filename="storage_results_{}.pdf".format(edisgo.network.id), - lopf=False, - ) - storages_df = pd.DataFrame( - { - "path": storage_path, - "repr": storage_repr, - "p_nom": storage_size, - }, - index=feeder_repr, - ) - storages_df.to_csv("storage_results_{}.csv".format(edisgo.network.id)) - - edisgo.network.results.storages_costs_reduction = pd.DataFrame( - { - "grid_expansion_costs_initial": total_grid_expansion_costs, - "grid_expansion_costs_with_storages": total_grid_expansion_costs_new, - }, - index=[edisgo.network.id], - ) From 9bcd6d0c33ad5e2d47b3bd171ddbb53113f0fc91 Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 16 Jul 2024 16:16:17 +0200 Subject: [PATCH 084/141] Update pypsa to 0.26.2 and delete pyomo dependencies --- eDisGo_env.yml | 2 +- eDisGo_env_dev.yml | 2 +- rtd_requirements.txt | 3 +-- setup.py | 3 +-- 4 files changed, 4 
insertions(+), 6 deletions(-) diff --git a/eDisGo_env.yml b/eDisGo_env.yml index 06e2b03e6..7e9a6ef7a 100644 --- a/eDisGo_env.yml +++ b/eDisGo_env.yml @@ -16,6 +16,6 @@ dependencies: - conda-forge::pygeos - conda-forge::contextily - conda-forge::descartes - - conda-forge::pypsa >= 0.17.0, <= 0.20.1 + - conda-forge::pypsa == 0.26.2 - pip: - eDisGo diff --git a/eDisGo_env_dev.yml b/eDisGo_env_dev.yml index eb9cb30df..3cbea7d30 100644 --- a/eDisGo_env_dev.yml +++ b/eDisGo_env_dev.yml @@ -16,6 +16,6 @@ dependencies: - conda-forge::pygeos - conda-forge::contextily - conda-forge::descartes - - conda-forge::pypsa >= 0.17.0, <= 0.20.1 + - conda-forge::pypsa == 0.26.2 - pip: - -e .[dev] diff --git a/rtd_requirements.txt b/rtd_requirements.txt index dd3c393d5..84a371278 100644 --- a/rtd_requirements.txt +++ b/rtd_requirements.txt @@ -8,10 +8,9 @@ multiprocess networkx >= 2.5.0 pandas >= 1.4.0 plotly -pyomo >= 6.0 pypower pyproj >= 3.0.0 -pypsa >=0.17.0, <=0.20.1 +pypsa == 0.26.2 pyyaml saio scikit-learn diff --git a/setup.py b/setup.py index 0ccb320b3..5b67f566a 100644 --- a/setup.py +++ b/setup.py @@ -50,10 +50,9 @@ def read(fname): "plotly", "pydot", "pygeos", - "pyomo <= 6.4.2", # Problem with PyPSA 20.1 fixed in newest PyPSA release "pypower", "pyproj >= 3.0.0", - "pypsa >= 0.17.0, <= 0.20.1", + "pypsa == 0.26.2", "pyyaml", "saio", "scikit-learn <= 1.1.1", From 743b4cb6e22b4eb216c931a097c6dc8717d28237 Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 16 Jul 2024 16:16:29 +0200 Subject: [PATCH 085/141] delete unused files --- doc/conf.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index 2b2867de2..30cafa8a5 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -71,8 +71,6 @@ ] # Files to ignore when building api documentation autoapi_ignore = [ - "*/flex_opt/curtailment.py", - "*/flex_opt/storage_positioning.py", "*/opf/timeseries_reduction.py", "*/opf/opf_solutions/*", ] From d1a712c06eb39af25b74b33119b87138ea0e05c7 Mon Sep 17 00:00:00 2001 From: 
joda9 Date: Tue, 16 Jul 2024 16:19:30 +0200 Subject: [PATCH 086/141] update demandlib version --- rtd_requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rtd_requirements.txt b/rtd_requirements.txt index 84a371278..c9e102ded 100644 --- a/rtd_requirements.txt +++ b/rtd_requirements.txt @@ -1,5 +1,5 @@ dash < 2.9.0 -demandlib +demandlib < 0.2.0 egoio >= 0.4.7 geopy >= 2.0.0 jupyter_dash From 6a90cc4add1aacbdde66f78d5a0158b67c50e132 Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 17 Jul 2024 09:00:02 +0200 Subject: [PATCH 087/141] removing unused conda installs --- eDisGo_env.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/eDisGo_env.yml b/eDisGo_env.yml index bb82cfdac..ccfc6f8d7 100644 --- a/eDisGo_env.yml +++ b/eDisGo_env.yml @@ -17,6 +17,5 @@ dependencies: - conda-forge::contextily - conda-forge::descartes - conda-forge::pypsa == 0.26.2 - - conda-forge::scikit-learn < 1.3.0 - pip: - eDisGo From d8f1a687d9eb0b0e2f814bdae6593d3f0af1cce0 Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 17 Jul 2024 09:00:37 +0200 Subject: [PATCH 088/141] removing doubled line --- rtd_requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/rtd_requirements.txt b/rtd_requirements.txt index 2f2937c68..3900ea862 100644 --- a/rtd_requirements.txt +++ b/rtd_requirements.txt @@ -1,6 +1,5 @@ dash < 2.9.0 demandlib < 0.2.0 -demandlib < 0.2.0 egoio >= 0.4.7 geopy >= 2.0.0 jupyter_dash From f9f5fd8da5d44b0cfe02029eabc543aa5b819ab4 Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 17 Jul 2024 09:02:39 +0200 Subject: [PATCH 089/141] removing space --- eDisGo_env.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eDisGo_env.yml b/eDisGo_env.yml index ccfc6f8d7..c9f4e6239 100644 --- a/eDisGo_env.yml +++ b/eDisGo_env.yml @@ -16,6 +16,6 @@ dependencies: - conda-forge::pygeos - conda-forge::contextily - conda-forge::descartes - - conda-forge::pypsa == 0.26.2 + - conda-forge::pypsa == 0.26.2 - pip: - eDisGo From 
f2c7f845d401000c88102d5935d358e0213d1ac0 Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 17 Jul 2024 09:04:47 +0200 Subject: [PATCH 090/141] removing unused conda installs --- eDisGo_env_dev.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/eDisGo_env_dev.yml b/eDisGo_env_dev.yml index 988743029..859e39546 100644 --- a/eDisGo_env_dev.yml +++ b/eDisGo_env_dev.yml @@ -16,7 +16,6 @@ dependencies: - conda-forge::pygeos - conda-forge::contextily - conda-forge::descartes - - conda-forge::pypsa == 0.26.2 - - conda-forge::scikit-learn < 1.3.0 + - conda-forge::pypsa == 0.26.2 - pip: - -e .[dev] From d0bb7f6200bc9d8461352cf38274f4d3ca5bf5f3 Mon Sep 17 00:00:00 2001 From: joda9 <66819219+joda9@users.noreply.github.com> Date: Wed, 17 Jul 2024 17:39:27 +0200 Subject: [PATCH 091/141] Feature/compatibility python 3.11 (#408) * improve python3.11 compatibility --- .github/workflows/tests-coverage.yml | 3 +++ eDisGo_env.yml | 2 +- eDisGo_env_dev.yml | 2 +- edisgo/network/topology.py | 2 +- rtd_requirements.txt | 2 +- setup.py | 2 +- 6 files changed, 8 insertions(+), 5 deletions(-) diff --git a/.github/workflows/tests-coverage.yml b/.github/workflows/tests-coverage.yml index 0005070a1..6bcc81f6d 100644 --- a/.github/workflows/tests-coverage.yml +++ b/.github/workflows/tests-coverage.yml @@ -24,6 +24,9 @@ jobs: - name-suffix: "basic" os: ubuntu-latest python-version: "3.10" + - name-suffix: "basic" + os: ubuntu-latest + python-version: 3.11 - name-suffix: "basic" os: windows-latest python-version: 3.9 diff --git a/eDisGo_env.yml b/eDisGo_env.yml index 7e9a6ef7a..c9f4e6239 100644 --- a/eDisGo_env.yml +++ b/eDisGo_env.yml @@ -3,7 +3,7 @@ channels: - conda-forge - defaults dependencies: - - python >= 3.9, < 3.11 + - python >= 3.9, <= 3.11 - pip - pandas >= 1.4, < 2.2.0 - conda-forge::fiona diff --git a/eDisGo_env_dev.yml b/eDisGo_env_dev.yml index 3cbea7d30..859e39546 100644 --- a/eDisGo_env_dev.yml +++ b/eDisGo_env_dev.yml @@ -3,7 +3,7 @@ channels: - conda-forge - 
defaults dependencies: - - python >= 3.9, < 3.11 + - python >= 3.9, <= 3.11 - pip - pandas >= 1.4, < 2.2.0 - conda-forge::fiona diff --git a/edisgo/network/topology.py b/edisgo/network/topology.py index f411bf312..2462314f0 100755 --- a/edisgo/network/topology.py +++ b/edisgo/network/topology.py @@ -2123,7 +2123,7 @@ def _choose_random_substation_id(): """ if comp_type == "generator": - random.seed(a=comp_data["generator_id"]) + random.seed(a=int(comp_data["generator_id"])) elif comp_type == "storage_unit": random.seed(a=len(self.storage_units_df)) else: diff --git a/rtd_requirements.txt b/rtd_requirements.txt index c9e102ded..3900ea862 100644 --- a/rtd_requirements.txt +++ b/rtd_requirements.txt @@ -13,7 +13,7 @@ pyproj >= 3.0.0 pypsa == 0.26.2 pyyaml saio -scikit-learn +scikit-learn < 1.3.0 sphinx sphinx_rtd_theme >=0.5.2 sphinx-autodoc-typehints diff --git a/setup.py b/setup.py index 5b67f566a..f04a4b97c 100644 --- a/setup.py +++ b/setup.py @@ -55,7 +55,7 @@ def read(fname): "pypsa == 0.26.2", "pyyaml", "saio", - "scikit-learn <= 1.1.1", + "scikit-learn < 1.3.0", "shapely >= 1.7.0", "sqlalchemy < 1.4.0", "sshtunnel", From f9972bc6f37cedb55f8a435a960082b49e94d580 Mon Sep 17 00:00:00 2001 From: joda9 Date: Mon, 22 Jul 2024 10:42:07 +0200 Subject: [PATCH 092/141] Update select_cable to consider voltage drop constraints --- edisgo/tools/tools.py | 109 +++++++++++++++++++++++++++++++++++++++--- 1 file changed, 103 insertions(+), 6 deletions(-) diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index d05fe1b86..3d462f184 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -193,7 +193,72 @@ def drop_duplicated_columns(df, keep="last"): return df.loc[:, ~df.columns.duplicated(keep=keep)] -def select_cable(edisgo_obj, level, apparent_power): +def calculate_voltage_drop(s_max, r_total, x_total, v_nom, cos_phi=0.95): + """ + Calculate voltage drop in kV. + + Parameters + ---------- + s_max : float or array-like + Apparent power in kVA. 
+ r_total : float or array-like + Total resistance in Ohm. + x_total : float or array-like + Total reactance in Ohm. + v_nom : float or array-like + Nominal voltage in kV. + cos_phi : float + Cosine phi of the load or generator. Default: 0.95. + Returns + ------- + float + Voltage drop in kV. + """ + return np.abs( + s_max / v_nom * (r_total * cos_phi + x_total * sqrt(1 - (cos_phi) ** 2)) * 1e-3 + ) + + +def voltage_drop_percentage( + R_per_km, L_per_km, length, num_parallel, v_nom, s_max, cos_phi=0.95 +): + """ + Calculate the voltage drop percentage. + + Parameters + ---------- + R_per_km : float or array-like + Resistance per kilometer of the cable in ohm/km. + L_per_km : float or array-like + Inductance per kilometer of the cable in mH/km. + length : float + Length of the cable in km. + num_parallel : int + Number of parallel cables. + v_nom : int + Nominal voltage in kV. + s_max : float + Apparent power in kVA. + cos_phi : float + Cosine phi of the load or generator. Default: 0.95. + Returns + ------- + float + Voltage drop in percentage of nominal voltage. + """ + # Calculate resistance and reactance for the given length and + # number of parallel cables + r_total = calculate_line_resistance(R_per_km, length, num_parallel) + x_total = calculate_line_reactance(L_per_km, length, num_parallel) + + # Calculate the voltage drop or increase + delta_v = calculate_voltage_drop(s_max, r_total, x_total, v_nom, cos_phi) + return delta_v / v_nom + + +def select_cable( + edisgo_obj, level, apparent_power, length=0, max_voltage_drop=None, max_cables=7 +): """ Selects suitable cable type and quantity using given apparent power. @@ -219,7 +284,15 @@ def select_cable(edisgo_obj, level, apparent_power): Number of necessary parallel cables. 
""" - + if not max_voltage_drop: + if level == "mv": + max_voltage_drop = edisgo_obj.config._data[ + "grid_expansion_allowed_voltage_deviations" + ]["mv_max_v_drop"] + elif level == "lv": + max_voltage_drop = edisgo_obj.config._data[ + "grid_expansion_allowed_voltage_deviations" + ]["lv_max_v_drop"] cable_count = 1 if level == "mv": @@ -240,18 +313,42 @@ def select_cable(edisgo_obj, level, apparent_power): ) > apparent_power ] + if length != 0: + suitable_cables = suitable_cables[ + voltage_drop_percentage( + R_per_km=available_cables["R_per_km"], + L_per_km=available_cables["L_per_km"], + length=length, + num_parallel=cable_count, + v_nom=available_cables["U_n"], + s_max=apparent_power, + cos_phi=0.9, + ) + < max_voltage_drop + ] # increase cable count until appropriate cable type is found - while suitable_cables.empty and cable_count < 7: + while suitable_cables.empty and cable_count < max_cables: # parameter cable_count += 1 suitable_cables = available_cables[ calculate_apparent_power( - available_cables["U_n"], - available_cables["I_max_th"], - cable_count, + available_cables["U_n"], available_cables["I_max_th"], cable_count ) > apparent_power ] + if length != 0: + suitable_cables = suitable_cables[ + voltage_drop_percentage( + R_per_km=available_cables["R_per_km"], + L_per_km=available_cables["L_per_km"], + length=length, + num_parallel=cable_count, + v_nom=available_cables["U_n"], + s_max=apparent_power, + cos_phi=0.9, + ) + < max_voltage_drop + ] if suitable_cables.empty: raise exceptions.MaximumIterationError( "Could not find a suitable cable for apparent power of " From 3dc762b32a37c3f8906eea54fe8fbd9c67bdfab8 Mon Sep 17 00:00:00 2001 From: joda9 Date: Mon, 22 Jul 2024 10:43:01 +0200 Subject: [PATCH 093/141] adding test for cable voltage drop --- tests/tools/test_tools.py | 74 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 74 insertions(+) diff --git a/tests/tools/test_tools.py b/tests/tools/test_tools.py index 40c34a63b..d2baaf222 100644 --- 
a/tests/tools/test_tools.py +++ b/tests/tools/test_tools.py @@ -30,6 +30,54 @@ def test_calculate_line_reactance(self): data = tools.calculate_line_reactance(np.array([2, 3]), 3, 2) assert_allclose(data, np.array([1.88496 / 2, 2.82743 / 2]), rtol=1e-5) + def test_voltage_drop(self): + data = tools.calculate_voltage_drop(50, 0.125, 0.36, 20, 0.9) + assert np.isclose(data, 0.67355 * 1e-3) + data = tools.calculate_voltage_drop( + np.array([50, 50]), + np.array([0.125, 0.125]), + np.array([0.36, 0.36]), + 20, + 0.9, + ) + assert_allclose(data, np.array([0.67355 * 1e-3, 0.67355 * 1e-3]), rtol=1e-5) + data = tools.calculate_voltage_drop(50, 0.125, 0.36, 40, 0.9) + assert np.isclose(data, 0.67355 * 1e-3 / 2) + data = tools.calculate_voltage_drop(100, 0.125, 0.36, 20, 0.9) + assert np.isclose(data, 0.67355 * 1e-3 * 2) + data = tools.calculate_voltage_drop( + np.array([100, 100]), + np.array([0.125, 0.125]), + np.array([0.36, 0.36]), + np.array([20, 20]), + 0.9, + ) + assert_allclose( + data, np.array([0.67355 * 1e-3 * 2, 0.67355 * 1e-3 * 2]), rtol=1e-5 + ) + + def test_voltage_drop_percentage(self): + data = tools.voltage_drop_percentage(0.152, 0.360, 1, 1, 20, 50, 0.9) + assert np.isclose(data, 2.326224820444546e-5) + data = tools.voltage_drop_percentage( + np.array([0.152, 0.152]), np.array([0.360, 0.360]), 1, 1, 20, 50, 0.9 + ) + assert_allclose( + data, np.array([2.326224820444546e-5, 2.326224820444546e-5]), rtol=1e-5 + ) + data = tools.voltage_drop_percentage(0.152, 0.360, 2, 1, 20, 50, 0.9) + assert np.isclose(data, 2 * 2.326224820444546e-5) + data = tools.voltage_drop_percentage( + np.array([0.152, 0.152]), np.array([0.360, 0.360]), 2, 1, 20, 50, 0.9 + ) + assert_allclose( + data, + np.array([2 * 2.326224820444546e-5, 2 * 2.326224820444546e-5]), + rtol=1e-5, + ) + data = tools.voltage_drop_percentage(0.152, 0.360, 1, 2, 20, 50, 0.9) + assert np.isclose(data, 2.326224820444546e-5 / 2) + def test_calculate_line_resistance(self): # test single line data = 
tools.calculate_line_resistance(2, 3, 1) @@ -97,6 +145,7 @@ def test_drop_duplicated_columns(self): assert (check_df.loc[:, "a"] == [4, 5, 6]).all() def test_select_cable(self): + # no length given cable_data, num_parallel_cables = tools.select_cable(self.edisgo, "mv", 5.1) assert cable_data.name == "NA2XS2Y 3x1x150 RE/25" assert num_parallel_cables == 1 @@ -109,6 +158,31 @@ def test_select_cable(self): assert cable_data.name == "NAYY 4x1x150" assert num_parallel_cables == 1 + # length given + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, "mv", 5.1, 1000 + ) + assert cable_data.name == "NA2XS2Y 3x1x150 RE/25" + assert num_parallel_cables == 1 + + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, "mv", 40, 1000 + ) + assert cable_data.name == "NA2XS(FL)2Y 3x1x500 RM/35" + assert num_parallel_cables == 2 + + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, "lv", 0.18, 1000 + ) + assert cable_data.name == "NAYY 4x1x240" + assert num_parallel_cables == 3 + + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, "lv", 0.18, 1000, max_voltage_drop=0.01, max_cables=100 + ) + assert cable_data.name == "NAYY 4x1x300" + assert num_parallel_cables == 15 + def test_get_downstream_buses(self): # ######## test with LV bus ######## buses_downstream = tools.get_downstream_buses( From 9d0411a53f3d51174d18dbe7299311041001c365 Mon Sep 17 00:00:00 2001 From: joda9 Date: Mon, 22 Jul 2024 13:29:23 +0200 Subject: [PATCH 094/141] Adding default values for adding new components --- edisgo/config/config_grid_default.cfg | 6 ++++++ edisgo/tools/tools.py | 20 +++++++++----------- tests/tools/test_tools.py | 2 +- 3 files changed, 16 insertions(+), 12 deletions(-) diff --git a/edisgo/config/config_grid_default.cfg b/edisgo/config/config_grid_default.cfg index f48c68d8f..7cab549de 100644 --- a/edisgo/config/config_grid_default.cfg +++ b/edisgo/config/config_grid_default.cfg @@ -55,3 +55,9 @@ upper_limit_voltage_level_4 
= 20.0 # Positioning of disconnecting points: Can be position at location of most # balanced load or generation. Choose load, generation, loadgen position = load + +[new_components] +# Define the default values for adding new components to the grid + +lv_max_voltage_deviation = 0.035 +mv_max_voltage_deviation = 0.015 diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index 3d462f184..e37ba3fa2 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -284,29 +284,27 @@ def select_cable( Number of necessary parallel cables. """ - if not max_voltage_drop: - if level == "mv": - max_voltage_drop = edisgo_obj.config._data[ - "grid_expansion_allowed_voltage_deviations" - ]["mv_max_v_drop"] - elif level == "lv": - max_voltage_drop = edisgo_obj.config._data[ - "grid_expansion_allowed_voltage_deviations" - ]["lv_max_v_drop"] - cable_count = 1 if level == "mv": cable_data = edisgo_obj.topology.equipment_data["mv_cables"] available_cables = cable_data[ cable_data["U_n"] == edisgo_obj.topology.mv_grid.nominal_voltage ] + if not max_voltage_drop: + max_voltage_drop = edisgo_obj.config._data["new_components"][ + "mv_max_voltage_deviation" + ] elif level == "lv": available_cables = edisgo_obj.topology.equipment_data["lv_cables"] + if not max_voltage_drop: + max_voltage_drop = edisgo_obj.config._data["new_components"][ + "lv_max_voltage_deviation" + ] else: raise ValueError( "Specified voltage level is not valid. Must either be 'mv' or 'lv'." 
) - + cable_count = 1 suitable_cables = available_cables[ calculate_apparent_power( available_cables["U_n"], available_cables["I_max_th"], cable_count diff --git a/tests/tools/test_tools.py b/tests/tools/test_tools.py index d2baaf222..996f70b95 100644 --- a/tests/tools/test_tools.py +++ b/tests/tools/test_tools.py @@ -175,7 +175,7 @@ def test_select_cable(self): self.edisgo, "lv", 0.18, 1000 ) assert cable_data.name == "NAYY 4x1x240" - assert num_parallel_cables == 3 + assert num_parallel_cables == 5 cable_data, num_parallel_cables = tools.select_cable( self.edisgo, "lv", 0.18, 1000, max_voltage_drop=0.01, max_cables=100 From 9f00fa701b76c1d59ba0bd3b0fbef620b4674528 Mon Sep 17 00:00:00 2001 From: joda9 Date: Mon, 22 Jul 2024 14:26:17 +0200 Subject: [PATCH 095/141] Add warning if meshes are in the grid --- edisgo/opf/powermodels_opf.py | 28 ++++++++++++++++++++++++++++ tests/opf/test_powermodels_opf.py | 27 ++++++++++++++++++++++++++- 2 files changed, 54 insertions(+), 1 deletion(-) diff --git a/edisgo/opf/powermodels_opf.py b/edisgo/opf/powermodels_opf.py index db4925d3b..2d082f0cf 100644 --- a/edisgo/opf/powermodels_opf.py +++ b/edisgo/opf/powermodels_opf.py @@ -4,6 +4,7 @@ import subprocess import sys +import networkx as nx import numpy as np from edisgo.flex_opt import exceptions @@ -12,6 +13,32 @@ logger = logging.getLogger(__name__) +def find_meshes(edisgo_obj) -> list: + """ + Find all meshes in the grid. + + Parameters + ---------- + edisgo_obj : :class:`~edisgo.EDisGo` + EDisGo object. + + Returns + ------- + meshes : list + List of all meshes in the grid. + + """ + meshes = nx.cycle_basis(edisgo_obj.to_graph()) + if meshes: + logger.warning( + "Grid contains mesh(es). This might cause problems in " + "the power flow or optimisation." + ) + return meshes + else: + return None + + def pm_optimize( edisgo_obj, s_base=1, @@ -105,6 +132,7 @@ def pm_optimize( Default: True. 
""" + find_meshes(edisgo_obj) opf_dir = os.path.dirname(os.path.abspath(__file__)) solution_dir = os.path.join(opf_dir, "opf_solutions") pm, hv_flex_dict = edisgo_obj.to_powermodels( diff --git a/tests/opf/test_powermodels_opf.py b/tests/opf/test_powermodels_opf.py index 4f6482f97..931a7cb6b 100644 --- a/tests/opf/test_powermodels_opf.py +++ b/tests/opf/test_powermodels_opf.py @@ -3,7 +3,7 @@ import pytest from edisgo import EDisGo -from edisgo.opf.powermodels_opf import pm_optimize +from edisgo.opf.powermodels_opf import find_meshes, pm_optimize from edisgo.tools.tools import aggregate_district_heating_components @@ -337,3 +337,28 @@ def test_pm_optimize(self): ) ) ) + + def test_find_meshes(self, caplog): + meshes = find_meshes(self.edisgo) + assert not meshes + self.edisgo.topology.add_line( + "Bus_GeneratorFluctuating_2", + "Bus_GeneratorFluctuating_6", + 0.1, + x=0.1, + r=0.1, + ) + meshes = find_meshes(self.edisgo) + assert len(meshes) == 1 + assert "Bus_GeneratorFluctuating_2" in meshes[0] + assert "Bus_GeneratorFluctuating_6" in meshes[0] + self.edisgo.topology.add_line( + "Bus_BranchTee_LVGrid_2_3", "Bus_BranchTee_LVGrid_3_3", 0.1, x=0.1, r=0.1 + ) + meshes = find_meshes(self.edisgo) + assert len(meshes) == 2 + assert "Bus_BranchTee_LVGrid_2_3" in meshes[1] + assert ( + "Grid contains mesh(es). This might cause problems" + " in the power flow or optimisation." 
in caplog.text + ) From b8a8baacc4b57af02571bc8e077bd4121a54ff1d Mon Sep 17 00:00:00 2001 From: joda9 Date: Mon, 22 Jul 2024 15:11:13 +0200 Subject: [PATCH 096/141] Update default voltage deviation values for new components --- edisgo/config/config_grid_default.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/edisgo/config/config_grid_default.cfg b/edisgo/config/config_grid_default.cfg index 7cab549de..1f4f9d588 100644 --- a/edisgo/config/config_grid_default.cfg +++ b/edisgo/config/config_grid_default.cfg @@ -59,5 +59,5 @@ position = load [new_components] # Define the default values for adding new components to the grid -lv_max_voltage_deviation = 0.035 -mv_max_voltage_deviation = 0.015 +lv_max_voltage_deviation = 0.03 # nach VDE-AR-N 4100 (VDE-AR-N 4100) Anwendungsregel: 2019-04 +mv_max_voltage_deviation = 0.02 # nach VDE-AR-N 4110 (VDE-AR-N 4110) Anwendungsregel: 2023-09 From bab29bceae9079a965aa66c3bfd3ca75c30d482e Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 24 Jul 2024 13:02:36 +0200 Subject: [PATCH 097/141] Update default voltage deviation values for new components --- edisgo/config/config_grid_default.cfg | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/edisgo/config/config_grid_default.cfg b/edisgo/config/config_grid_default.cfg index 1f4f9d588..d1f082ca7 100644 --- a/edisgo/config/config_grid_default.cfg +++ b/edisgo/config/config_grid_default.cfg @@ -50,14 +50,13 @@ upper_limit_voltage_level_6 = 0.2 upper_limit_voltage_level_5 = 5.5 upper_limit_voltage_level_4 = 20.0 +lv_max_voltage_deviation = 0.03 +# from VDE-AR-N 4100 (VDE-AR-N 4100) Anwendungsregel: 2019-04 +mv_max_voltage_deviation = 0.02 +# from VDE-AR-N 4110 (VDE-AR-N 4110) Anwendungsregel: 2023-09 + [disconnecting_point] # Positioning of disconnecting points: Can be position at location of most # balanced load or generation. 
Choose load, generation, loadgen position = load - -[new_components] -# Define the default values for adding new components to the grid - -lv_max_voltage_deviation = 0.03 # nach VDE-AR-N 4100 (VDE-AR-N 4100) Anwendungsregel: 2019-04 -mv_max_voltage_deviation = 0.02 # nach VDE-AR-N 4110 (VDE-AR-N 4110) Anwendungsregel: 2023-09 From f31886aa4e4821103cca4be2a68516dc0b858a3c Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 24 Jul 2024 13:03:09 +0200 Subject: [PATCH 098/141] Refactor calculate_voltage_drop function for clarity and readability --- edisgo/tools/tools.py | 124 ++++++++++++++++++++++++++++++------------ 1 file changed, 89 insertions(+), 35 deletions(-) diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index e37ba3fa2..2e1a87719 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -193,37 +193,57 @@ def drop_duplicated_columns(df, keep="last"): return df.loc[:, ~df.columns.duplicated(keep=keep)] -def calculate_voltage_drop(s_max, r_total, x_total, v_nom, cos_phi=0.95): +def calculate_voltage_diff_per_line( + s_max: float | np.ndarray, + r_total: float | np.ndarray, + x_total: float | np.ndarray, + v_nom: float | np.ndarray, + sign: int = -1, + cos_phi: float = 0.95, +) -> float | np.ndarray: """ - Calculate voltage drop in kV. + Calculate the voltage drop across a line in kV. Parameters ---------- s_max : float or array-like - Apparent power in kVA. + Apparent power the cable must carry in MVA. r_total : float or array-like - Total resistance in Ohm. + Total resistance in Ohms. x_total : float or array-like - Total reactance in Ohm. + Total reactance in Ohms. v_nom : float or array-like Nominal voltage in kV. - cos_phi : float - Cosine phi of the load or generator. Default: 0.95. + sign : int, optional + Sign of the reactance. -1 for inductive and +1 for capacitive. Default is -1. + cos_phi : float, optional + Power factor (cosine of the phase angle) of the load or generator. + Default is 0.95. 
+ Returns ------- - float + float or array-like Voltage drop in kV. """ - return np.abs( - s_max / v_nom * (r_total * cos_phi + x_total * sqrt(1 - (cos_phi) ** 2)) * 1e-3 + sin_phi = np.sqrt(1 - cos_phi**2) + voltage_diff = np.abs( + (s_max * 1e6 / (v_nom * 1e3)) * (r_total * cos_phi + sign * x_total * sin_phi) ) - - -def voltage_drop_percentage( - R_per_km, L_per_km, length, num_parallel, v_nom, s_max, cos_phi=0.95 -): + return voltage_diff / 1e3 # Convert to kV + + +def voltage_diff_pu( + R_per_km: float | np.ndarray, + L_per_km: float | np.ndarray, + length: float, + num_parallel: int, + v_nom: float | np.ndarray, + s_max: float | np.ndarray, + cos_phi: float = 0.95, + sign: int = -1, +) -> float | np.ndarray: """ - Calculate the voltage drop percentage. + Calculate the voltage drop per unit of nominal voltage. Parameters ---------- @@ -238,27 +258,43 @@ def voltage_drop_percentage( v_nom : int Nominal voltage in kV. s_max : float - Apparent power in kVA. - cos_phi : float + Apparent power the cable must carry in MVA. + cos_phi : float, optional Cosine phi of the load or generator. Default: 0.95. + sign : int, optional + Sign of the reactance. -1 for inductive and +1 for capacitive. Default is -1. + Returns ------- float - Voltage drop in percentage of nominal voltage. + Voltage drop in per unit of nominal voltage. 
""" - # Calculate resistance and reactance for the given length and + # Calculate total resistance and reactance for the given length and # number of parallel cables r_total = calculate_line_resistance(R_per_km, length, num_parallel) x_total = calculate_line_reactance(L_per_km, length, num_parallel) # Calculate the voltage drop or increase - delta_v = calculate_voltage_drop(s_max, r_total, x_total, v_nom, cos_phi) - return delta_v / v_nom + delta_v = calculate_voltage_diff_per_line( + s_max, r_total, x_total, v_nom, sign=sign, cos_phi=cos_phi + ) + + # Convert voltage drop to per unit of nominal voltage + voltage_drop_pu = delta_v / v_nom + + return voltage_drop_pu def select_cable( - edisgo_obj, level, apparent_power, length=0, max_voltage_drop=None, max_cables=7 -): + edisgo_obj: EDisGo, + level: str, + apparent_power: float, + length: float = 0, + max_voltage_diff: float | None = None, + max_cables: int = 7, + cos_phi: float | None = 0.95, + inductive_reactance: bool = True, +) -> tuple[pd.Series, int]: """ Selects suitable cable type and quantity using given apparent power. @@ -274,6 +310,17 @@ def select_cable( 'lv'. apparent_power : float Apparent power the cable must carry in MVA. + length : float + Length of the cable in km. Default: 0. + max_voltage_diff : float + Maximum voltage drop in pu. Default: None. + max_cables : int + Maximum number of parallel cables to consider. Default is 7. + cos_phi : float + Cosine phi of the load or generator. Default: 0.95. + inductive_reactance : bool + If True, inductive reactance is considered. Default + is True. If False, capacitive reactance is considered. Returns ------- @@ -284,20 +331,25 @@ def select_cable( Number of necessary parallel cables. 
""" - + if not cos_phi: + cos_phi = 0.95 + if inductive_reactance: + sign = -1 + else: + sign = 1 if level == "mv": cable_data = edisgo_obj.topology.equipment_data["mv_cables"] available_cables = cable_data[ cable_data["U_n"] == edisgo_obj.topology.mv_grid.nominal_voltage ] - if not max_voltage_drop: - max_voltage_drop = edisgo_obj.config._data["new_components"][ + if not max_voltage_diff: + max_voltage_diff = edisgo_obj.config["grid_connection"][ "mv_max_voltage_deviation" ] elif level == "lv": available_cables = edisgo_obj.topology.equipment_data["lv_cables"] - if not max_voltage_drop: - max_voltage_drop = edisgo_obj.config._data["new_components"][ + if not max_voltage_diff: + max_voltage_diff = edisgo_obj.config["grid_connection"][ "lv_max_voltage_deviation" ] else: @@ -313,16 +365,17 @@ def select_cable( ] if length != 0: suitable_cables = suitable_cables[ - voltage_drop_percentage( + voltage_diff_pu( R_per_km=available_cables["R_per_km"], L_per_km=available_cables["L_per_km"], length=length, num_parallel=cable_count, v_nom=available_cables["U_n"], s_max=apparent_power, - cos_phi=0.9, + cos_phi=cos_phi, + sign=sign, ) - < max_voltage_drop + < max_voltage_diff ] # increase cable count until appropriate cable type is found @@ -336,16 +389,17 @@ def select_cable( ] if length != 0: suitable_cables = suitable_cables[ - voltage_drop_percentage( + voltage_diff_pu( R_per_km=available_cables["R_per_km"], L_per_km=available_cables["L_per_km"], length=length, num_parallel=cable_count, v_nom=available_cables["U_n"], s_max=apparent_power, - cos_phi=0.9, + cos_phi=cos_phi, + sign=sign, ) - < max_voltage_drop + < max_voltage_diff ] if suitable_cables.empty: raise exceptions.MaximumIterationError( From 8adc37270a916c3f7202012f75a69f091d9d314b Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 24 Jul 2024 13:03:52 +0200 Subject: [PATCH 099/141] update tests select cables --- tests/tools/test_tools.py | 83 ++++++++++++++++++++++----------------- 1 file changed, 46 insertions(+), 37 
deletions(-) diff --git a/tests/tools/test_tools.py b/tests/tools/test_tools.py index 996f70b95..646cbe035 100644 --- a/tests/tools/test_tools.py +++ b/tests/tools/test_tools.py @@ -30,53 +30,55 @@ def test_calculate_line_reactance(self): data = tools.calculate_line_reactance(np.array([2, 3]), 3, 2) assert_allclose(data, np.array([1.88496 / 2, 2.82743 / 2]), rtol=1e-5) - def test_voltage_drop(self): - data = tools.calculate_voltage_drop(50, 0.125, 0.36, 20, 0.9) - assert np.isclose(data, 0.67355 * 1e-3) - data = tools.calculate_voltage_drop( + def test_voltage_diff(self): + data = tools.calculate_voltage_diff_per_line(50, 0.125, 0.36, 20, -1, 0.9) + correct_value = 0.11105090491866049 + assert np.isclose(data, correct_value) + data = tools.calculate_voltage_diff_per_line( np.array([50, 50]), np.array([0.125, 0.125]), np.array([0.36, 0.36]), 20, + -1, 0.9, ) - assert_allclose(data, np.array([0.67355 * 1e-3, 0.67355 * 1e-3]), rtol=1e-5) - data = tools.calculate_voltage_drop(50, 0.125, 0.36, 40, 0.9) - assert np.isclose(data, 0.67355 * 1e-3 / 2) - data = tools.calculate_voltage_drop(100, 0.125, 0.36, 20, 0.9) - assert np.isclose(data, 0.67355 * 1e-3 * 2) - data = tools.calculate_voltage_drop( + assert_allclose(data, np.array([correct_value, correct_value]), rtol=1e-5) + data = tools.calculate_voltage_diff_per_line(50, 0.125, 0.36, 40, -1, 0.9) + assert np.isclose(data, correct_value / 2) + data = tools.calculate_voltage_diff_per_line(100, 0.125, 0.36, 20, -1, 0.9) + assert np.isclose(data, correct_value * 2) + data = tools.calculate_voltage_diff_per_line( np.array([100, 100]), np.array([0.125, 0.125]), np.array([0.36, 0.36]), np.array([20, 20]), + -1, 0.9, ) assert_allclose( - data, np.array([0.67355 * 1e-3 * 2, 0.67355 * 1e-3 * 2]), rtol=1e-5 + data, np.array([correct_value * 2, correct_value * 2]), rtol=1e-5 ) - def test_voltage_drop_percentage(self): - data = tools.voltage_drop_percentage(0.152, 0.360, 1, 1, 20, 50, 0.9) - assert np.isclose(data, 
2.326224820444546e-5) - data = tools.voltage_drop_percentage( - np.array([0.152, 0.152]), np.array([0.360, 0.360]), 1, 1, 20, 50, 0.9 + def test_voltage_drop_pu(self): + data = tools.voltage_diff_pu(0.1, 0.350, 1, 1, 20, 50, 0.9, -1) + correct_value = 0.52589253567891375 * 1e-2 + assert np.isclose(data, correct_value) + data = tools.voltage_diff_pu( + np.array([0.1, 0.1]), np.array([0.35, 0.35]), 1, 1, 20, 50, 0.9, -1 ) - assert_allclose( - data, np.array([2.326224820444546e-5, 2.326224820444546e-5]), rtol=1e-5 - ) - data = tools.voltage_drop_percentage(0.152, 0.360, 2, 1, 20, 50, 0.9) - assert np.isclose(data, 2 * 2.326224820444546e-5) - data = tools.voltage_drop_percentage( - np.array([0.152, 0.152]), np.array([0.360, 0.360]), 2, 1, 20, 50, 0.9 + assert_allclose(data, np.array([correct_value, correct_value]), rtol=1e-5) + data = tools.voltage_diff_pu(0.1, 0.35, 2, 1, 20, 50, 0.9, -1) + assert np.isclose(data, 2 * correct_value) + data = tools.voltage_diff_pu( + np.array([0.1, 0.1]), np.array([0.35, 0.35]), 2, 1, 20, 50, 0.9, -1 ) assert_allclose( data, - np.array([2 * 2.326224820444546e-5, 2 * 2.326224820444546e-5]), + np.array([2 * correct_value, 2 * correct_value]), rtol=1e-5, ) - data = tools.voltage_drop_percentage(0.152, 0.360, 1, 2, 20, 50, 0.9) - assert np.isclose(data, 2.326224820444546e-5 / 2) + data = tools.voltage_diff_pu(0.1, 0.35, 1, 2, 20, 50, 0.9, -1) + assert np.isclose(data, correct_value / 2) def test_calculate_line_resistance(self): # test single line @@ -159,29 +161,36 @@ def test_select_cable(self): assert num_parallel_cables == 1 # length given - cable_data, num_parallel_cables = tools.select_cable( - self.edisgo, "mv", 5.1, 1000 - ) + cable_data, num_parallel_cables = tools.select_cable(self.edisgo, "mv", 5.1, 2) assert cable_data.name == "NA2XS2Y 3x1x150 RE/25" assert num_parallel_cables == 1 - cable_data, num_parallel_cables = tools.select_cable( - self.edisgo, "mv", 40, 1000 - ) + cable_data, num_parallel_cables = 
tools.select_cable(self.edisgo, "mv", 40, 1) assert cable_data.name == "NA2XS(FL)2Y 3x1x500 RM/35" assert num_parallel_cables == 2 + cable_data, num_parallel_cables = tools.select_cable(self.edisgo, "lv", 0.18, 1) + assert cable_data.name == "NAYY 4x1x300" + assert num_parallel_cables == 3 + cable_data, num_parallel_cables = tools.select_cable( - self.edisgo, "lv", 0.18, 1000 + self.edisgo, "lv", 0.18, 1, max_voltage_diff=0.01, max_cables=100 ) - assert cable_data.name == "NAYY 4x1x240" - assert num_parallel_cables == 5 + assert cable_data.name == "NAYY 4x1x300" + assert num_parallel_cables == 8 cable_data, num_parallel_cables = tools.select_cable( - self.edisgo, "lv", 0.18, 1000, max_voltage_drop=0.01, max_cables=100 + self.edisgo, + "lv", + 0.18, + 1, + max_voltage_diff=0.01, + max_cables=100, + cos_phi=1, + inductive_reactance=False, ) assert cable_data.name == "NAYY 4x1x300" - assert num_parallel_cables == 15 + assert num_parallel_cables == 12 def test_get_downstream_buses(self): # ######## test with LV bus ######## From a37c4d5c27e624cca3efc9f5667a8892d4e245d7 Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 24 Jul 2024 13:42:59 +0200 Subject: [PATCH 100/141] use "difference" instead of "drop" in the variable names --- edisgo/tools/tools.py | 16 ++++++++-------- tests/tools/test_tools.py | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index 2e1a87719..76a7cd640 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -202,7 +202,7 @@ def calculate_voltage_diff_per_line( cos_phi: float = 0.95, ) -> float | np.ndarray: """ - Calculate the voltage drop across a line in kV. + Calculate the voltage difference across a line in kV. Parameters ---------- @@ -223,7 +223,7 @@ def calculate_voltage_diff_per_line( Returns ------- float or array-like - Voltage drop in kV. + Voltage difference in kV. 
""" sin_phi = np.sqrt(1 - cos_phi**2) voltage_diff = np.abs( @@ -243,7 +243,7 @@ def voltage_diff_pu( sign: int = -1, ) -> float | np.ndarray: """ - Calculate the voltage drop per unit of nominal voltage. + Calculate the voltage difference per unit of nominal voltage. Parameters ---------- @@ -267,7 +267,7 @@ def voltage_diff_pu( Returns ------- float - Voltage drop in per unit of nominal voltage. + Voltage difference in per unit of nominal voltage. """ # Calculate total resistance and reactance for the given length and # number of parallel cables @@ -279,10 +279,10 @@ def voltage_diff_pu( s_max, r_total, x_total, v_nom, sign=sign, cos_phi=cos_phi ) - # Convert voltage drop to per unit of nominal voltage - voltage_drop_pu = delta_v / v_nom + # Convert voltage difference to per unit of nominal voltage + voltage_difference_pu = delta_v / v_nom - return voltage_drop_pu + return voltage_difference_pu def select_cable( @@ -313,7 +313,7 @@ def select_cable( length : float Length of the cable in km. Default: 0. max_voltage_diff : float - Maximum voltage drop in pu. Default: None. + Maximum voltage difference in pu. Default: None. max_cables : int Maximum number of parallel cables to consider. Default is 7. 
cos_phi : float diff --git a/tests/tools/test_tools.py b/tests/tools/test_tools.py index 646cbe035..a2bf39751 100644 --- a/tests/tools/test_tools.py +++ b/tests/tools/test_tools.py @@ -59,7 +59,7 @@ def test_voltage_diff(self): data, np.array([correct_value * 2, correct_value * 2]), rtol=1e-5 ) - def test_voltage_drop_pu(self): + def test_voltage_diff_pu(self): data = tools.voltage_diff_pu(0.1, 0.350, 1, 1, 20, 50, 0.9, -1) correct_value = 0.52589253567891375 * 1e-2 assert np.isclose(data, correct_value) From 85bd5f11ff231ce50e4c905cfeff82c92790d9d2 Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 24 Jul 2024 15:30:50 +0200 Subject: [PATCH 101/141] adding type hints and optional arguments --- edisgo/opf/powermodels_opf.py | 31 +++++++++++++++++-------------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/edisgo/opf/powermodels_opf.py b/edisgo/opf/powermodels_opf.py index 2d082f0cf..52299c80c 100644 --- a/edisgo/opf/powermodels_opf.py +++ b/edisgo/opf/powermodels_opf.py @@ -4,6 +4,8 @@ import subprocess import sys +from typing import List, Optional + import networkx as nx import numpy as np @@ -13,20 +15,21 @@ logger = logging.getLogger(__name__) -def find_meshes(edisgo_obj) -> list: +def find_meshes(edisgo_obj) -> Optional[List[List[int]]]: """ Find all meshes in the grid. Parameters ---------- - edisgo_obj : :class:`~edisgo.EDisGo` + edisgo_obj : EDisGo EDisGo object. Returns ------- - meshes : list + Optional[List[List[int]]] List of all meshes in the grid. - + Each mesh is represented as a list of node indices. + If no meshes are found, None is returned. 
""" meshes = nx.cycle_basis(edisgo_obj.to_graph()) if meshes: @@ -41,16 +44,16 @@ def find_meshes(edisgo_obj) -> list: def pm_optimize( edisgo_obj, - s_base=1, - flexible_cps=None, - flexible_hps=None, - flexible_loads=None, - flexible_storage_units=None, - opf_version=1, - method="soc", - warm_start=False, - silence_moi=False, -): + s_base: int = 1, + flexible_cps: Optional[np.ndarray] = None, + flexible_hps: Optional[np.ndarray] = None, + flexible_loads: Optional[np.ndarray] = None, + flexible_storage_units: Optional[np.ndarray] = None, + opf_version: int = 1, + method: str = "soc", + warm_start: bool = False, + silence_moi: bool = False, +) -> None: """ Run OPF for edisgo object in julia subprocess and write results of OPF to edisgo object. Results of OPF are time series of operation schedules of flexibilities. From 28fb71cc023ab78e4c53a9ad00ca30163fb02c5c Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 24 Jul 2024 15:31:47 +0200 Subject: [PATCH 102/141] Refactor test_find_meshes to use pytest.LogCaptureFixture --- tests/opf/test_powermodels_opf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/opf/test_powermodels_opf.py b/tests/opf/test_powermodels_opf.py index 931a7cb6b..2efb7d11c 100644 --- a/tests/opf/test_powermodels_opf.py +++ b/tests/opf/test_powermodels_opf.py @@ -338,7 +338,7 @@ def test_pm_optimize(self): ) ) - def test_find_meshes(self, caplog): + def test_find_meshes(self, caplog: pytest.LogCaptureFixture): meshes = find_meshes(self.edisgo) assert not meshes self.edisgo.topology.add_line( From 1b2301a5d7eea4a5c7baae4fb1da4cac1530dfc0 Mon Sep 17 00:00:00 2001 From: joda9 Date: Thu, 25 Jul 2024 09:00:16 +0200 Subject: [PATCH 103/141] source of formula added --- edisgo/tools/tools.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index 76a7cd640..dea5d6735 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -226,9 +226,10 @@ def 
calculate_voltage_diff_per_line( Voltage difference in kV. """ sin_phi = np.sqrt(1 - cos_phi**2) + # Calculate the voltage difference using the formula from VDE-AR-N 4105 voltage_diff = np.abs( (s_max * 1e6 / (v_nom * 1e3)) * (r_total * cos_phi + sign * x_total * sin_phi) - ) + ) # in V return voltage_diff / 1e3 # Convert to kV From a0f01fb21b21c34f4f50d896406af247e7faedf0 Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 30 Jul 2024 14:25:03 +0200 Subject: [PATCH 104/141] specified source for values --- edisgo/config/config_grid_default.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/edisgo/config/config_grid_default.cfg b/edisgo/config/config_grid_default.cfg index d1f082ca7..1cd829259 100644 --- a/edisgo/config/config_grid_default.cfg +++ b/edisgo/config/config_grid_default.cfg @@ -50,10 +50,10 @@ upper_limit_voltage_level_6 = 0.2 upper_limit_voltage_level_5 = 5.5 upper_limit_voltage_level_4 = 20.0 +# from VDE-AR-N 4100 (VDE-AR-N 4100) Anwendungsregel: 2019-04, table 3 lv_max_voltage_deviation = 0.03 -# from VDE-AR-N 4100 (VDE-AR-N 4100) Anwendungsregel: 2019-04 +# from VDE-AR-N 4110 (VDE-AR-N 4110) Anwendungsregel: 2023-09, 5.3.2 Zulässige Spannungsänderung mv_max_voltage_deviation = 0.02 -# from VDE-AR-N 4110 (VDE-AR-N 4110) Anwendungsregel: 2023-09 [disconnecting_point] From 3a23e5e488e261ddfcead7e2ef0539529bf3681f Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 30 Jul 2024 14:30:31 +0200 Subject: [PATCH 105/141] update tools to consistency variable names and minor fixes --- edisgo/tools/tools.py | 103 ++++++++++++++++++++++++------------------ 1 file changed, 59 insertions(+), 44 deletions(-) diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index dea5d6735..789f3053a 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -14,7 +14,7 @@ from sqlalchemy.engine.base import Engine -from edisgo.flex_opt import exceptions +from edisgo.flex_opt import exceptions, q_control from edisgo.io.db import 
session_scope_egon_data, sql_grid_geom, sql_intersects from edisgo.tools import session_scope @@ -198,8 +198,8 @@ def calculate_voltage_diff_per_line( r_total: float | np.ndarray, x_total: float | np.ndarray, v_nom: float | np.ndarray, - sign: int = -1, - cos_phi: float = 0.95, + reactive_power_mode: str = "inductive", + power_factor: float = 0.95, ) -> float | np.ndarray: """ Calculate the voltage difference across a line in kV. @@ -209,14 +209,14 @@ def calculate_voltage_diff_per_line( s_max : float or array-like Apparent power the cable must carry in MVA. r_total : float or array-like - Total resistance in Ohms. + Total resistance of the line in Ohms. x_total : float or array-like - Total reactance in Ohms. + Total reactance of the line in Ohms. v_nom : float or array-like - Nominal voltage in kV. + Nominal voltage of the line in kV. sign : int, optional Sign of the reactance. -1 for inductive and +1 for capacitive. Default is -1. - cos_phi : float, optional + power_factor : float, optional Power factor (cosine of the phase angle) of the load or generator. Default is 0.95. @@ -225,12 +225,13 @@ def calculate_voltage_diff_per_line( float or array-like Voltage difference in kV. 
""" - sin_phi = np.sqrt(1 - cos_phi**2) + sign = q_control.get_q_sign_generator(reactive_power_mode) + sin_phi = np.sqrt(1 - power_factor**2) # Calculate the voltage difference using the formula from VDE-AR-N 4105 - voltage_diff = np.abs( - (s_max * 1e6 / (v_nom * 1e3)) * (r_total * cos_phi + sign * x_total * sin_phi) - ) # in V - return voltage_diff / 1e3 # Convert to kV + voltage_diff = (s_max / (v_nom)) * ( + r_total * power_factor + sign * x_total * sin_phi + ) + return voltage_diff # in kV def voltage_diff_pu( @@ -240,8 +241,8 @@ def voltage_diff_pu( num_parallel: int, v_nom: float | np.ndarray, s_max: float | np.ndarray, - cos_phi: float = 0.95, - sign: int = -1, + power_factor: float = 0.95, + reactive_power_mode: str = "inductive", ) -> float | np.ndarray: """ Calculate the voltage difference per unit of nominal voltage. @@ -260,7 +261,7 @@ def voltage_diff_pu( Nominal voltage in kV. s_max : float Apparent power the cable must carry in MVA. - cos_phi : float, optional + power_factor : float, optional Cosine phi of the load or generator. Default: 0.95. sign : int, optional Sign of the reactance. -1 for inductive and +1 for capacitive. Default is -1. @@ -277,7 +278,12 @@ def voltage_diff_pu( # Calculate the voltage drop or increase delta_v = calculate_voltage_diff_per_line( - s_max, r_total, x_total, v_nom, sign=sign, cos_phi=cos_phi + s_max, + r_total, + x_total, + v_nom, + reactive_power_mode=reactive_power_mode, + power_factor=power_factor, ) # Convert voltage difference to per unit of nominal voltage @@ -293,15 +299,17 @@ def select_cable( length: float = 0, max_voltage_diff: float | None = None, max_cables: int = 7, - cos_phi: float | None = 0.95, - inductive_reactance: bool = True, + power_factor: float | None = None, + component_type: str | None = None, + reactive_power_mode: str = "inductive", ) -> tuple[pd.Series, int]: """ - Selects suitable cable type and quantity using given apparent power. 
+ Selects suitable cable type and quantity based on apparent power and + voltage deviation. - Cable is selected to be able to carry the given `apparent_power`, no load - factor is considered. Overhead lines are not considered in choosing a - suitable cable. + The cable is selected to carry the given `apparent_power` and to ensure + acceptable voltage deviation over the cable length. No load factor is + considered. Overhead lines are not considered in choosing a suitable cable. Parameters ---------- @@ -314,30 +322,37 @@ def select_cable( length : float Length of the cable in km. Default: 0. max_voltage_diff : float - Maximum voltage difference in pu. Default: None. + Maximum allowed voltage difference (p.u. of nominal voltage). + If None, it defaults to the value specified in the configuration file + under the `grid_connection` section for the respective voltage level. + Default: None. max_cables : int - Maximum number of parallel cables to consider. Default is 7. - cos_phi : float - Cosine phi of the load or generator. Default: 0.95. - inductive_reactance : bool - If True, inductive reactance is considered. Default - is True. If False, capacitive reactance is considered. + Maximum number of cables to consider. Default: 7. + power_factor : float + Power factor of the load. + component_type : str + Type of the component to be connected, used to obtain the default power factor + from the configuration. + possible options are 'gen', 'load', 'cp', 'hp' + reactive_power_mode : str + Mode of the reactive power. Default: 'inductive' Returns ------- - :pandas:`pandas.Series` - Series with attributes of selected cable as in equipment data and - cable type as series name. - int - Number of necessary parallel cables. - + tuple[pd.Series, int] + A tuple containing the selected cable type and the quantity needed. 
""" - if not cos_phi: - cos_phi = 0.95 - if inductive_reactance: - sign = -1 + if component_type is None: + component_type = level + "_load" + elif component_type in ["gen", "load", "cp", "hp"]: + component_type = level + "_" + component_type else: - sign = 1 + raise ValueError( + "Specified component type is not valid. " + "Must either be 'gen', 'load', 'cp' or 'hp'." + ) + if power_factor is None: + power_factor = edisgo_obj.config["reactive_power_factor"][component_type] if level == "mv": cable_data = edisgo_obj.topology.equipment_data["mv_cables"] available_cables = cable_data[ @@ -373,8 +388,8 @@ def select_cable( num_parallel=cable_count, v_nom=available_cables["U_n"], s_max=apparent_power, - cos_phi=cos_phi, - sign=sign, + power_factor=power_factor, + reactive_power_mode=reactive_power_mode, ) < max_voltage_diff ] @@ -397,8 +412,8 @@ def select_cable( num_parallel=cable_count, v_nom=available_cables["U_n"], s_max=apparent_power, - cos_phi=cos_phi, - sign=sign, + power_factor=power_factor, + reactive_power_mode=reactive_power_mode, ) < max_voltage_diff ] From 6e3cd06b1ea94629893783180d1d314a31349d05 Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 30 Jul 2024 14:31:00 +0200 Subject: [PATCH 106/141] adding tests for cable_selection --- tests/tools/test_tools.py | 41 ++++++++++++++++++++++++++++++++++++--- 1 file changed, 38 insertions(+), 3 deletions(-) diff --git a/tests/tools/test_tools.py b/tests/tools/test_tools.py index a2bf39751..ec5d12772 100644 --- a/tests/tools/test_tools.py +++ b/tests/tools/test_tools.py @@ -183,14 +183,49 @@ def test_select_cable(self): self.edisgo, "lv", 0.18, - 1, + length=1, max_voltage_diff=0.01, max_cables=100, - cos_phi=1, - inductive_reactance=False, + power_factor=1, + reactive_power_mode="inductive", ) assert cable_data.name == "NAYY 4x1x300" assert num_parallel_cables == 12 + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, "lv", 0.18, length=1 + ) + assert cable_data.name == "NAYY 4x1x300" + assert 
num_parallel_cables == 3 + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, + "lv", + 0.18, + length=1, + max_voltage_diff=0.01, + max_cables=100, + power_factor=None, + reactive_power_mode="inductive", + component_type="gen", + ) + assert cable_data.name == "NAYY 4x1x300" + assert num_parallel_cables == 8 + try: + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, + "lv", + 0.18, + length=1, + max_voltage_diff=0.01, + max_cables=100, + power_factor=None, + reactive_power_mode="inductive", + component_type="fail", + ) + except ValueError as e: + assert ( + str(e) == "Specified component type is not valid. " + "Must either be 'gen', 'load', 'cp' or 'hp'." + ) def test_get_downstream_buses(self): # ######## test with LV bus ######## From c07dbe144345be3fd53fffd779f01f10fd1f7866 Mon Sep 17 00:00:00 2001 From: birgits Date: Thu, 1 Aug 2024 10:08:48 +0200 Subject: [PATCH 107/141] Add constraint for non-convex formulation --- edisgo/opf/eDisGo_OPF.jl/src/form/bf.jl | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/edisgo/opf/eDisGo_OPF.jl/src/form/bf.jl b/edisgo/opf/eDisGo_OPF.jl/src/form/bf.jl index e1db2d1bb..1245fe7bf 100644 --- a/edisgo/opf/eDisGo_OPF.jl/src/form/bf.jl +++ b/edisgo/opf/eDisGo_OPF.jl/src/form/bf.jl @@ -118,6 +118,23 @@ function constraint_max_line_loading(pm::AbstractSOCBFModelEdisgo, n::Int) end +function constraint_max_line_loading(pm::AbstractNCBFModelEdisgo, n::Int) + p = PowerModels.var(pm, n, :p) + q = PowerModels.var(pm, n, :q) + ll = PowerModels.var(pm, 1, :ll) + s_nom = Dict(i => get(branch, "rate_a", 1.0) for (i,branch) in PowerModels.ref(pm, n, :branch)) + + for (i,branch) in PowerModels.ref(pm, n, :branch) + f_bus = branch["f_bus"] + t_bus = branch["t_bus"] + f_idx = (i, f_bus, t_bus) + if !(branch["storage"]) + JuMP.@constraint(pm.model, (p[f_idx]^2 + q[f_idx]^2)/s_nom[i]^2 <= ll[f_idx]) + end + end +end + + function constraint_power_balance(pm::AbstractBFModelEdisgo, n::Int, i, 
bus_gens, bus_gens_nd, bus_gens_slack, bus_loads, bus_arcs_to, bus_arcs_from, bus_lines_to, bus_storage, bus_pg, bus_qg, bus_pg_nd, bus_qg_nd, bus_pd, bus_qd, branch_r, branch_x, bus_dsm, bus_hps, bus_cps, bus_storage_pf, bus_dsm_pf, bus_hps_pf, bus_cps_pf, bus_gen_nd_pf, bus_gen_d_pf, bus_loads_pf, branch_strg_pf) pt = get(PowerModels.var(pm, n), :p, Dict()); PowerModels._check_var_keys(pt, bus_arcs_to, "active power", "branch") qt = get(PowerModels.var(pm, n), :q, Dict()); PowerModels._check_var_keys(qt, bus_arcs_to, "reactive power", "branch") From b2399829f89d7b17815467604b46cf1ec2e7efbf Mon Sep 17 00:00:00 2001 From: birgits Date: Thu, 1 Aug 2024 10:10:44 +0200 Subject: [PATCH 108/141] Remove warm start for bus voltage as values are not set --- edisgo/opf/eDisGo_OPF.jl/src/core/data.jl | 3 --- 1 file changed, 3 deletions(-) diff --git a/edisgo/opf/eDisGo_OPF.jl/src/core/data.jl b/edisgo/opf/eDisGo_OPF.jl/src/core/data.jl index 1063cc1f4..f180f831a 100644 --- a/edisgo/opf/eDisGo_OPF.jl/src/core/data.jl +++ b/edisgo/opf/eDisGo_OPF.jl/src/core/data.jl @@ -1,7 +1,4 @@ function set_ac_bf_start_values!(network::Dict{String,<:Any}) - for (i,bus) in network["bus"] - bus["w_start"] = bus["w"] - end for (i,gen) in network["gen_nd"] gen["pgc_start"] = gen["pgc"] From c00da3089fcac22cd1f0cab142b2ed41fece15c9 Mon Sep 17 00:00:00 2001 From: birgits Date: Thu, 1 Aug 2024 10:15:16 +0200 Subject: [PATCH 109/141] Fix distribution of DSM --- edisgo/network/overlying_grid.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/edisgo/network/overlying_grid.py b/edisgo/network/overlying_grid.py index 77c9ccadd..241768a20 100644 --- a/edisgo/network/overlying_grid.py +++ b/edisgo/network/overlying_grid.py @@ -385,6 +385,16 @@ def distribute_overlying_grid_requirements(edisgo_obj): scaling_df_min = ( edisgo_obj.dsm.p_min.transpose() / edisgo_obj.dsm.p_min.sum(axis=1) ) + # in case p_max/p_min of all DSM loads is zero in an hour but there is + # positive/negative DSM from 
the overlying grid, this is not correctly + # distributed and may lead to large errors in the time series with the + # distributed DSM + # in the following this is corrected by assuming an equal distribution + # during those hours + equal_dist_factor = 1 / len(dsm_loads) + scaling_df_max.fillna(equal_dist_factor, inplace=True) + scaling_df_min.fillna(equal_dist_factor, inplace=True) + edisgo_copy.timeseries._loads_active_power.loc[:, dsm_loads] = ( edisgo_obj.timeseries._loads_active_power.loc[:, dsm_loads] + ( From 84047488ba0d4747ee809ab7f7953d4be8f4a22e Mon Sep 17 00:00:00 2001 From: joda9 Date: Fri, 2 Aug 2024 13:00:18 +0200 Subject: [PATCH 110/141] adding component type to select cable function --- edisgo/tools/tools.py | 31 +++++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index 789f3053a..932956095 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -200,6 +200,7 @@ def calculate_voltage_diff_per_line( v_nom: float | np.ndarray, reactive_power_mode: str = "inductive", power_factor: float = 0.95, + component_type: str = "load", ) -> float | np.ndarray: """ Calculate the voltage difference across a line in kV. @@ -214,18 +215,28 @@ def calculate_voltage_diff_per_line( Total reactance of the line in Ohms. v_nom : float or array-like Nominal voltage of the line in kV. - sign : int, optional - Sign of the reactance. -1 for inductive and +1 for capacitive. Default is -1. + reactive_power_mode : str, optional + Mode of the reactive power. Default: 'inductive'. + alternative: 'capacitive' power_factor : float, optional Power factor (cosine of the phase angle) of the load or generator. Default is 0.95. + component_type : str, optional + Type of the component to be connected, used to obtain the default reactive power + mode from the configuration. Default: 'load'. + alternative: 'gen' Returns ------- float or array-like Voltage difference in kV. 
""" - sign = q_control.get_q_sign_generator(reactive_power_mode) + if "gen" in component_type: + sign = q_control.get_q_sign_generator(reactive_power_mode) + elif "load" in component_type or "cp" in component_type or "hp" in component_type: + sign = q_control.get_q_sign_load(reactive_power_mode) + else: + raise ValueError("Component type not supported.") sin_phi = np.sqrt(1 - power_factor**2) # Calculate the voltage difference using the formula from VDE-AR-N 4105 voltage_diff = (s_max / (v_nom)) * ( @@ -243,6 +254,7 @@ def voltage_diff_pu( s_max: float | np.ndarray, power_factor: float = 0.95, reactive_power_mode: str = "inductive", + component_type: str = "load", ) -> float | np.ndarray: """ Calculate the voltage difference per unit of nominal voltage. @@ -263,8 +275,10 @@ def voltage_diff_pu( Apparent power the cable must carry in MVA. power_factor : float, optional Cosine phi of the load or generator. Default: 0.95. - sign : int, optional - Sign of the reactance. -1 for inductive and +1 for capacitive. Default is -1. + component_type : str, optional + Type of the component to be connected, used to obtain the default reactive power + mode from the configuration. Default: 'load'. + alternative: 'gen' Returns ------- @@ -284,6 +298,7 @@ def voltage_diff_pu( v_nom, reactive_power_mode=reactive_power_mode, power_factor=power_factor, + component_type=component_type, ) # Convert voltage difference to per unit of nominal voltage @@ -300,7 +315,7 @@ def select_cable( max_voltage_diff: float | None = None, max_cables: int = 7, power_factor: float | None = None, - component_type: str | None = None, + component_type: str | None = "load", reactive_power_mode: str = "inductive", ) -> tuple[pd.Series, int]: """ @@ -334,6 +349,7 @@ def select_cable( Type of the component to be connected, used to obtain the default power factor from the configuration. possible options are 'gen', 'load', 'cp', 'hp' + Default: 'load'. reactive_power_mode : str Mode of the reactive power. 
Default: 'inductive' @@ -344,6 +360,7 @@ def select_cable( """ if component_type is None: component_type = level + "_load" + elif component_type in ["gen", "load", "cp", "hp"]: component_type = level + "_" + component_type else: @@ -390,6 +407,7 @@ def select_cable( s_max=apparent_power, power_factor=power_factor, reactive_power_mode=reactive_power_mode, + component_type=component_type, ) < max_voltage_diff ] @@ -414,6 +432,7 @@ def select_cable( s_max=apparent_power, power_factor=power_factor, reactive_power_mode=reactive_power_mode, + component_type=component_type, ) < max_voltage_diff ] From f1a02e25133fac19ca9a97017cd255bcac814683 Mon Sep 17 00:00:00 2001 From: joda9 Date: Fri, 2 Aug 2024 13:00:55 +0200 Subject: [PATCH 111/141] adding component type to tests --- tests/tools/test_tools.py | 228 +++++++++++++++++++++++++++++++------- 1 file changed, 191 insertions(+), 37 deletions(-) diff --git a/tests/tools/test_tools.py b/tests/tools/test_tools.py index ec5d12772..9116e5ccb 100644 --- a/tests/tools/test_tools.py +++ b/tests/tools/test_tools.py @@ -30,54 +30,142 @@ def test_calculate_line_reactance(self): data = tools.calculate_line_reactance(np.array([2, 3]), 3, 2) assert_allclose(data, np.array([1.88496 / 2, 2.82743 / 2]), rtol=1e-5) - def test_voltage_diff(self): - data = tools.calculate_voltage_diff_per_line(50, 0.125, 0.36, 20, -1, 0.9) - correct_value = 0.11105090491866049 + def test_calculate_voltage_diff_per_line(self): + data = tools.calculate_voltage_diff_per_line( + s_max=50, + r_total=0.125, + x_total=0.36, + v_nom=20, + reactive_power_mode="inductive", + power_factor=0.9, + component_type="gen", + ) + correct_value = -0.11105090491866049 assert np.isclose(data, correct_value) data = tools.calculate_voltage_diff_per_line( - np.array([50, 50]), - np.array([0.125, 0.125]), - np.array([0.36, 0.36]), - 20, - -1, - 0.9, + s_max=np.array([50, 50]), + r_total=np.array([0.125, 0.125]), + x_total=np.array([0.36, 0.36]), + v_nom=20, + 
reactive_power_mode="inductive", + power_factor=0.9, + component_type="gen", ) assert_allclose(data, np.array([correct_value, correct_value]), rtol=1e-5) - data = tools.calculate_voltage_diff_per_line(50, 0.125, 0.36, 40, -1, 0.9) + data = tools.calculate_voltage_diff_per_line( + s_max=50, + r_total=0.125, + x_total=0.36, + v_nom=40, + reactive_power_mode="inductive", + power_factor=0.9, + component_type="gen", + ) assert np.isclose(data, correct_value / 2) - data = tools.calculate_voltage_diff_per_line(100, 0.125, 0.36, 20, -1, 0.9) + data = tools.calculate_voltage_diff_per_line( + s_max=100, + r_total=0.125, + x_total=0.36, + v_nom=20, + reactive_power_mode="inductive", + power_factor=0.9, + component_type="gen", + ) assert np.isclose(data, correct_value * 2) data = tools.calculate_voltage_diff_per_line( - np.array([100, 100]), - np.array([0.125, 0.125]), - np.array([0.36, 0.36]), - np.array([20, 20]), - -1, - 0.9, + s_max=np.array([100, 100]), + r_total=np.array([0.125, 0.125]), + x_total=np.array([0.36, 0.36]), + v_nom=np.array([20, 20]), + reactive_power_mode="inductive", + power_factor=0.9, + component_type="gen", ) assert_allclose( data, np.array([correct_value * 2, correct_value * 2]), rtol=1e-5 ) + Phi = np.pi / 6 + arctanphi = np.arctan(Phi) + R = 0.125 + X = arctanphi * R + data = tools.calculate_voltage_diff_per_line( + s_max=-0.027, # 27 kW generator + r_total=R, + x_total=X, + v_nom=0.23, + reactive_power_mode="capacitive", + power_factor=0.95, + component_type="gen", + ) + data = data / 0.23 # convert to pu + # assert np.isclose(data, 0.2) def test_voltage_diff_pu(self): - data = tools.voltage_diff_pu(0.1, 0.350, 1, 1, 20, 50, 0.9, -1) + data = tools.voltage_diff_pu( + R_per_km=0.1, + L_per_km=0.350, + length=1, + num_parallel=1, + v_nom=20, + s_max=50, + power_factor=0.9, + reactive_power_mode="inductive", + component_type="gen", + ) correct_value = 0.52589253567891375 * 1e-2 assert np.isclose(data, correct_value) data = tools.voltage_diff_pu( - 
np.array([0.1, 0.1]), np.array([0.35, 0.35]), 1, 1, 20, 50, 0.9, -1 + R_per_km=np.array([0.1, 0.1]), + L_per_km=np.array([0.35, 0.35]), + length=1, + num_parallel=1, + v_nom=20, + s_max=50, + power_factor=0.9, + reactive_power_mode="inductive", + component_type="gen", ) assert_allclose(data, np.array([correct_value, correct_value]), rtol=1e-5) - data = tools.voltage_diff_pu(0.1, 0.35, 2, 1, 20, 50, 0.9, -1) + data = tools.voltage_diff_pu( + R_per_km=0.1, + L_per_km=0.35, + length=2, + num_parallel=1, + v_nom=20, + s_max=50, + power_factor=0.9, + reactive_power_mode="inductive", + component_type="gen", + ) assert np.isclose(data, 2 * correct_value) data = tools.voltage_diff_pu( - np.array([0.1, 0.1]), np.array([0.35, 0.35]), 2, 1, 20, 50, 0.9, -1 + R_per_km=np.array([0.1, 0.1]), + L_per_km=np.array([0.35, 0.35]), + length=2, + num_parallel=1, + v_nom=20, + s_max=50, + power_factor=0.9, + reactive_power_mode="inductive", + component_type="gen", ) assert_allclose( data, np.array([2 * correct_value, 2 * correct_value]), rtol=1e-5, ) - data = tools.voltage_diff_pu(0.1, 0.35, 1, 2, 20, 50, 0.9, -1) + + data = tools.voltage_diff_pu( + R_per_km=0.1, + L_per_km=0.35, + length=1, + num_parallel=2, + v_nom=20, + s_max=50, + power_factor=0.9, + reactive_power_mode="inductive", + component_type="gen", + ) assert np.isclose(data, correct_value / 2) def test_calculate_line_resistance(self): @@ -148,36 +236,90 @@ def test_drop_duplicated_columns(self): def test_select_cable(self): # no length given - cable_data, num_parallel_cables = tools.select_cable(self.edisgo, "mv", 5.1) + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, + "mv", + 5.1, + length=0, + max_voltage_diff=None, + max_cables=7, + power_factor=None, + component_type="load", + reactive_power_mode="inductive", + ) assert cable_data.name == "NA2XS2Y 3x1x150 RE/25" assert num_parallel_cables == 1 - cable_data, num_parallel_cables = tools.select_cable(self.edisgo, "mv", 40) + cable_data, 
num_parallel_cables = tools.select_cable( + self.edisgo, + "mv", + 40, + length=0, + max_voltage_diff=None, + max_cables=7, + power_factor=None, + component_type="load", + reactive_power_mode="inductive", + ) assert cable_data.name == "NA2XS(FL)2Y 3x1x500 RM/35" assert num_parallel_cables == 2 - cable_data, num_parallel_cables = tools.select_cable(self.edisgo, "lv", 0.18) + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, + "lv", + 0.18, + length=0, + max_voltage_diff=None, + max_cables=7, + power_factor=None, + component_type="load", + reactive_power_mode="inductive", + ) assert cable_data.name == "NAYY 4x1x150" assert num_parallel_cables == 1 # length given - cable_data, num_parallel_cables = tools.select_cable(self.edisgo, "mv", 5.1, 2) + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, + "mv", + 5.1, + length=2, + max_voltage_diff=None, + max_cables=7, + power_factor=None, + component_type="load", + reactive_power_mode="inductive", + ) assert cable_data.name == "NA2XS2Y 3x1x150 RE/25" assert num_parallel_cables == 1 - cable_data, num_parallel_cables = tools.select_cable(self.edisgo, "mv", 40, 1) + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, + "mv", + 40, + length=1, + max_voltage_diff=None, + max_cables=7, + power_factor=None, + component_type="load", + reactive_power_mode="inductive", + ) assert cable_data.name == "NA2XS(FL)2Y 3x1x500 RM/35" assert num_parallel_cables == 2 - cable_data, num_parallel_cables = tools.select_cable(self.edisgo, "lv", 0.18, 1) - assert cable_data.name == "NAYY 4x1x300" - assert num_parallel_cables == 3 - cable_data, num_parallel_cables = tools.select_cable( - self.edisgo, "lv", 0.18, 1, max_voltage_diff=0.01, max_cables=100 + self.edisgo, + "lv", + 0.18, + length=1, + max_voltage_diff=None, + max_cables=7, + power_factor=None, + component_type="load", + reactive_power_mode="inductive", ) assert cable_data.name == "NAYY 4x1x300" - assert num_parallel_cables == 8 + 
assert num_parallel_cables == 5 cable_data, num_parallel_cables = tools.select_cable( self.edisgo, @@ -187,15 +329,26 @@ def test_select_cable(self): max_voltage_diff=0.01, max_cables=100, power_factor=1, + component_type="load", reactive_power_mode="inductive", ) assert cable_data.name == "NAYY 4x1x300" assert num_parallel_cables == 12 + cable_data, num_parallel_cables = tools.select_cable( - self.edisgo, "lv", 0.18, length=1 + self.edisgo, + "lv", + 0.18, + length=1, + max_voltage_diff=0.01, + max_cables=100, + power_factor=None, + component_type="load", + reactive_power_mode="inductive", ) assert cable_data.name == "NAYY 4x1x300" - assert num_parallel_cables == 3 + assert num_parallel_cables == 14 + cable_data, num_parallel_cables = tools.select_cable( self.edisgo, "lv", @@ -204,11 +357,12 @@ def test_select_cable(self): max_voltage_diff=0.01, max_cables=100, power_factor=None, - reactive_power_mode="inductive", component_type="gen", + reactive_power_mode="inductive", ) assert cable_data.name == "NAYY 4x1x300" assert num_parallel_cables == 8 + try: cable_data, num_parallel_cables = tools.select_cable( self.edisgo, @@ -218,8 +372,8 @@ def test_select_cable(self): max_voltage_diff=0.01, max_cables=100, power_factor=None, - reactive_power_mode="inductive", component_type="fail", + reactive_power_mode="inductive", ) except ValueError as e: assert ( From 9c9f4561970d7deeb1178ecff2fbcb1dcaa3fb2e Mon Sep 17 00:00:00 2001 From: joda9 Date: Mon, 5 Aug 2024 11:35:34 +0200 Subject: [PATCH 112/141] defining return values --- edisgo/tools/tools.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index 932956095..60a80ab77 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -229,7 +229,10 @@ def calculate_voltage_diff_per_line( Returns ------- float or array-like - Voltage difference in kV. + Voltage difference in kV. 
If positive, the voltage difference behaves like + expected, it rises for generators and drops for loads. If negative, + the voltage difference behaves counterintuitively, it drops for generators + and rises for loads. """ if "gen" in component_type: sign = q_control.get_q_sign_generator(reactive_power_mode) From bd9c87e107c1caec775babe9bae1e2e2e61f4c3b Mon Sep 17 00:00:00 2001 From: joda9 Date: Mon, 5 Aug 2024 11:36:23 +0200 Subject: [PATCH 113/141] change name voltage_diff_pu --- edisgo/tools/tools.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index 60a80ab77..e18d8c849 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -242,13 +242,11 @@ def calculate_voltage_diff_per_line( raise ValueError("Component type not supported.") sin_phi = np.sqrt(1 - power_factor**2) # Calculate the voltage difference using the formula from VDE-AR-N 4105 - voltage_diff = (s_max / (v_nom)) * ( - r_total * power_factor + sign * x_total * sin_phi - ) + voltage_diff = (s_max / v_nom) * (r_total * power_factor + sign * x_total * sin_phi) return voltage_diff # in kV -def voltage_diff_pu( +def voltage_diff_pu_per_line( R_per_km: float | np.ndarray, L_per_km: float | np.ndarray, length: float, @@ -401,7 +399,7 @@ def select_cable( ] if length != 0: suitable_cables = suitable_cables[ - voltage_diff_pu( + voltage_diff_pu_per_line( R_per_km=available_cables["R_per_km"], L_per_km=available_cables["L_per_km"], length=length, @@ -426,7 +424,7 @@ def select_cable( ] if length != 0: suitable_cables = suitable_cables[ - voltage_diff_pu( + voltage_diff_pu_per_line( R_per_km=available_cables["R_per_km"], L_per_km=available_cables["L_per_km"], length=length, From 803d7960c7837e0b58081994f672fe2a1c38fbb7 Mon Sep 17 00:00:00 2001 From: joda9 Date: Mon, 5 Aug 2024 11:36:39 +0200 Subject: [PATCH 114/141] adding tests for coverage --- tests/tools/test_tools.py | 170 ++++++++++++++++++++++++++++++-------- 1 file 
changed, 135 insertions(+), 35 deletions(-) diff --git a/tests/tools/test_tools.py b/tests/tools/test_tools.py index 9116e5ccb..b183e0c70 100644 --- a/tests/tools/test_tools.py +++ b/tests/tools/test_tools.py @@ -31,77 +31,133 @@ def test_calculate_line_reactance(self): assert_allclose(data, np.array([1.88496 / 2, 2.82743 / 2]), rtol=1e-5) def test_calculate_voltage_diff_per_line(self): + correct_value_positive_sign = 0.6523893665569375 + correct_value_negative_sign = 1.2016106334430623 + r_total = 0.412 + x_total = 0.252 data = tools.calculate_voltage_diff_per_line( s_max=50, - r_total=0.125, - x_total=0.36, + r_total=r_total, + x_total=x_total, v_nom=20, reactive_power_mode="inductive", power_factor=0.9, component_type="gen", ) - correct_value = -0.11105090491866049 - assert np.isclose(data, correct_value) + assert np.isclose(data, correct_value_positive_sign) data = tools.calculate_voltage_diff_per_line( s_max=np.array([50, 50]), - r_total=np.array([0.125, 0.125]), - x_total=np.array([0.36, 0.36]), + r_total=np.array([r_total, r_total]), + x_total=np.array([x_total, x_total]), v_nom=20, reactive_power_mode="inductive", power_factor=0.9, component_type="gen", ) - assert_allclose(data, np.array([correct_value, correct_value]), rtol=1e-5) + assert_allclose( + data, + np.array([correct_value_positive_sign, correct_value_positive_sign]), + rtol=1e-5, + ) data = tools.calculate_voltage_diff_per_line( s_max=50, - r_total=0.125, - x_total=0.36, + r_total=r_total, + x_total=x_total, v_nom=40, reactive_power_mode="inductive", power_factor=0.9, component_type="gen", ) - assert np.isclose(data, correct_value / 2) + assert np.isclose(data, correct_value_positive_sign / 2) data = tools.calculate_voltage_diff_per_line( s_max=100, - r_total=0.125, - x_total=0.36, + r_total=r_total, + x_total=x_total, v_nom=20, reactive_power_mode="inductive", power_factor=0.9, component_type="gen", ) - assert np.isclose(data, correct_value * 2) + assert np.isclose(data, 
correct_value_positive_sign * 2) data = tools.calculate_voltage_diff_per_line( s_max=np.array([100, 100]), - r_total=np.array([0.125, 0.125]), - x_total=np.array([0.36, 0.36]), + r_total=np.array([r_total, r_total]), + x_total=np.array([x_total, x_total]), v_nom=np.array([20, 20]), reactive_power_mode="inductive", power_factor=0.9, component_type="gen", ) assert_allclose( - data, np.array([correct_value * 2, correct_value * 2]), rtol=1e-5 + data, + np.array( + [correct_value_positive_sign * 2, correct_value_positive_sign * 2] + ), + rtol=1e-5, ) + data = tools.calculate_voltage_diff_per_line( + s_max=100, + r_total=r_total, + x_total=x_total, + v_nom=20, + reactive_power_mode="capacitive", + power_factor=0.9, + component_type="gen", + ) + assert np.isclose(data, correct_value_negative_sign * 2) + data = tools.calculate_voltage_diff_per_line( + s_max=100, + r_total=r_total, + x_total=x_total, + v_nom=20, + reactive_power_mode="inductive", + power_factor=0.9, + component_type="load", + ) + assert np.isclose(data, correct_value_negative_sign * 2) + data = tools.calculate_voltage_diff_per_line( + s_max=100, + r_total=r_total, + x_total=x_total, + v_nom=20, + reactive_power_mode="capacitive", + power_factor=0.9, + component_type="load", + ) + assert np.isclose(data, correct_value_positive_sign * 2) + try: + data = tools.calculate_voltage_diff_per_line( + s_max=100, + r_total=r_total, + x_total=x_total, + v_nom=20, + reactive_power_mode="inductive", + power_factor=0.9, + component_type="fail", + ) + except ValueError as e: + assert str(e) == "Component type not supported." 
+ Phi = np.pi / 6 arctanphi = np.arctan(Phi) - R = 0.125 + R = r_total X = arctanphi * R + v_nom = 0.4 data = tools.calculate_voltage_diff_per_line( - s_max=-0.027, # 27 kW generator + s_max=0.027, # 27 kW generator r_total=R, x_total=X, - v_nom=0.23, - reactive_power_mode="capacitive", + v_nom=v_nom, + reactive_power_mode="inductive", power_factor=0.95, component_type="gen", ) - data = data / 0.23 # convert to pu - # assert np.isclose(data, 0.2) + assert np.isclose(data / v_nom, 0.022230950086158 / v_nom) - def test_voltage_diff_pu(self): - data = tools.voltage_diff_pu( + def test_voltage_diff_pu_per_line(self): + correct_value_negative_sign = 0.52589253567891375 * 1e-2 + correct_value_positive_sign = 0.017241074643210865 + data = tools.voltage_diff_pu_per_line( R_per_km=0.1, L_per_km=0.350, length=1, @@ -112,9 +168,8 @@ def test_voltage_diff_pu(self): reactive_power_mode="inductive", component_type="gen", ) - correct_value = 0.52589253567891375 * 1e-2 - assert np.isclose(data, correct_value) - data = tools.voltage_diff_pu( + assert np.isclose(data, correct_value_negative_sign) + data = tools.voltage_diff_pu_per_line( R_per_km=np.array([0.1, 0.1]), L_per_km=np.array([0.35, 0.35]), length=1, @@ -125,8 +180,12 @@ def test_voltage_diff_pu(self): reactive_power_mode="inductive", component_type="gen", ) - assert_allclose(data, np.array([correct_value, correct_value]), rtol=1e-5) - data = tools.voltage_diff_pu( + assert_allclose( + data, + np.array([correct_value_negative_sign, correct_value_negative_sign]), + rtol=1e-5, + ) + data = tools.voltage_diff_pu_per_line( R_per_km=0.1, L_per_km=0.35, length=2, @@ -137,8 +196,8 @@ def test_voltage_diff_pu(self): reactive_power_mode="inductive", component_type="gen", ) - assert np.isclose(data, 2 * correct_value) - data = tools.voltage_diff_pu( + assert np.isclose(data, 2 * correct_value_negative_sign) + data = tools.voltage_diff_pu_per_line( R_per_km=np.array([0.1, 0.1]), L_per_km=np.array([0.35, 0.35]), length=2, @@ -151,11 
+210,26 @@ def test_voltage_diff_pu(self): ) assert_allclose( data, - np.array([2 * correct_value, 2 * correct_value]), + np.array( + [2 * correct_value_negative_sign, 2 * correct_value_negative_sign] + ), rtol=1e-5, ) - data = tools.voltage_diff_pu( + data = tools.voltage_diff_pu_per_line( + R_per_km=0.1, + L_per_km=0.35, + length=1, + num_parallel=2, + v_nom=20, + s_max=50, + power_factor=0.9, + reactive_power_mode="inductive", + component_type="gen", + ) + assert np.isclose(data, correct_value_negative_sign / 2) + + data = tools.voltage_diff_pu_per_line( R_per_km=0.1, L_per_km=0.35, length=1, @@ -164,9 +238,35 @@ def test_voltage_diff_pu(self): s_max=50, power_factor=0.9, reactive_power_mode="inductive", + component_type="load", + ) + assert np.isclose(data, correct_value_positive_sign / 2) + + data = tools.voltage_diff_pu_per_line( + R_per_km=0.1, + L_per_km=0.35, + length=1, + num_parallel=2, + v_nom=20, + s_max=50, + power_factor=0.9, + reactive_power_mode="capacitive", + component_type="load", + ) + assert np.isclose(data, correct_value_negative_sign / 2) + + data = tools.voltage_diff_pu_per_line( + R_per_km=0.1, + L_per_km=0.35, + length=1, + num_parallel=2, + v_nom=20, + s_max=50, + power_factor=0.9, + reactive_power_mode="capacitive", component_type="gen", ) - assert np.isclose(data, correct_value / 2) + assert np.isclose(data, correct_value_positive_sign / 2) def test_calculate_line_resistance(self): # test single line @@ -244,7 +344,7 @@ def test_select_cable(self): max_voltage_diff=None, max_cables=7, power_factor=None, - component_type="load", + component_type=None, reactive_power_mode="inductive", ) assert cable_data.name == "NA2XS2Y 3x1x150 RE/25" From 2115254263b7a351d7ebf678807a60aa215a4ed1 Mon Sep 17 00:00:00 2001 From: joda9 Date: Mon, 5 Aug 2024 13:53:54 +0200 Subject: [PATCH 115/141] Refactor voltage difference calculation to use per unit (pu) instead of kilovolts (kV) --- edisgo/tools/tools.py | 20 +++++++++++--------- 1 file changed, 11 
insertions(+), 9 deletions(-) diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index e18d8c849..257230e05 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -193,7 +193,7 @@ def drop_duplicated_columns(df, keep="last"): return df.loc[:, ~df.columns.duplicated(keep=keep)] -def calculate_voltage_diff_per_line( +def calculate_voltage_diff_pu_per_line( s_max: float | np.ndarray, r_total: float | np.ndarray, x_total: float | np.ndarray, @@ -229,7 +229,7 @@ def calculate_voltage_diff_per_line( Returns ------- float or array-like - Voltage difference in kV. If positive, the voltage difference behaves like + Voltage difference in pu. If positive, the voltage difference behaves like expected, it rises for generators and drops for loads. If negative, the voltage difference behaves counterintuitively, it drops for generators and rises for loads. @@ -242,11 +242,13 @@ def calculate_voltage_diff_per_line( raise ValueError("Component type not supported.") sin_phi = np.sqrt(1 - power_factor**2) # Calculate the voltage difference using the formula from VDE-AR-N 4105 - voltage_diff = (s_max / v_nom) * (r_total * power_factor + sign * x_total * sin_phi) - return voltage_diff # in kV + voltage_diff = (s_max / (v_nom**2)) * ( + r_total * power_factor + sign * x_total * sin_phi + ) + return voltage_diff # in pu -def voltage_diff_pu_per_line( +def calculate_voltage_difference_pu_per_line_with_length( R_per_km: float | np.ndarray, L_per_km: float | np.ndarray, length: float, @@ -292,7 +294,7 @@ def voltage_diff_pu_per_line( x_total = calculate_line_reactance(L_per_km, length, num_parallel) # Calculate the voltage drop or increase - delta_v = calculate_voltage_diff_per_line( + delta_v = calculate_voltage_diff_pu_per_line( s_max, r_total, x_total, @@ -303,7 +305,7 @@ def voltage_diff_pu_per_line( ) # Convert voltage difference to per unit of nominal voltage - voltage_difference_pu = delta_v / v_nom + voltage_difference_pu = delta_v return voltage_difference_pu @@ 
-399,7 +401,7 @@ def select_cable( ] if length != 0: suitable_cables = suitable_cables[ - voltage_diff_pu_per_line( + calculate_voltage_difference_pu_per_line_with_length( R_per_km=available_cables["R_per_km"], L_per_km=available_cables["L_per_km"], length=length, @@ -424,7 +426,7 @@ def select_cable( ] if length != 0: suitable_cables = suitable_cables[ - voltage_diff_pu_per_line( + calculate_voltage_difference_pu_per_line_with_length( R_per_km=available_cables["R_per_km"], L_per_km=available_cables["L_per_km"], length=length, From e68b601b978de75af01a277510d0709b29e4f3f7 Mon Sep 17 00:00:00 2001 From: joda9 Date: Mon, 5 Aug 2024 13:54:33 +0200 Subject: [PATCH 116/141] Refactor voltage difference calculation to use per unit (pu) instead of kilovolts (kV) --- tests/tools/test_tools.py | 71 ++++++++++++++++++++++++++------------- 1 file changed, 47 insertions(+), 24 deletions(-) diff --git a/tests/tools/test_tools.py b/tests/tools/test_tools.py index b183e0c70..193ed4c5c 100644 --- a/tests/tools/test_tools.py +++ b/tests/tools/test_tools.py @@ -30,12 +30,12 @@ def test_calculate_line_reactance(self): data = tools.calculate_line_reactance(np.array([2, 3]), 3, 2) assert_allclose(data, np.array([1.88496 / 2, 2.82743 / 2]), rtol=1e-5) - def test_calculate_voltage_diff_per_line(self): - correct_value_positive_sign = 0.6523893665569375 - correct_value_negative_sign = 1.2016106334430623 + def test_calculate_voltage_diff_pu_per_line(self): + correct_value_positive_sign = 0.03261946832784687 + correct_value_negative_sign = 0.06008053167215312 r_total = 0.412 x_total = 0.252 - data = tools.calculate_voltage_diff_per_line( + data = tools.calculate_voltage_diff_pu_per_line( s_max=50, r_total=r_total, x_total=x_total, @@ -45,7 +45,7 @@ def test_calculate_voltage_diff_per_line(self): component_type="gen", ) assert np.isclose(data, correct_value_positive_sign) - data = tools.calculate_voltage_diff_per_line( + data = tools.calculate_voltage_diff_pu_per_line( s_max=np.array([50, 
50]), r_total=np.array([r_total, r_total]), x_total=np.array([x_total, x_total]), @@ -59,7 +59,7 @@ def test_calculate_voltage_diff_per_line(self): np.array([correct_value_positive_sign, correct_value_positive_sign]), rtol=1e-5, ) - data = tools.calculate_voltage_diff_per_line( + data = tools.calculate_voltage_diff_pu_per_line( s_max=50, r_total=r_total, x_total=x_total, @@ -68,8 +68,8 @@ def test_calculate_voltage_diff_per_line(self): power_factor=0.9, component_type="gen", ) - assert np.isclose(data, correct_value_positive_sign / 2) - data = tools.calculate_voltage_diff_per_line( + assert np.isclose(data, correct_value_positive_sign / 4) + data = tools.calculate_voltage_diff_pu_per_line( s_max=100, r_total=r_total, x_total=x_total, @@ -79,7 +79,7 @@ def test_calculate_voltage_diff_per_line(self): component_type="gen", ) assert np.isclose(data, correct_value_positive_sign * 2) - data = tools.calculate_voltage_diff_per_line( + data = tools.calculate_voltage_diff_pu_per_line( s_max=np.array([100, 100]), r_total=np.array([r_total, r_total]), x_total=np.array([x_total, x_total]), @@ -95,7 +95,7 @@ def test_calculate_voltage_diff_per_line(self): ), rtol=1e-5, ) - data = tools.calculate_voltage_diff_per_line( + data = tools.calculate_voltage_diff_pu_per_line( s_max=100, r_total=r_total, x_total=x_total, @@ -105,7 +105,7 @@ def test_calculate_voltage_diff_per_line(self): component_type="gen", ) assert np.isclose(data, correct_value_negative_sign * 2) - data = tools.calculate_voltage_diff_per_line( + data = tools.calculate_voltage_diff_pu_per_line( s_max=100, r_total=r_total, x_total=x_total, @@ -115,7 +115,7 @@ def test_calculate_voltage_diff_per_line(self): component_type="load", ) assert np.isclose(data, correct_value_negative_sign * 2) - data = tools.calculate_voltage_diff_per_line( + data = tools.calculate_voltage_diff_pu_per_line( s_max=100, r_total=r_total, x_total=x_total, @@ -126,7 +126,7 @@ def test_calculate_voltage_diff_per_line(self): ) assert 
np.isclose(data, correct_value_positive_sign * 2) try: - data = tools.calculate_voltage_diff_per_line( + data = tools.calculate_voltage_diff_pu_per_line( s_max=100, r_total=r_total, x_total=x_total, @@ -143,7 +143,7 @@ def test_calculate_voltage_diff_per_line(self): R = r_total X = arctanphi * R v_nom = 0.4 - data = tools.calculate_voltage_diff_per_line( + data = tools.calculate_voltage_diff_pu_per_line( s_max=0.027, # 27 kW generator r_total=R, x_total=X, @@ -152,12 +152,35 @@ def test_calculate_voltage_diff_per_line(self): power_factor=0.95, component_type="gen", ) - assert np.isclose(data / v_nom, 0.022230950086158 / v_nom) + assert np.isclose(data, 0.055577375215395) + + # test the examples from VDE-AR-N 4105 attachment D + data = tools.calculate_voltage_diff_pu_per_line( + s_max=0.02, + r_total=0.2001, + x_total=0.1258, + v_nom=0.4, + reactive_power_mode="inductive", + power_factor=1, + component_type="gen", + ) + assert np.isclose(data, 0.025, rtol=1e-2) + + data = tools.calculate_voltage_diff_pu_per_line( + s_max=0.022, + r_total=0.2001, + x_total=0.1258, + v_nom=0.4, + reactive_power_mode="inductive", + power_factor=0.9, + component_type="gen", + ) + assert np.isclose(data, 0.0173, rtol=1e-2) - def test_voltage_diff_pu_per_line(self): + def test_calculate_voltage_difference_pu_per_line_with_length(self): correct_value_negative_sign = 0.52589253567891375 * 1e-2 correct_value_positive_sign = 0.017241074643210865 - data = tools.voltage_diff_pu_per_line( + data = tools.calculate_voltage_difference_pu_per_line_with_length( R_per_km=0.1, L_per_km=0.350, length=1, @@ -169,7 +192,7 @@ def test_voltage_diff_pu_per_line(self): component_type="gen", ) assert np.isclose(data, correct_value_negative_sign) - data = tools.voltage_diff_pu_per_line( + data = tools.calculate_voltage_difference_pu_per_line_with_length( R_per_km=np.array([0.1, 0.1]), L_per_km=np.array([0.35, 0.35]), length=1, @@ -185,7 +208,7 @@ def test_voltage_diff_pu_per_line(self): 
np.array([correct_value_negative_sign, correct_value_negative_sign]), rtol=1e-5, ) - data = tools.voltage_diff_pu_per_line( + data = tools.calculate_voltage_difference_pu_per_line_with_length( R_per_km=0.1, L_per_km=0.35, length=2, @@ -197,7 +220,7 @@ def test_voltage_diff_pu_per_line(self): component_type="gen", ) assert np.isclose(data, 2 * correct_value_negative_sign) - data = tools.voltage_diff_pu_per_line( + data = tools.calculate_voltage_difference_pu_per_line_with_length( R_per_km=np.array([0.1, 0.1]), L_per_km=np.array([0.35, 0.35]), length=2, @@ -216,7 +239,7 @@ def test_voltage_diff_pu_per_line(self): rtol=1e-5, ) - data = tools.voltage_diff_pu_per_line( + data = tools.calculate_voltage_difference_pu_per_line_with_length( R_per_km=0.1, L_per_km=0.35, length=1, @@ -229,7 +252,7 @@ def test_voltage_diff_pu_per_line(self): ) assert np.isclose(data, correct_value_negative_sign / 2) - data = tools.voltage_diff_pu_per_line( + data = tools.calculate_voltage_difference_pu_per_line_with_length( R_per_km=0.1, L_per_km=0.35, length=1, @@ -242,7 +265,7 @@ def test_voltage_diff_pu_per_line(self): ) assert np.isclose(data, correct_value_positive_sign / 2) - data = tools.voltage_diff_pu_per_line( + data = tools.calculate_voltage_difference_pu_per_line_with_length( R_per_km=0.1, L_per_km=0.35, length=1, @@ -255,7 +278,7 @@ def test_voltage_diff_pu_per_line(self): ) assert np.isclose(data, correct_value_negative_sign / 2) - data = tools.voltage_diff_pu_per_line( + data = tools.calculate_voltage_difference_pu_per_line_with_length( R_per_km=0.1, L_per_km=0.35, length=1, From 06f74d587880b4ed067d9c5c660ca03008c51aa2 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 7 Aug 2024 13:22:19 +0200 Subject: [PATCH 117/141] LV grids are now georeferenced --- edisgo/network/grids.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/edisgo/network/grids.py b/edisgo/network/grids.py index 7f466b3dc..bcb09bfc9 100644 --- a/edisgo/network/grids.py +++ b/edisgo/network/grids.py @@ 
-650,10 +650,3 @@ def draw( else: plt.savefig(filename, dpi=150, bbox_inches="tight", pad_inches=0.1) plt.close() - - @property - def geopandas(self): - """ - TODO: Remove this as soon as LVGrids are georeferenced - """ - raise NotImplementedError("LV Grids are not georeferenced yet.") From 3d6d9212c38d4fb2a653a5940172b0f1ba902406 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 7 Aug 2024 13:22:57 +0200 Subject: [PATCH 118/141] Geopandas property only returns geopandas for given grid --- edisgo/network/grids.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/edisgo/network/grids.py b/edisgo/network/grids.py index bcb09bfc9..42a50916d 100644 --- a/edisgo/network/grids.py +++ b/edisgo/network/grids.py @@ -90,17 +90,16 @@ def graph(self): @property def geopandas(self): """ - Returns components as :geopandas:`GeoDataFrame`\\ s + Returns components as :geopandas:`GeoDataFrame`\\ s. Returns container with :geopandas:`GeoDataFrame`\\ s containing all georeferenced components within the grid. Returns ------- - :class:`~.tools.geopandas_helper.GeoPandasGridContainer` or \ - list(:class:`~.tools.geopandas_helper.GeoPandasGridContainer`) + :class:`~.tools.geopandas_helper.GeoPandasGridContainer` Data container with GeoDataFrames containing all georeferenced components - within the grid(s). + within the grid. 
""" return to_geopandas(self) From c3c7e2ad8970d2960cd75dcd55eb44dda808ba39 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 7 Aug 2024 13:24:46 +0200 Subject: [PATCH 119/141] Using lists can lead to bugs here in case they are not sorted the same way, it is therefore safer to add geoms as new columns --- edisgo/tools/geopandas_helper.py | 28 ++++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/edisgo/tools/geopandas_helper.py b/edisgo/tools/geopandas_helper.py index 6c48a62e6..d12c5d0c3 100644 --- a/edisgo/tools/geopandas_helper.py +++ b/edisgo/tools/geopandas_helper.py @@ -211,18 +211,22 @@ def to_geopandas(grid_obj: Grid): # convert lines_df lines_df = grid_obj.lines_df - geom_0 = lines_df.merge( - buses_gdf[["geometry"]], left_on="bus0", right_index=True - ).geometry - geom_1 = lines_df.merge( - buses_gdf[["geometry"]], left_on="bus1", right_index=True - ).geometry - - geometry = [ - LineString([point_0, point_1]) for point_0, point_1 in list(zip(geom_0, geom_1)) - ] - - lines_gdf = gpd.GeoDataFrame(lines_df.assign(geometry=geometry), crs=f"EPSG:{srid}") + lines_gdf = lines_df.merge( + buses_gdf[["geometry", "v_nom"]].rename(columns={"geometry": "geom_0"}), + left_on="bus0", + right_index=True, + ) + lines_gdf = lines_gdf.merge( + buses_gdf[["geometry"]].rename(columns={"geometry": "geom_1"}), + left_on="bus1", + right_index=True, + ) + lines_gdf["geometry"] = lines_gdf.apply( + lambda _: LineString([_["geom_0"], _["geom_1"]]), axis=1 + ) + lines_gdf = gpd.GeoDataFrame( + lines_gdf.drop(columns=["geom_0", "geom_1"]), crs=f"EPSG:{srid}" + ) return GeoPandasGridContainer( crs=f"EPSG:{srid}", From db8657dd3448bb534c6e5f11244e7f10b0ece9cf Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 7 Aug 2024 13:25:58 +0200 Subject: [PATCH 120/141] Just use same index as original dataframe, which makes testing easier --- edisgo/tools/geopandas_helper.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git 
a/edisgo/tools/geopandas_helper.py b/edisgo/tools/geopandas_helper.py index d12c5d0c3..cfef9ae0a 100644 --- a/edisgo/tools/geopandas_helper.py +++ b/edisgo/tools/geopandas_helper.py @@ -204,9 +204,7 @@ def to_geopandas(grid_obj: Grid): crs=f"EPSG:{srid}", ) if components_dict[component.replace("_df", "_gdf")].empty: - components_dict[component.replace("_df", "_gdf")].index = components_dict[ - component.replace("_df", "_gdf") - ].index.astype(object) + components_dict[component.replace("_df", "_gdf")].index = attr.index # convert lines_df lines_df = grid_obj.lines_df From e8720e10ef890210a6f5c9930fd0d6262cd3a6b4 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 7 Aug 2024 14:05:36 +0200 Subject: [PATCH 121/141] Make srid a parameter to allow using the function for Topology as well --- edisgo/network/grids.py | 2 +- edisgo/network/topology.py | 6 ++++-- edisgo/tools/geopandas_helper.py | 15 ++++++++------- 3 files changed, 13 insertions(+), 10 deletions(-) diff --git a/edisgo/network/grids.py b/edisgo/network/grids.py index 42a50916d..20d50ed74 100644 --- a/edisgo/network/grids.py +++ b/edisgo/network/grids.py @@ -102,7 +102,7 @@ def geopandas(self): within the grid. """ - return to_geopandas(self) + return to_geopandas(self, srid=self.edisgo_obj.topology.grid_district["srid"]) @property def station(self): diff --git a/edisgo/network/topology.py b/edisgo/network/topology.py index 2462314f0..bbe98c9e4 100755 --- a/edisgo/network/topology.py +++ b/edisgo/network/topology.py @@ -15,7 +15,7 @@ from edisgo.network.components import Switch from edisgo.network.grids import LVGrid, MVGrid -from edisgo.tools import geo, networkx_helper +from edisgo.tools import geo, geopandas_helper, networkx_helper from edisgo.tools.tools import ( calculate_apparent_power, calculate_line_reactance, @@ -2777,7 +2777,9 @@ def to_geopandas(self, mode: str = "mv"): within the grid(s). 
""" - if mode == "mv": + if mode is None: + return geopandas_helper.to_geopandas(self, srid=self.grid_district["srid"]) + elif mode == "mv": return self.mv_grid.geopandas elif mode == "lv": raise NotImplementedError("LV Grids are not georeferenced yet.") diff --git a/edisgo/tools/geopandas_helper.py b/edisgo/tools/geopandas_helper.py index cfef9ae0a..14066d560 100644 --- a/edisgo/tools/geopandas_helper.py +++ b/edisgo/tools/geopandas_helper.py @@ -11,6 +11,7 @@ if TYPE_CHECKING: from edisgo.network.grids import Grid + from edisgo.network.topology import Topology COMPONENTS: list[str] = [ "generators_df", @@ -162,14 +163,17 @@ def plot(self): raise NotImplementedError -def to_geopandas(grid_obj: Grid): +def to_geopandas(grid_obj: Grid | Topology, srid: int) -> GeoPandasGridContainer: """ - Translates all DataFrames with geolocations within a Grid class to GeoDataFrames. + Translates all DataFrames with geolocations within a grid topology to GeoDataFrames. Parameters ---------- - grid_obj : :class:`~.network.grids.Grid` - Grid object to transform. + grid_obj : :class:`~.network.grids.Grid` or :class:`~.network.topology.Topology` + Grid or Topology object to transform. + srid : int + SRID (spatial reference ID) of x and y coordinates of buses. Usually given in + Topology.grid_district["srid"]. Returns ------- @@ -178,9 +182,6 @@ def to_geopandas(grid_obj: Grid): their geolocation. 
""" - # get srid id - srid = grid_obj._edisgo_obj.topology.grid_district["srid"] - # convert buses_df buses_df = grid_obj.buses_df buses_df = buses_df.assign( From 5fbc41c497bd30ec24221b9c8c4ad52a80fbfb59 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 7 Aug 2024 14:13:41 +0200 Subject: [PATCH 122/141] Enable getting geodataframes for LV grids as well --- edisgo/network/topology.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/edisgo/network/topology.py b/edisgo/network/topology.py index bbe98c9e4..f60193ad3 100755 --- a/edisgo/network/topology.py +++ b/edisgo/network/topology.py @@ -2756,7 +2756,7 @@ def to_graph(self): self.transformers_df, ) - def to_geopandas(self, mode: str = "mv"): + def to_geopandas(self, mode: str = None, lv_grid_id: int = None): """ Returns components as :geopandas:`GeoDataFrame`\\ s. @@ -2766,15 +2766,21 @@ def to_geopandas(self, mode: str = "mv"): Parameters ---------- mode : str - Return mode. If mode is "mv" the mv components are returned. If mode is "lv" - a generator with a container per lv grid is returned. Default: "mv" + If `mode` is None, GeoDataFrames for the MV grid and underlying LV grids is + returned. If `mode` is "mv", GeoDataFrames for only the MV grid are + returned. If `mode` is "lv", GeoDataFrames for the LV grid specified through + `lv_grid_id` are returned. + Default: None. + lv_grid_id : int + Only needs to be provided in case `mode` is "lv". In that case `lv_grid_id` + gives the LV grid ID as integer of the LV grid for which to return the + geodataframes. Returns ------- - :class:`~.tools.geopandas_helper.GeoPandasGridContainer` or \ - list(:class:`~.tools.geopandas_helper.GeoPandasGridContainer`) + :class:`~.tools.geopandas_helper.GeoPandasGridContainer` Data container with GeoDataFrames containing all georeferenced components - within the grid(s). + within the grid. 
""" if mode is None: @@ -2782,9 +2788,7 @@ def to_geopandas(self, mode: str = "mv"): elif mode == "mv": return self.mv_grid.geopandas elif mode == "lv": - raise NotImplementedError("LV Grids are not georeferenced yet.") - # for lv_grid in self.mv_grid.lv_grids: - # yield lv_grid.geopandas + return self.get_lv_grid(name=lv_grid_id).geopandas else: raise ValueError(f"{mode} is not valid. See docstring for more info.") From bf24775b77d7516caf355655988286600ea0a84f Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 7 Aug 2024 14:17:32 +0200 Subject: [PATCH 123/141] Complete type hinting --- edisgo/network/topology.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/edisgo/network/topology.py b/edisgo/network/topology.py index f60193ad3..997a3b9fc 100755 --- a/edisgo/network/topology.py +++ b/edisgo/network/topology.py @@ -5,6 +5,7 @@ import random import warnings +from typing import TYPE_CHECKING from zipfile import ZipFile import networkx as nx @@ -30,6 +31,9 @@ from shapely.ops import transform from shapely.wkt import loads as wkt_loads +if TYPE_CHECKING: + from edisgo.tools.geopandas_helper import GeoPandasGridContainer + logger = logging.getLogger(__name__) COLUMNS = { @@ -2756,7 +2760,9 @@ def to_graph(self): self.transformers_df, ) - def to_geopandas(self, mode: str = None, lv_grid_id: int = None): + def to_geopandas( + self, mode: str | None = None, lv_grid_id: int | None = None + ) -> GeoPandasGridContainer: """ Returns components as :geopandas:`GeoDataFrame`\\ s. 
From 5178018c7fb181cd84ba730d1854701f52456adc Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 7 Aug 2024 14:22:02 +0200 Subject: [PATCH 124/141] Add further tests for to_geopandas --- tests/network/test_topology.py | 41 +++++++++++++----- tests/tools/test_geopandas_helper.py | 65 ++++++++++++++++++++++++++++ 2 files changed, 95 insertions(+), 11 deletions(-) create mode 100644 tests/tools/test_geopandas_helper.py diff --git a/tests/network/test_topology.py b/tests/network/test_topology.py index 0baf02f34..a3422db45 100644 --- a/tests/network/test_topology.py +++ b/tests/network/test_topology.py @@ -951,9 +951,17 @@ def setup_class(self): self.edisgo.set_time_series_worst_case_analysis() def test_to_geopandas(self): - geopandas_container = self.edisgo.topology.to_geopandas() + # further tests of to_geopandas are conducted in test_geopandas_helper.py - assert isinstance(geopandas_container, GeoPandasGridContainer) + # set up edisgo object with georeferenced LV + edisgo_geo = EDisGo( + ding0_grid=pytest.ding0_test_network_3_path, legacy_ding0_grids=False + ) + test_suits = { + "mv": {"edisgo_obj": self.edisgo, "mode": "mv", "lv_grid_id": None}, + "lv": {"edisgo_obj": edisgo_geo, "mode": "lv", "lv_grid_id": 1164120002}, + "mv+lv": {"edisgo_obj": edisgo_geo, "mode": None, "lv_grid_id": None}, + } attrs = [ "buses_gdf", @@ -964,19 +972,30 @@ def test_to_geopandas(self): "transformers_gdf", ] - for attr_str in attrs: - attr = getattr(geopandas_container, attr_str) - grid_attr = getattr( - self.edisgo.topology.mv_grid, attr_str.replace("_gdf", "_df") + for test_suit, params in test_suits.items(): + # call to_geopandas() function with different settings + geopandas_container = params["edisgo_obj"].topology.to_geopandas( + mode=params["mode"], lv_grid_id=params["lv_grid_id"] ) - assert isinstance(attr, GeoDataFrame) + assert isinstance(geopandas_container, GeoPandasGridContainer) - common_cols = list(set(attr.columns).intersection(grid_attr.columns)) + # check that content 
of geodataframes is the same as content of original + # dataframes + for attr_str in attrs: + grid = getattr(geopandas_container, "grid") + attr = getattr(geopandas_container, attr_str) + grid_attr = getattr(grid, attr_str.replace("_gdf", "_df")) - assert_frame_equal( - attr[common_cols], grid_attr[common_cols], check_names=False - ) + assert isinstance(attr, GeoDataFrame) + + common_cols = list(set(attr.columns).intersection(grid_attr.columns)) + + assert_frame_equal( + attr[common_cols].sort_index(), + grid_attr[common_cols].sort_index(), + check_names=False, + ) def test_from_csv(self): """ diff --git a/tests/tools/test_geopandas_helper.py b/tests/tools/test_geopandas_helper.py new file mode 100644 index 000000000..a9aca9427 --- /dev/null +++ b/tests/tools/test_geopandas_helper.py @@ -0,0 +1,65 @@ +import pytest + +from edisgo import EDisGo +from edisgo.tools import geopandas_helper + + +class TestGeopandasHelper: + @classmethod + def setup_class(self): + self.edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_path) + + def test_to_geopandas(self): + # further tests of this function are conducted in test_topology.py + # test MV grid + data = geopandas_helper.to_geopandas(self.edisgo.topology.mv_grid, 4326) + assert data.buses_gdf.shape[0] == self.edisgo.topology.mv_grid.buses_df.shape[0] + assert ( + data.buses_gdf.shape[1] + == self.edisgo.topology.mv_grid.buses_df.shape[1] + 1 - 2 + ) + assert "geometry" in data.buses_gdf.columns + + assert data.lines_gdf.shape[0] == self.edisgo.topology.mv_grid.lines_df.shape[0] + assert ( + data.lines_gdf.shape[1] + == self.edisgo.topology.mv_grid.lines_df.shape[1] + 2 + ) + assert "geometry" in data.lines_gdf.columns + + assert data.loads_gdf.shape[0] == self.edisgo.topology.mv_grid.loads_df.shape[0] + assert ( + data.loads_gdf.shape[1] + == self.edisgo.topology.mv_grid.loads_df.shape[1] + 2 + ) + assert "geometry" in data.loads_gdf.columns + + assert ( + data.generators_gdf.shape[0] + == 
self.edisgo.topology.mv_grid.generators_df.shape[0] + ) + assert ( + data.generators_gdf.shape[1] + == self.edisgo.topology.mv_grid.generators_df.shape[1] + 2 + ) + assert "geometry" in data.generators_gdf.columns + + assert ( + data.storage_units_gdf.shape[0] + == self.edisgo.topology.mv_grid.storage_units_df.shape[0] + ) + assert ( + data.storage_units_gdf.shape[1] + == self.edisgo.topology.mv_grid.storage_units_df.shape[1] + 2 + ) + assert "geometry" in data.storage_units_gdf.columns + + assert ( + data.transformers_gdf.shape[0] + == self.edisgo.topology.mv_grid.transformers_df.shape[0] + ) + assert ( + data.transformers_gdf.shape[1] + == self.edisgo.topology.mv_grid.transformers_df.shape[1] + 2 + ) + assert "geometry" in data.transformers_gdf.columns From 93e3592e5e24c6144d2e00ce1ff78fe01229ab2e Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 7 Aug 2024 16:40:10 +0200 Subject: [PATCH 125/141] Rename config parameters to make wording more consistent --- edisgo/config/config_timeseries_default.cfg | 40 ++++++------- edisgo/flex_opt/q_control.py | 64 +++++++++------------ edisgo/io/powermodels_io.py | 31 +++++----- edisgo/network/timeseries.py | 40 +++++++------ edisgo/tools/config.py | 2 +- edisgo/tools/tools.py | 19 ++++-- tests/flex_opt/test_q_control.py | 20 +++---- tests/io/test_powermodels_io.py | 6 +- 8 files changed, 111 insertions(+), 111 deletions(-) diff --git a/edisgo/config/config_timeseries_default.cfg b/edisgo/config/config_timeseries_default.cfg index bfb97351c..a1373e895 100644 --- a/edisgo/config/config_timeseries_default.cfg +++ b/edisgo/config/config_timeseries_default.cfg @@ -88,16 +88,16 @@ lv_load_case_hp = 1.0 # =========================== # power factors used to generate reactive power time series for loads and generators -mv_gen = 0.9 -mv_load = 0.9 -mv_storage = 0.9 -mv_cp = 1.0 -mv_hp = 1.0 -lv_gen = 0.95 -lv_load = 0.95 -lv_storage = 0.95 -lv_cp = 1.0 -lv_hp = 1.0 +mv_generator = 0.9 +mv_conventional_load = 0.9 +mv_storage_unit = 0.9 
+mv_charging_point = 1.0 +mv_heat_pump = 1.0 +lv_generator = 0.95 +lv_conventional_load = 0.95 +lv_storage_unit = 0.95 +lv_charging_point = 1.0 +lv_heat_pump = 1.0 [reactive_power_mode] @@ -105,16 +105,16 @@ lv_hp = 1.0 # =========================== # power factor modes used to generate reactive power time series for loads and generators -mv_gen = inductive -mv_load = inductive -mv_storage = inductive -mv_cp = inductive -mv_hp = inductive -lv_gen = inductive -lv_load = inductive -lv_storage = inductive -lv_cp = inductive -lv_hp = inductive +mv_generator = inductive +mv_conventional_load = inductive +mv_storage_unit = inductive +mv_charging_point = inductive +mv_heat_pump = inductive +lv_generator = inductive +lv_conventional_load = inductive +lv_storage_unit = inductive +lv_charging_point = inductive +lv_heat_pump = inductive [demandlib] diff --git a/edisgo/flex_opt/q_control.py b/edisgo/flex_opt/q_control.py index a6e98578e..cfc353fb0 100644 --- a/edisgo/flex_opt/q_control.py +++ b/edisgo/flex_opt/q_control.py @@ -92,22 +92,6 @@ def fixed_cosphi(active_power, q_sign, power_factor): return active_power * q_sign * np.tan(np.arccos(power_factor)) -def _get_component_dict(): - """ - Helper function to translate from component type term used in function to the one - used in the config files. - - """ - comp_dict = { - "generators": "gen", - "storage_units": "storage", - "conventional_loads": "load", - "charging_points": "cp", - "heat_pumps": "hp", - } - return comp_dict - - def _fixed_cosphi_default_power_factor(comp_df, component_type, configs): """ Gets fixed cosphi default reactive power factor for each given component. @@ -123,8 +107,8 @@ def _fixed_cosphi_default_power_factor(comp_df, component_type, configs): All components must have the same `component_type`. component_type : str The component type determines the reactive power factor and mode used. - Possible options are 'generators', 'storage_units', 'conventional_loads', - 'charging_points', and 'heat_pumps'. 
+ Possible options are 'generator', 'storage_unit', 'conventional_load', + 'charging_point', and 'heat_pump'. configs : :class:`~.tools.config.Config` eDisGo configuration data. @@ -136,22 +120,28 @@ def _fixed_cosphi_default_power_factor(comp_df, component_type, configs): """ reactive_power_factor = configs["reactive_power_factor"] - comp_dict = _get_component_dict() - - if component_type in comp_dict.keys(): - comp = comp_dict[component_type] + allowed_types = [ + "generator", + "storage_unit", + "conventional_load", + "charging_point", + "heat_pump", + ] + if component_type in allowed_types: # write series with power factor for each component power_factor = pd.Series(index=comp_df.index, dtype=float) for voltage_level in comp_df.voltage_level.unique(): cols = comp_df.index[comp_df.voltage_level == voltage_level] if len(cols) > 0: - power_factor[cols] = reactive_power_factor[f"{voltage_level}_{comp}"] + power_factor[cols] = reactive_power_factor[ + f"{voltage_level}_{component_type}" + ] return power_factor else: raise ValueError( "Given 'component_type' is not valid. Valid options are " - "'generators','storage_units', 'conventional_loads', 'charging_points', " - "and 'heat_pumps'." + "'generator', 'storage_unit', 'conventional_load', 'charging_point', " + "and 'heat_pump'." ) @@ -170,8 +160,8 @@ def _fixed_cosphi_default_reactive_power_sign(comp_df, component_type, configs): All components must have the same `component_type`. component_type : str The component type determines the reactive power factor and mode used. - Possible options are 'generators', 'storage_units', 'conventional_loads', - 'charging_points', and 'heat_pumps'. + Possible options are 'generator', 'storage_unit', 'conventional_load', + 'charging_point', and 'heat_pump'. configs : :class:`~.tools.config.Config` eDisGo configuration data. 
@@ -183,17 +173,15 @@ def _fixed_cosphi_default_reactive_power_sign(comp_df, component_type, configs): """ reactive_power_mode = configs["reactive_power_mode"] - comp_dict = _get_component_dict() q_sign_dict = { - "generators": get_q_sign_generator, - "storage_units": get_q_sign_generator, - "conventional_loads": get_q_sign_load, - "charging_points": get_q_sign_load, - "heat_pumps": get_q_sign_load, + "generator": get_q_sign_generator, + "storage_unit": get_q_sign_generator, + "conventional_load": get_q_sign_load, + "charging_point": get_q_sign_load, + "heat_pump": get_q_sign_load, } - if component_type in comp_dict.keys(): - comp = comp_dict[component_type] + if component_type in q_sign_dict.keys(): get_q_sign = q_sign_dict[component_type] # write series with power factor for each component q_sign = pd.Series(index=comp_df.index, dtype=float) @@ -201,12 +189,12 @@ def _fixed_cosphi_default_reactive_power_sign(comp_df, component_type, configs): cols = comp_df.index[comp_df.voltage_level == voltage_level] if len(cols) > 0: q_sign[cols] = get_q_sign( - reactive_power_mode[f"{voltage_level}_{comp}"] + reactive_power_mode[f"{voltage_level}_{component_type}"] ) return q_sign else: raise ValueError( "Given 'component_type' is not valid. Valid options are " - "'generators','storage_units', 'conventional_loads', 'charging_points', " - "and 'heat_pumps'." + "'generator', 'storage_unit', 'conventional_load', 'charging_point', " + "and 'heat_pump'." 
) diff --git a/edisgo/io/powermodels_io.py b/edisgo/io/powermodels_io.py index b0fb22781..48b5c13cc 100644 --- a/edisgo/io/powermodels_io.py +++ b/edisgo/io/powermodels_io.py @@ -667,7 +667,7 @@ def _build_gen(edisgo_obj, psa_net, pm, flexible_storage_units, s_base): gen.bus[gen_i], flexible_storage_units=flexible_storage_units, ) - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "gen") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "generator") q = [ sign * np.tan(np.arccos(pf)) * gen.p_nom[gen_i], sign * np.tan(np.arccos(pf)) * gen.p_nom_min[gen_i], @@ -704,7 +704,7 @@ def _build_gen(edisgo_obj, psa_net, pm, flexible_storage_units, s_base): psa_net.storage_units.bus.loc[inflexible_storage_units[stor_i]], flexible_storage_units=flexible_storage_units, ) - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage_unit") p_g = max( [ psa_net.storage_units_t.p_set[inflexible_storage_units[stor_i]][0], @@ -837,7 +837,7 @@ def _build_branch(edisgo_obj, psa_net, pm, flexible_storage_units, s_base): flexible_storage_units=flexible_storage_units, ) # retrieve power factor from config - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage_unit") pm["branch"][str(stor_i + len(branches.index) + 1)] = { "name": "bss_branch_" + str(stor_i + 1), @@ -919,22 +919,22 @@ def _build_load( edisgo_obj.topology.loads_df.loc[loads_df.index[load_i]].type == "conventional_load" ): - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "load") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "conventional_load") elif ( edisgo_obj.topology.loads_df.loc[loads_df.index[load_i]].type == "heat_pump" ): - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "hp") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "heat_pump") elif ( edisgo_obj.topology.loads_df.loc[loads_df.index[load_i]].type == "charging_point" ): - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "cp") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, 
"charging_point") else: logger.warning( "No type specified for load {}. Power factor and sign will" "be set for conventional load.".format(loads_df.index[load_i]) ) - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "load") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "conventional_load") p_d = psa_net.loads_t.p_set[loads_df.index[load_i]] q_d = psa_net.loads_t.q_set[loads_df.index[load_i]] pm["load"][str(load_i + 1)] = { @@ -955,7 +955,7 @@ def _build_load( psa_net.storage_units.bus.loc[inflexible_storage_units[stor_i]], flexible_storage_units=flexible_storage_units, ) - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage_unit") p_d = -min( [ psa_net.storage_units_t.p_set[inflexible_storage_units[stor_i]][0], @@ -1036,7 +1036,7 @@ def _build_battery_storage( flexible_storage_units=flexible_storage_units, ) # retrieve power factor from config - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage_unit") e_max = ( psa_net.storage_units.p_nom.loc[flexible_storage_units[stor_i]] * psa_net.storage_units.max_hours.loc[flexible_storage_units[stor_i]] @@ -1151,7 +1151,7 @@ def _build_electromobility(edisgo_obj, psa_net, pm, s_base, flexible_cps): eta = edisgo_obj.electromobility.simbev_config_df.eta_cp.values[0] except IndexError: eta = 0.9 - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "cp") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "charging_point") q = ( sign * np.tan(np.arccos(pf)) @@ -1218,7 +1218,7 @@ def _build_heatpump(psa_net, pm, edisgo_obj, s_base, flexible_hps): for hp_i in np.arange(len(heat_df.index)): idx_bus = _mapping(psa_net, edisgo_obj, heat_df.bus[hp_i]) # retrieve power factor and sign from config - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "hp") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "heat_pump") q = sign * np.tan(np.arccos(pf)) * heat_df.p_set[hp_i] p_d = heat_df2[heat_df.index[hp_i]] pm["heatpumps"][str(hp_i + 1)] = { @@ 
-1446,7 +1446,7 @@ def _build_dsm(edisgo_obj, psa_net, pm, s_base, flexible_loads): for dsm_i in np.arange(len(dsm_df.index)): idx_bus = _mapping(psa_net, edisgo_obj, dsm_df.bus[dsm_i]) # retrieve power factor and sign from config - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "load") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "conventional_load") p_max = edisgo_obj.dsm.p_max[dsm_df.index[dsm_i]] p_min = edisgo_obj.dsm.p_min[dsm_df.index[dsm_i]] e_min = edisgo_obj.dsm.e_min[dsm_df.index[dsm_i]] @@ -2053,7 +2053,8 @@ def _get_pf(edisgo_obj, pm, idx_bus, kind): idx_bus : int Bus index from PowerModels bus dictionary. kind : str - Must be one of ["gen", "load", "storage", "hp", "cp"]. + Must be one of ["generator", "conventional_load", "storage_unit", "heat_pump", + "charging_point"]. Returns ------- @@ -2067,12 +2068,12 @@ def _get_pf(edisgo_obj, pm, idx_bus, kind): sign = edisgo_obj.config._data["reactive_power_mode"][ "{}_{}".format(grid_level, kind) ] - if kind in ["gen", "storage"]: + if kind in ["generator", "storage_unit"]: if sign == "inductive": sign = -1 else: sign = 1 - elif kind in ["load", "hp", "cp"]: + elif kind in ["conventional_load", "heat_pump", "charging_point"]: if sign == "inductive": sign = 1 else: diff --git a/edisgo/network/timeseries.py b/edisgo/network/timeseries.py index 15337f06b..6cf4a7b47 100644 --- a/edisgo/network/timeseries.py +++ b/edisgo/network/timeseries.py @@ -821,10 +821,10 @@ def _worst_case_generators(self, cases, df, configs): # reactive power # get worst case configurations for each generator power_factor = q_control._fixed_cosphi_default_power_factor( - df, "generators", configs + df, "generator", configs ) q_sign = q_control._fixed_cosphi_default_reactive_power_sign( - df, "generators", configs + df, "generator", configs ) # write reactive power configuration to TimeSeriesRaw self.time_series_raw.q_control.drop(df.index, errors="ignore", inplace=True) @@ -899,10 +899,10 @@ def _worst_case_conventional_load(self, cases, 
df, configs): # reactive power # get worst case configurations for each load power_factor = q_control._fixed_cosphi_default_power_factor( - df, "conventional_loads", configs + df, "conventional_load", configs ) q_sign = q_control._fixed_cosphi_default_reactive_power_sign( - df, "conventional_loads", configs + df, "conventional_load", configs ) # write reactive power configuration to TimeSeriesRaw self.time_series_raw.q_control.drop(df.index, errors="ignore", inplace=True) @@ -999,10 +999,10 @@ def _worst_case_charging_points(self, cases, df, configs): # reactive power # get worst case configurations for each charging point power_factor = q_control._fixed_cosphi_default_power_factor( - df, "charging_points", configs + df, "charging_point", configs ) q_sign = q_control._fixed_cosphi_default_reactive_power_sign( - df, "charging_points", configs + df, "charging_point", configs ) # write reactive power configuration to TimeSeriesRaw self.time_series_raw.q_control.drop(df.index, errors="ignore", inplace=True) @@ -1077,10 +1077,10 @@ def _worst_case_heat_pumps(self, cases, df, configs): # reactive power # get worst case configurations for each heat pump power_factor = q_control._fixed_cosphi_default_power_factor( - df, "heat_pumps", configs + df, "heat_pump", configs ) q_sign = q_control._fixed_cosphi_default_reactive_power_sign( - df, "heat_pumps", configs + df, "heat_pump", configs ) # write reactive power configuration to TimeSeriesRaw self.time_series_raw.q_control.drop(df.index, errors="ignore", inplace=True) @@ -1153,10 +1153,10 @@ def _worst_case_storage_units(self, cases, df, configs): # reactive power # get worst case configurations for each load power_factor = q_control._fixed_cosphi_default_power_factor( - df, "storage_units", configs + df, "storage_unit", configs ) q_sign = q_control._fixed_cosphi_default_reactive_power_sign( - df, "storage_units", configs + df, "storage_unit", configs ) # write reactive power configuration to TimeSeriesRaw 
self.time_series_raw.q_control.drop(df.index, errors="ignore", inplace=True) @@ -1606,7 +1606,7 @@ def _get_q_sign_and_power_factor_per_component( q_sign, q_control._fixed_cosphi_default_reactive_power_sign( df[df["type"] == load_type], - f"{load_type}s", + load_type, edisgo_object.config, ), ] @@ -1616,17 +1616,17 @@ def _get_q_sign_and_power_factor_per_component( power_factor, q_control._fixed_cosphi_default_power_factor( df[df["type"] == load_type], - f"{load_type}s", + load_type, edisgo_object.config, ), ] ) else: q_sign = q_control._fixed_cosphi_default_reactive_power_sign( - df, type, edisgo_object.config + df, type[:-1], edisgo_object.config ) power_factor = q_control._fixed_cosphi_default_power_factor( - df, type, edisgo_object.config + df, type[:-1], edisgo_object.config ) elif isinstance(parametrisation, pd.DataFrame): # check if all given components exist in network and only use existing @@ -1659,7 +1659,7 @@ def _get_q_sign_and_power_factor_per_component( q_sign, default_func( df[df["type"] == load_type], - f"{load_type}s", + load_type, edisgo_object.config, ), ] @@ -1668,7 +1668,9 @@ def _get_q_sign_and_power_factor_per_component( q_sign = pd.concat( [ q_sign, - default_func(df, type, edisgo_object.config), + default_func( + df, type[:-1], edisgo_object.config + ), ] ) else: @@ -1692,7 +1694,7 @@ def _get_q_sign_and_power_factor_per_component( power_factor, default_func( df[df["type"] == load_type], - f"{load_type}s", + load_type, edisgo_object.config, ), ] @@ -1701,7 +1703,9 @@ def _get_q_sign_and_power_factor_per_component( power_factor = pd.concat( [ power_factor, - default_func(df, type, edisgo_object.config), + default_func( + df, type[:-1], edisgo_object.config + ), ] ) else: diff --git a/edisgo/tools/config.py b/edisgo/tools/config.py index 54fc08a33..7494943a3 100644 --- a/edisgo/tools/config.py +++ b/edisgo/tools/config.py @@ -116,7 +116,7 @@ class Config: Get reactive power factor for generators in the MV network - >>> 
config['reactive_power_factor']['mv_gen'] + >>> config['reactive_power_factor']['mv_generator'] """ diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index 257230e05..11ef56701 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -234,9 +234,9 @@ def calculate_voltage_diff_pu_per_line( the voltage difference behaves counterintuitively, it drops for generators and rises for loads. """ - if "gen" in component_type: + if component_type in ["generator", "storage_unit"]: sign = q_control.get_q_sign_generator(reactive_power_mode) - elif "load" in component_type or "cp" in component_type or "hp" in component_type: + elif component_type in ["conventional_load", "heat_pump", "charging_point"]: sign = q_control.get_q_sign_load(reactive_power_mode) else: raise ValueError("Component type not supported.") @@ -362,14 +362,21 @@ def select_cable( A tuple containing the selected cable type and the quantity needed. """ if component_type is None: - component_type = level + "_load" - - elif component_type in ["gen", "load", "cp", "hp"]: + component_type = level + "conventional_load" + + elif component_type in [ + "generator", + "conventional_load", + "charging_point", + "heat_pump", + "storage_unit", + ]: component_type = level + "_" + component_type else: raise ValueError( "Specified component type is not valid. " - "Must either be 'gen', 'load', 'cp' or 'hp'." + "Must either be 'generator', 'conventional_load', 'charging_point', " + "'heat_pump' or 'storage_unit'." 
) if power_factor is None: power_factor = edisgo_obj.config["reactive_power_factor"][component_type] diff --git a/tests/flex_opt/test_q_control.py b/tests/flex_opt/test_q_control.py index 9595ec1c6..a028c1544 100644 --- a/tests/flex_opt/test_q_control.py +++ b/tests/flex_opt/test_q_control.py @@ -101,7 +101,7 @@ def test__fixed_cosphi_default_power_factor( # test for component_type="generators" pf = q_control._fixed_cosphi_default_power_factor( - comp_df=df, component_type="generators", configs=config + comp_df=df, component_type="generator", configs=config ) assert pf.shape == (3,) @@ -112,7 +112,7 @@ def test__fixed_cosphi_default_power_factor( # test for component_type="loads" pf = q_control._fixed_cosphi_default_power_factor( - comp_df=df, component_type="conventional_loads", configs=config + comp_df=df, component_type="conventional_load", configs=config ) assert pf.shape == (3,) @@ -123,7 +123,7 @@ def test__fixed_cosphi_default_power_factor( # test for component_type="charging_points" pf = q_control._fixed_cosphi_default_power_factor( - comp_df=df, component_type="charging_points", configs=config + comp_df=df, component_type="charging_point", configs=config ) assert pf.shape == (3,) @@ -134,7 +134,7 @@ def test__fixed_cosphi_default_power_factor( # test for component_type="heat_pumps" pf = q_control._fixed_cosphi_default_power_factor( - comp_df=df, component_type="heat_pumps", configs=config + comp_df=df, component_type="heat_pump", configs=config ) assert pf.shape == (3,) @@ -145,7 +145,7 @@ def test__fixed_cosphi_default_power_factor( # test for component_type="storage_units" pf = q_control._fixed_cosphi_default_power_factor( - comp_df=df, component_type="storage_units", configs=config + comp_df=df, component_type="storage_unit", configs=config ) assert pf.shape == (3,) @@ -165,7 +165,7 @@ def test__fixed_cosphi_default_reactive_power_sign( # test for component_type="generators" pf = q_control._fixed_cosphi_default_reactive_power_sign( - comp_df=df, 
component_type="generators", configs=config + comp_df=df, component_type="generator", configs=config ) assert pf.shape == (3,) @@ -176,7 +176,7 @@ def test__fixed_cosphi_default_reactive_power_sign( # test for component_type="conventional_loads" pf = q_control._fixed_cosphi_default_reactive_power_sign( - comp_df=df, component_type="conventional_loads", configs=config + comp_df=df, component_type="conventional_load", configs=config ) assert pf.shape == (3,) @@ -187,7 +187,7 @@ def test__fixed_cosphi_default_reactive_power_sign( # test for component_type="charging_points" pf = q_control._fixed_cosphi_default_reactive_power_sign( - comp_df=df, component_type="charging_points", configs=config + comp_df=df, component_type="charging_point", configs=config ) assert pf.shape == (3,) @@ -198,7 +198,7 @@ def test__fixed_cosphi_default_reactive_power_sign( # test for component_type="heat_pumps" pf = q_control._fixed_cosphi_default_reactive_power_sign( - comp_df=df, component_type="heat_pumps", configs=config + comp_df=df, component_type="heat_pump", configs=config ) assert pf.shape == (3,) @@ -209,7 +209,7 @@ def test__fixed_cosphi_default_reactive_power_sign( # test for component_type="storage_units" pf = q_control._fixed_cosphi_default_reactive_power_sign( - comp_df=df, component_type="storage_units", configs=config + comp_df=df, component_type="storage_unit", configs=config ) assert pf.shape == (3,) diff --git a/tests/io/test_powermodels_io.py b/tests/io/test_powermodels_io.py index 4d0d7842a..b3bfab036 100644 --- a/tests/io/test_powermodels_io.py +++ b/tests/io/test_powermodels_io.py @@ -310,7 +310,7 @@ def test__get_pf(self): # test mode None powermodels_network, hv_flex_dict = powermodels_io.to_powermodels(self.edisgo) - for component in ["gen", "storage"]: + for component in ["generator", "storage_unit"]: pf, sign = powermodels_io._get_pf( self.edisgo, powermodels_network, 1, component ) @@ -322,10 +322,10 @@ def test__get_pf(self): assert pf == 0.95 assert sign == -1 
- for component in ["hp", "cp"]: + for component in ["heat_pump", "charging_point"]: for bus in [1, 29]: pf, sign = powermodels_io._get_pf( - self.edisgo, powermodels_network, 1, component + self.edisgo, powermodels_network, bus, component ) assert pf == 1 assert sign == 1 From 59f12c868c651aa2374e2fdd38558f171a8f2213 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 7 Aug 2024 16:40:42 +0200 Subject: [PATCH 126/141] Minor docstring fix --- edisgo/flex_opt/q_control.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/edisgo/flex_opt/q_control.py b/edisgo/flex_opt/q_control.py index cfc353fb0..07183a7d5 100644 --- a/edisgo/flex_opt/q_control.py +++ b/edisgo/flex_opt/q_control.py @@ -100,7 +100,7 @@ def _fixed_cosphi_default_power_factor(comp_df, component_type, configs): ----------- comp_df : :pandas:`pandas.DataFrame` Dataframe with component names (in the index) of all components - reactive power factor needs to be set. Only required column is + reactive power factor needs to be set for. Only required column is column 'voltage_level', giving the voltage level the component is in (the voltage level can be set using the function :func:`~.tools.tools.assign_voltage_level_to_component`). 
From ace46a93ce519a4642d40056b990b64040253e16 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 7 Aug 2024 16:41:11 +0200 Subject: [PATCH 127/141] Simplify getting config values --- edisgo/io/powermodels_io.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/edisgo/io/powermodels_io.py b/edisgo/io/powermodels_io.py index 48b5c13cc..541e01ffc 100644 --- a/edisgo/io/powermodels_io.py +++ b/edisgo/io/powermodels_io.py @@ -2062,12 +2062,8 @@ def _get_pf(edisgo_obj, pm, idx_bus, kind): """ grid_level = pm["bus"][str(idx_bus)]["grid_level"] - pf = edisgo_obj.config._data["reactive_power_factor"][ - "{}_{}".format(grid_level, kind) - ] - sign = edisgo_obj.config._data["reactive_power_mode"][ - "{}_{}".format(grid_level, kind) - ] + pf = edisgo_obj.config["reactive_power_factor"]["{}_{}".format(grid_level, kind)] + sign = edisgo_obj.config["reactive_power_mode"]["{}_{}".format(grid_level, kind)] if kind in ["generator", "storage_unit"]: if sign == "inductive": sign = -1 From 789c0a65ca4a31d8d0afb90deceda4229a41c6ae Mon Sep 17 00:00:00 2001 From: joda9 Date: Thu, 8 Aug 2024 12:23:21 +0200 Subject: [PATCH 128/141] Update demandlib dependency to latest version --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index f04a4b97c..082fbe8b3 100644 --- a/setup.py +++ b/setup.py @@ -33,7 +33,7 @@ def read(fname): requirements = [ "contextily", "dash < 2.9.0", - "demandlib < 0.2.0", + "demandlib", "descartes", "egoio >= 0.4.7", "geoalchemy2 < 0.7.0", From aa49064cb5ea4483a5bec828a6f6aeb8d69e39f2 Mon Sep 17 00:00:00 2001 From: birgits Date: Thu, 8 Aug 2024 13:58:15 +0200 Subject: [PATCH 129/141] Adapt calculate_voltage_diff_pu_per_line to have q_sign and power_factor as necessary inputs --- edisgo/tools/tools.py | 37 ++++++++++++++----------------------- 1 file changed, 14 insertions(+), 23 deletions(-) diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index 11ef56701..beb9fa512 100644 --- 
a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -14,7 +14,7 @@ from sqlalchemy.engine.base import Engine -from edisgo.flex_opt import exceptions, q_control +from edisgo.flex_opt import exceptions from edisgo.io.db import session_scope_egon_data, sql_grid_geom, sql_intersects from edisgo.tools import session_scope @@ -198,12 +198,11 @@ def calculate_voltage_diff_pu_per_line( r_total: float | np.ndarray, x_total: float | np.ndarray, v_nom: float | np.ndarray, - reactive_power_mode: str = "inductive", - power_factor: float = 0.95, - component_type: str = "load", + q_sign: int, + power_factor: float, ) -> float | np.ndarray: """ - Calculate the voltage difference across a line in kV. + Calculate the voltage difference across a line in p.u.. Parameters ---------- @@ -215,35 +214,27 @@ def calculate_voltage_diff_pu_per_line( Total reactance of the line in Ohms. v_nom : float or array-like Nominal voltage of the line in kV. - reactive_power_mode : str, optional - Mode of the reactive power. Default: 'inductive'. - alternative: 'capacitive' - power_factor : float, optional - Power factor (cosine of the phase angle) of the load or generator. - Default is 0.95. - component_type : str, optional - Type of the component to be connected, used to obtain the default reactive power - mode from the configuration. Default: 'load'. - alternative: 'gen' + q_sign : int + `q_sign` defines whether the reactive power is positive or + negative and must either be -1 or +1. In case of generators and storage units, + inductive reactive power is negative. In case of loads, inductive reactive + power is positive. + power_factor : :pandas:`pandas.Series` or float + Ratio of real to apparent power. Returns ------- float or array-like - Voltage difference in pu. If positive, the voltage difference behaves like + Voltage difference in p.u.. If positive, the voltage difference behaves like expected, it rises for generators and drops for loads. 
If negative, the voltage difference behaves counterintuitively, it drops for generators and rises for loads. + """ - if component_type in ["generator", "storage_unit"]: - sign = q_control.get_q_sign_generator(reactive_power_mode) - elif component_type in ["conventional_load", "heat_pump", "charging_point"]: - sign = q_control.get_q_sign_load(reactive_power_mode) - else: - raise ValueError("Component type not supported.") sin_phi = np.sqrt(1 - power_factor**2) # Calculate the voltage difference using the formula from VDE-AR-N 4105 voltage_diff = (s_max / (v_nom**2)) * ( - r_total * power_factor + sign * x_total * sin_phi + r_total * power_factor + q_sign * x_total * sin_phi ) return voltage_diff # in pu From 0d1683d49911de9d6ea3f58edc892ec89baca6dd Mon Sep 17 00:00:00 2001 From: birgits Date: Thu, 8 Aug 2024 14:00:08 +0200 Subject: [PATCH 130/141] Change function calculate_voltage_diff_pu_per_line_from_type to have line type and component type as inputs --- edisgo/tools/tools.py | 109 +++++++++++++++++++++++++----------------- 1 file changed, 66 insertions(+), 43 deletions(-) diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index beb9fa512..31553ae01 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -14,7 +14,7 @@ from sqlalchemy.engine.base import Engine -from edisgo.flex_opt import exceptions +from edisgo.flex_opt import exceptions, q_control from edisgo.io.db import session_scope_egon_data, sql_grid_geom, sql_intersects from edisgo.tools import session_scope @@ -239,67 +239,94 @@ def calculate_voltage_diff_pu_per_line( return voltage_diff # in pu -def calculate_voltage_difference_pu_per_line_with_length( - R_per_km: float | np.ndarray, - L_per_km: float | np.ndarray, +def calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj: EDisGo, + cable_names: str | np.ndarray, length: float, num_parallel: int, v_nom: float | np.ndarray, s_max: float | np.ndarray, - power_factor: float = 0.95, - reactive_power_mode: str = "inductive", - 
component_type: str = "load", + component_type: str, ) -> float | np.ndarray: """ - Calculate the voltage difference per unit of nominal voltage. + Calculate the voltage difference across a line in p.u. depending on line type + and component type. + + This function serves as a helper function for function + :py:func:`calculate_voltage_diff_pu_per_line`, as it automatically obtains the + equipment data per line type from the provided equipment data and default reactive + power data per component type from the configuration files. Parameters ---------- - R_per_km : float or array-like + edisgo_obj : :class:`~.EDisGo` + cable_names : str or array-like - Resistance per kilometer of the cable in ohm/km. + Name(s) of the cable type(s) as given in the equipment data. - L_per_km : float or array-like - Inductance per kilometer of the cable in mH/km. length : float Length of the cable in km. num_parallel : int Number of parallel cables. v_nom : int - Nominal voltage in kV. + Nominal voltage of the cable(s) in kV. s_max : float Apparent power the cable must carry in MVA. - power_factor : float, optional - Cosine phi of the load or generator. Default: 0.95. - component_type : str, optional + component_type : str Type of the component to be connected, used to obtain the default reactive power - mode from the configuration. Default: 'load'. - alternative: 'gen' + mode and power factor from the configuration file in sections + `reactive_power_mode` and `reactive_power_factor`. + Possible options are "generator", "conventional_load", "charging_point", + "heat_pump" and "storage_unit". Returns ------- - float - Voltage difference in per unit of nominal voltage. + float or array-like + Voltage difference in p.u.. If positive, the voltage difference behaves like + expected, it rises for generators and drops for loads. If negative, + the voltage difference behaves counterintuitively, it drops for generators + and rises for loads.
+ """ - # Calculate total resistance and reactance for the given length and - # number of parallel cables - r_total = calculate_line_resistance(R_per_km, length, num_parallel) - x_total = calculate_line_reactance(L_per_km, length, num_parallel) + # calculate total resistance and reactance for the given length and + # number of parallel cables for given cable types + config_type = "mv_cables" if v_nom > 1.0 else "lv_cables" + cable_data = edisgo_obj.topology.equipment_data[config_type] + r_total = calculate_line_resistance( + cable_data.loc[cable_names, "R_per_km"], length, num_parallel + ) + x_total = calculate_line_reactance( + cable_data.loc[cable_names, "L_per_km"], length, num_parallel + ) + + # get sign of reactive power based on component type + config_type = f"mv_{component_type}" if v_nom > 1.0 else f"lv_{component_type}" + if component_type in ["generator", "storage_unit"]: + q_sign = q_control.get_q_sign_generator( + edisgo_obj.config["reactive_power_mode"][config_type] + ) + elif component_type in ["conventional_load", "heat_pump", "charging_point"]: + q_sign = q_control.get_q_sign_load( + edisgo_obj.config["reactive_power_mode"][config_type] + ) + else: + raise ValueError( + "Specified component type is not valid. " + "Must either be 'generator', 'conventional_load', 'charging_point', " + "'heat_pump' or 'storage_unit'." 
+ ) + + # get power factor based on component type + power_factor = edisgo_obj.config["reactive_power_factor"][config_type] # Calculate the voltage drop or increase - delta_v = calculate_voltage_diff_pu_per_line( + return calculate_voltage_diff_pu_per_line( s_max, r_total, x_total, v_nom, - reactive_power_mode=reactive_power_mode, - power_factor=power_factor, - component_type=component_type, + q_sign, + power_factor, ) - # Convert voltage difference to per unit of nominal voltage - voltage_difference_pu = delta_v - - return voltage_difference_pu - def select_cable( edisgo_obj: EDisGo, @@ -399,15 +426,13 @@ def select_cable( ] if length != 0: suitable_cables = suitable_cables[ - calculate_voltage_difference_pu_per_line_with_length( - R_per_km=available_cables["R_per_km"], - L_per_km=available_cables["L_per_km"], + calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=edisgo_obj, + cable_names=suitable_cables.index, length=length, num_parallel=cable_count, - v_nom=available_cables["U_n"], + v_nom=available_cables["U_n"].values[0], s_max=apparent_power, - power_factor=power_factor, - reactive_power_mode=reactive_power_mode, component_type=component_type, ) < max_voltage_diff @@ -424,15 +449,13 @@ def select_cable( ] if length != 0: suitable_cables = suitable_cables[ - calculate_voltage_difference_pu_per_line_with_length( - R_per_km=available_cables["R_per_km"], - L_per_km=available_cables["L_per_km"], + calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=edisgo_obj, + cable_names=available_cables.index, length=length, num_parallel=cable_count, - v_nom=available_cables["U_n"], + v_nom=available_cables["U_n"].values[0], s_max=apparent_power, - power_factor=power_factor, - reactive_power_mode=reactive_power_mode, component_type=component_type, ) < max_voltage_diff From 095e3d0bb13f582e00de87ec20f7cae8814c34cb Mon Sep 17 00:00:00 2001 From: birgits Date: Thu, 8 Aug 2024 14:00:51 +0200 Subject: [PATCH 131/141] Change function select_cable to retrieve reactive 
power behavior from configs always --- edisgo/tools/tools.py | 59 +++++++++++++++---------------------------- 1 file changed, 21 insertions(+), 38 deletions(-) diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index 31553ae01..061e02ed5 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -332,20 +332,18 @@ def select_cable( edisgo_obj: EDisGo, level: str, apparent_power: float, - length: float = 0, + component_type: str | None = None, + length: float = 0.0, max_voltage_diff: float | None = None, max_cables: int = 7, - power_factor: float | None = None, - component_type: str | None = "load", - reactive_power_mode: str = "inductive", ) -> tuple[pd.Series, int]: """ Selects suitable cable type and quantity based on apparent power and voltage deviation. The cable is selected to carry the given `apparent_power` and to ensure - acceptable voltage deviation over the cable length. No load factor is - considered. Overhead lines are not considered in choosing a suitable cable. + acceptable voltage deviation over the cable. + Overhead lines are not considered in choosing a suitable cable. Parameters ---------- @@ -355,49 +353,33 @@ def select_cable( 'lv'. apparent_power : float Apparent power the cable must carry in MVA. + component_type : str + Type of the component to be connected. Possible options are "generator", + "conventional_load", "charging_point", "heat_pump" or "storage_unit". + Only needed in case a cable length is given and thus the voltage difference over + the cable can be taken into account for selecting a suitable cable. In that case + it is used to obtain the default power factor and reactive power mode from the + configuration files in sections `reactive_power_factor` and + `reactive_power_mode`. + Default: None. length : float Length of the cable in km. Default: 0. max_voltage_diff : float - Maximum allowed voltage difference (p.u. of nominal voltage). + Maximum allowed voltage difference in p.u.. 
If None, it defaults to the value specified in the configuration file - under the `grid_connection` section for the respective voltage level. + under the `grid_connection` section for the respective voltage level + (lv_max_voltage_deviation for LV and mv_max_voltage_deviation for MV). Default: None. max_cables : int Maximum number of cables to consider. Default: 7. - power_factor : float - Power factor of the load. - component_type : str - Type of the component to be connected, used to obtain the default power factor - from the configuration. - possible options are 'gen', 'load', 'cp', 'hp' - Default: 'load'. - reactive_power_mode : str - Mode of the reactive power. Default: 'inductive' Returns ------- - tuple[pd.Series, int] - A tuple containing the selected cable type and the quantity needed. + tuple[:pandas:`pandas.Series`, int] + A tuple containing information on the selected cable type and the quantity + needed. + """ - if component_type is None: - component_type = level + "conventional_load" - - elif component_type in [ - "generator", - "conventional_load", - "charging_point", - "heat_pump", - "storage_unit", - ]: - component_type = level + "_" + component_type - else: - raise ValueError( - "Specified component type is not valid. " - "Must either be 'generator', 'conventional_load', 'charging_point', " - "'heat_pump' or 'storage_unit'." - ) - if power_factor is None: - power_factor = edisgo_obj.config["reactive_power_factor"][component_type] if level == "mv": cable_data = edisgo_obj.topology.equipment_data["mv_cables"] available_cables = cable_data[ @@ -417,6 +399,7 @@ def select_cable( raise ValueError( "Specified voltage level is not valid. Must either be 'mv' or 'lv'." 
) + cable_count = 1 suitable_cables = available_cables[ calculate_apparent_power( From 2875aa736681f649e2d90e8993f5f3b70618551c Mon Sep 17 00:00:00 2001 From: birgits Date: Thu, 8 Aug 2024 14:01:11 +0200 Subject: [PATCH 132/141] Adapt tests to changes --- tests/tools/test_tools.py | 240 +++++++++----------------------------- 1 file changed, 54 insertions(+), 186 deletions(-) diff --git a/tests/tools/test_tools.py b/tests/tools/test_tools.py index 193ed4c5c..66216ca6d 100644 --- a/tests/tools/test_tools.py +++ b/tests/tools/test_tools.py @@ -35,58 +35,49 @@ def test_calculate_voltage_diff_pu_per_line(self): correct_value_negative_sign = 0.06008053167215312 r_total = 0.412 x_total = 0.252 + + # test generator, float data = tools.calculate_voltage_diff_pu_per_line( s_max=50, r_total=r_total, x_total=x_total, v_nom=20, - reactive_power_mode="inductive", + q_sign=-1, power_factor=0.9, - component_type="gen", ) assert np.isclose(data, correct_value_positive_sign) + # test generator, array data = tools.calculate_voltage_diff_pu_per_line( s_max=np.array([50, 50]), r_total=np.array([r_total, r_total]), x_total=np.array([x_total, x_total]), v_nom=20, - reactive_power_mode="inductive", + q_sign=-1, power_factor=0.9, - component_type="gen", ) assert_allclose( data, np.array([correct_value_positive_sign, correct_value_positive_sign]), rtol=1e-5, ) + # test generator, float, higher voltage data = tools.calculate_voltage_diff_pu_per_line( s_max=50, r_total=r_total, x_total=x_total, v_nom=40, - reactive_power_mode="inductive", + q_sign=-1, power_factor=0.9, - component_type="gen", ) assert np.isclose(data, correct_value_positive_sign / 4) - data = tools.calculate_voltage_diff_pu_per_line( - s_max=100, - r_total=r_total, - x_total=x_total, - v_nom=20, - reactive_power_mode="inductive", - power_factor=0.9, - component_type="gen", - ) - assert np.isclose(data, correct_value_positive_sign * 2) + # test generator, array, larger cable data = tools.calculate_voltage_diff_pu_per_line( 
s_max=np.array([100, 100]), r_total=np.array([r_total, r_total]), x_total=np.array([x_total, x_total]), v_nom=np.array([20, 20]), - reactive_power_mode="inductive", + q_sign=-1, power_factor=0.9, - component_type="gen", ) assert_allclose( data, @@ -95,64 +86,26 @@ def test_calculate_voltage_diff_pu_per_line(self): ), rtol=1e-5, ) + # test generator, capacitive data = tools.calculate_voltage_diff_pu_per_line( s_max=100, r_total=r_total, x_total=x_total, v_nom=20, - reactive_power_mode="capacitive", - power_factor=0.9, - component_type="gen", - ) - assert np.isclose(data, correct_value_negative_sign * 2) - data = tools.calculate_voltage_diff_pu_per_line( - s_max=100, - r_total=r_total, - x_total=x_total, - v_nom=20, - reactive_power_mode="inductive", + q_sign=1, power_factor=0.9, - component_type="load", ) assert np.isclose(data, correct_value_negative_sign * 2) + # test load, capacitive data = tools.calculate_voltage_diff_pu_per_line( s_max=100, r_total=r_total, x_total=x_total, v_nom=20, - reactive_power_mode="capacitive", + q_sign=-1, power_factor=0.9, - component_type="load", ) assert np.isclose(data, correct_value_positive_sign * 2) - try: - data = tools.calculate_voltage_diff_pu_per_line( - s_max=100, - r_total=r_total, - x_total=x_total, - v_nom=20, - reactive_power_mode="inductive", - power_factor=0.9, - component_type="fail", - ) - except ValueError as e: - assert str(e) == "Component type not supported." 
- - Phi = np.pi / 6 - arctanphi = np.arctan(Phi) - R = r_total - X = arctanphi * R - v_nom = 0.4 - data = tools.calculate_voltage_diff_pu_per_line( - s_max=0.027, # 27 kW generator - r_total=R, - x_total=X, - v_nom=v_nom, - reactive_power_mode="inductive", - power_factor=0.95, - component_type="gen", - ) - assert np.isclose(data, 0.055577375215395) # test the examples from VDE-AR-N 4105 attachment D data = tools.calculate_voltage_diff_pu_per_line( @@ -160,9 +113,8 @@ def test_calculate_voltage_diff_pu_per_line(self): r_total=0.2001, x_total=0.1258, v_nom=0.4, - reactive_power_mode="inductive", + q_sign=-1, power_factor=1, - component_type="gen", ) assert np.isclose(data, 0.025, rtol=1e-2) @@ -171,65 +123,60 @@ def test_calculate_voltage_diff_pu_per_line(self): r_total=0.2001, x_total=0.1258, v_nom=0.4, - reactive_power_mode="inductive", + q_sign=-1, power_factor=0.9, - component_type="gen", ) assert np.isclose(data, 0.0173, rtol=1e-2) - def test_calculate_voltage_difference_pu_per_line_with_length(self): - correct_value_negative_sign = 0.52589253567891375 * 1e-2 - correct_value_positive_sign = 0.017241074643210865 - data = tools.calculate_voltage_difference_pu_per_line_with_length( - R_per_km=0.1, - L_per_km=0.350, + def test_calculate_voltage_diff_pu_per_line_from_type(self): + correct_value_negative_sign = 0.4916578234319946 * 1e-2 + correct_value_positive_sign = 0.017583421765680056 + data = tools.calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=self.edisgo, + cable_names="NA2XS(FL)2Y 3x1x300 RM/25", length=1, num_parallel=1, v_nom=20, s_max=50, - power_factor=0.9, - reactive_power_mode="inductive", - component_type="gen", + component_type="generator", ) assert np.isclose(data, correct_value_negative_sign) - data = tools.calculate_voltage_difference_pu_per_line_with_length( - R_per_km=np.array([0.1, 0.1]), - L_per_km=np.array([0.35, 0.35]), + data = tools.calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=self.edisgo, + cable_names=np.array( + 
["NA2XS(FL)2Y 3x1x300 RM/25", "NA2XS(FL)2Y 3x1x300 RM/25"] + ), length=1, num_parallel=1, v_nom=20, s_max=50, - power_factor=0.9, - reactive_power_mode="inductive", - component_type="gen", + component_type="generator", ) assert_allclose( data, np.array([correct_value_negative_sign, correct_value_negative_sign]), rtol=1e-5, ) - data = tools.calculate_voltage_difference_pu_per_line_with_length( - R_per_km=0.1, - L_per_km=0.35, + data = tools.calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=self.edisgo, + cable_names="NA2XS(FL)2Y 3x1x300 RM/25", length=2, num_parallel=1, v_nom=20, s_max=50, - power_factor=0.9, - reactive_power_mode="inductive", - component_type="gen", + component_type="generator", ) assert np.isclose(data, 2 * correct_value_negative_sign) - data = tools.calculate_voltage_difference_pu_per_line_with_length( - R_per_km=np.array([0.1, 0.1]), - L_per_km=np.array([0.35, 0.35]), + data = tools.calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=self.edisgo, + cable_names=np.array( + ["NA2XS(FL)2Y 3x1x300 RM/25", "NA2XS(FL)2Y 3x1x300 RM/25"] + ), length=2, num_parallel=1, v_nom=20, s_max=50, - power_factor=0.9, - reactive_power_mode="inductive", - component_type="gen", + component_type="generator", ) assert_allclose( data, @@ -239,55 +186,25 @@ def test_calculate_voltage_difference_pu_per_line_with_length(self): rtol=1e-5, ) - data = tools.calculate_voltage_difference_pu_per_line_with_length( - R_per_km=0.1, - L_per_km=0.35, + data = tools.calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=self.edisgo, + cable_names="NA2XS(FL)2Y 3x1x300 RM/25", length=1, num_parallel=2, v_nom=20, s_max=50, - power_factor=0.9, - reactive_power_mode="inductive", - component_type="gen", + component_type="generator", ) assert np.isclose(data, correct_value_negative_sign / 2) - data = tools.calculate_voltage_difference_pu_per_line_with_length( - R_per_km=0.1, - L_per_km=0.35, + data = tools.calculate_voltage_diff_pu_per_line_from_type( + 
edisgo_obj=self.edisgo, + cable_names="NA2XS(FL)2Y 3x1x300 RM/25", length=1, num_parallel=2, v_nom=20, s_max=50, - power_factor=0.9, - reactive_power_mode="inductive", - component_type="load", - ) - assert np.isclose(data, correct_value_positive_sign / 2) - - data = tools.calculate_voltage_difference_pu_per_line_with_length( - R_per_km=0.1, - L_per_km=0.35, - length=1, - num_parallel=2, - v_nom=20, - s_max=50, - power_factor=0.9, - reactive_power_mode="capacitive", - component_type="load", - ) - assert np.isclose(data, correct_value_negative_sign / 2) - - data = tools.calculate_voltage_difference_pu_per_line_with_length( - R_per_km=0.1, - L_per_km=0.35, - length=1, - num_parallel=2, - v_nom=20, - s_max=50, - power_factor=0.9, - reactive_power_mode="capacitive", - component_type="gen", + component_type="conventional_load", ) assert np.isclose(data, correct_value_positive_sign / 2) @@ -363,12 +280,6 @@ def test_select_cable(self): self.edisgo, "mv", 5.1, - length=0, - max_voltage_diff=None, - max_cables=7, - power_factor=None, - component_type=None, - reactive_power_mode="inductive", ) assert cable_data.name == "NA2XS2Y 3x1x150 RE/25" assert num_parallel_cables == 1 @@ -377,12 +288,6 @@ def test_select_cable(self): self.edisgo, "mv", 40, - length=0, - max_voltage_diff=None, - max_cables=7, - power_factor=None, - component_type="load", - reactive_power_mode="inductive", ) assert cable_data.name == "NA2XS(FL)2Y 3x1x500 RM/35" assert num_parallel_cables == 2 @@ -391,12 +296,6 @@ def test_select_cable(self): self.edisgo, "lv", 0.18, - length=0, - max_voltage_diff=None, - max_cables=7, - power_factor=None, - component_type="load", - reactive_power_mode="inductive", ) assert cable_data.name == "NAYY 4x1x150" assert num_parallel_cables == 1 @@ -407,11 +306,7 @@ def test_select_cable(self): "mv", 5.1, length=2, - max_voltage_diff=None, - max_cables=7, - power_factor=None, - component_type="load", - reactive_power_mode="inductive", + component_type="conventional_load", ) 
assert cable_data.name == "NA2XS2Y 3x1x150 RE/25" assert num_parallel_cables == 1 @@ -421,11 +316,7 @@ def test_select_cable(self): "mv", 40, length=1, - max_voltage_diff=None, - max_cables=7, - power_factor=None, - component_type="load", - reactive_power_mode="inductive", + component_type="conventional_load", ) assert cable_data.name == "NA2XS(FL)2Y 3x1x500 RM/35" assert num_parallel_cables == 2 @@ -435,11 +326,7 @@ def test_select_cable(self): "lv", 0.18, length=1, - max_voltage_diff=None, - max_cables=7, - power_factor=None, - component_type="load", - reactive_power_mode="inductive", + component_type="conventional_load", ) assert cable_data.name == "NAYY 4x1x300" assert num_parallel_cables == 5 @@ -451,23 +338,7 @@ def test_select_cable(self): length=1, max_voltage_diff=0.01, max_cables=100, - power_factor=1, - component_type="load", - reactive_power_mode="inductive", - ) - assert cable_data.name == "NAYY 4x1x300" - assert num_parallel_cables == 12 - - cable_data, num_parallel_cables = tools.select_cable( - self.edisgo, - "lv", - 0.18, - length=1, - max_voltage_diff=0.01, - max_cables=100, - power_factor=None, - component_type="load", - reactive_power_mode="inductive", + component_type="conventional_load", ) assert cable_data.name == "NAYY 4x1x300" assert num_parallel_cables == 14 @@ -479,29 +350,26 @@ def test_select_cable(self): length=1, max_voltage_diff=0.01, max_cables=100, - power_factor=None, - component_type="gen", - reactive_power_mode="inductive", + component_type="generator", ) assert cable_data.name == "NAYY 4x1x300" assert num_parallel_cables == 8 try: - cable_data, num_parallel_cables = tools.select_cable( + tools.select_cable( self.edisgo, "lv", 0.18, length=1, max_voltage_diff=0.01, max_cables=100, - power_factor=None, component_type="fail", - reactive_power_mode="inductive", ) except ValueError as e: assert ( str(e) == "Specified component type is not valid. " - "Must either be 'gen', 'load', 'cp' or 'hp'." 
+ "Must either be 'generator', 'conventional_load', 'charging_point', " + "'heat_pump' or 'storage_unit'." ) def test_get_downstream_buses(self): From ec36c0b71af0c0b8ad9d1232fd5461faf4bcddb2 Mon Sep 17 00:00:00 2001 From: birgits Date: Thu, 8 Aug 2024 14:53:19 +0200 Subject: [PATCH 133/141] Consider voltage drop when selecting suitable cable in connect functions --- edisgo/network/topology.py | 63 ++++++++++++++++++++++++++++---------- 1 file changed, 47 insertions(+), 16 deletions(-) diff --git a/edisgo/network/topology.py b/edisgo/network/topology.py index 2462314f0..d76944767 100755 --- a/edisgo/network/topology.py +++ b/edisgo/network/topology.py @@ -1928,7 +1928,13 @@ def connect_to_mv(self, edisgo_object, comp_data, comp_type="generator"): # avoid very short lines by limiting line length to at least 1m line_length = max(line_length, 0.001) - line_type, num_parallel = select_cable(edisgo_object, "mv", power) + line_type, num_parallel = select_cable( + edisgo_obj=edisgo_object, + level="mv", + apparent_power=power, + length=line_length, + component_type=comp_type, + ) line_name = self.add_line( bus0=self.mv_grid.station.index[0], @@ -1975,13 +1981,12 @@ def connect_to_mv(self, edisgo_object, comp_data, comp_type="generator"): for dist_min_obj in conn_objects_min_stack: # do not allow connection to virtual busses if "virtual" not in dist_min_obj["repr"]: - line_type, num_parallel = select_cable(edisgo_object, "mv", power) target_obj_result = self._connect_mv_bus_to_target_object( edisgo_object=edisgo_object, bus=self.buses_df.loc[bus, :], target_obj=dist_min_obj, - line_type=line_type.name, - number_parallel_lines=num_parallel, + comp_type=comp_type, + power=power, ) if target_obj_result is not None: @@ -2448,7 +2453,12 @@ def connect_to_lv_based_on_geolocation( return comp_name def _connect_mv_bus_to_target_object( - self, edisgo_object, bus, target_obj, line_type, number_parallel_lines + self, + edisgo_object, + bus, + target_obj, + comp_type, + power, ): """ 
Connects given MV bus to given target object (MV line or bus). @@ -2477,11 +2487,12 @@ def _connect_mv_bus_to_target_object( * shp : :shapely:`Shapely Point object` or \ :shapely:`Shapely Line object` Geometry of line or bus to connect to. - - line_type : str - Line type to use to connect new component with. - number_parallel_lines : int - Number of parallel lines to connect new component with. + comp_type : str + Type of added component. Can be 'generator', 'charging_point', 'heat_pump' + or 'storage_unit'. + Default: 'generator'. + power : float + Nominal power of the new component to be connected. Returns ------- @@ -2598,6 +2609,13 @@ def _connect_mv_bus_to_target_object( "branch_detour_factor" ], ) + line_type, num_parallel = select_cable( + edisgo_obj=edisgo_object, + level="mv", + apparent_power=power, + length=line_length, + component_type=comp_type, + ) # avoid very short lines by limiting line length to at least 1m if line_length < 0.001: line_length = 0.001 @@ -2606,8 +2624,8 @@ def _connect_mv_bus_to_target_object( bus1=bus.name, length=line_length, kind="cable", - type_info=line_type, - num_parallel=number_parallel_lines, + type_info=line_type.name, + num_parallel=num_parallel, ) # add line to equipment changes edisgo_object.results._add_line_to_equipment_changes( @@ -2624,7 +2642,7 @@ def _connect_mv_bus_to_target_object( # bus is the nearest connection point else: - # add new branch for satellite (station to station) + # add new line between new bus and closest bus line_length = geo.calc_geo_dist_vincenty( grid_topology=self, bus_source=bus.name, @@ -2633,6 +2651,13 @@ def _connect_mv_bus_to_target_object( "branch_detour_factor" ], ) + line_type, num_parallel = select_cable( + edisgo_obj=edisgo_object, + level="mv", + apparent_power=power, + length=line_length, + component_type=comp_type, + ) # avoid very short lines by limiting line length to at least 1m if line_length < 0.001: line_length = 0.001 @@ -2642,8 +2667,8 @@ def 
_connect_mv_bus_to_target_object( bus1=bus.name, length=line_length, kind="cable", - type_info=line_type, - num_parallel=number_parallel_lines, + type_info=line_type.name, + num_parallel=num_parallel, ) # add line to equipment changes @@ -2721,7 +2746,13 @@ def _connect_to_lv_bus(self, edisgo_object, target_bus, comp_type, comp_data): line_length = max(line_length, 0.001) # get suitable line type - line_type, num_parallel = select_cable(edisgo_object, "lv", comp_data["p"]) + line_type, num_parallel = select_cable( + edisgo_obj=edisgo_object, + level="lv", + apparent_power=comp_data["p"], + component_type=comp_type, + length=line_length, + ) line_name = self.add_line( bus0=target_bus, bus1=b, From a49d90746de549888d97d9b824cac360c4e75c7f Mon Sep 17 00:00:00 2001 From: birgits Date: Thu, 8 Aug 2024 14:53:45 +0200 Subject: [PATCH 134/141] Adapt test that is now failing because voltage drop is too high to find suitable cable --- tests/network/test_topology.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/network/test_topology.py b/tests/network/test_topology.py index 0baf02f34..e977a5bd8 100644 --- a/tests/network/test_topology.py +++ b/tests/network/test_topology.py @@ -1720,7 +1720,7 @@ def test_connect_to_lv(self): loads_before = self.edisgo.topology.loads_df test_hp = { - "p_set": 0.3, + "p_set": 0.1, "geom": geom, "voltage_level": 6, "mvlv_subst_id": 6, @@ -1751,7 +1751,7 @@ def test_connect_to_lv(self): new_line_df.loc[new_line_df.index[0], ["bus0", "bus1"]] ) # check new heat pump - assert self.edisgo.topology.loads_df.at[comp_name, "p_set"] == 0.3 + assert self.edisgo.topology.loads_df.at[comp_name, "p_set"] == 0.1 # ############# storage unit ################# # test existing substation ID (voltage level 7) From e332851461ebc8c8092f83bd5c678201dbaee6f7 Mon Sep 17 00:00:00 2001 From: joda9 Date: Thu, 8 Aug 2024 15:02:48 +0200 Subject: [PATCH 135/141] add holiday parameter to load_time_series_demandlib --- 
edisgo/io/timeseries_import.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/edisgo/io/timeseries_import.py b/edisgo/io/timeseries_import.py index 5d154b965..64facf904 100644 --- a/edisgo/io/timeseries_import.py +++ b/edisgo/io/timeseries_import.py @@ -297,6 +297,10 @@ def load_time_series_demandlib(edisgo_obj, timeindex=None): "day": edisgo_obj.config["demandlib"]["weekend_day"], "night": edisgo_obj.config["demandlib"]["weekend_night"], }, + "holiday": { + "day": edisgo_obj.config["demandlib"]["weekend_day"], + "night": edisgo_obj.config["demandlib"]["weekend_night"], + }, }, ) From 455272905e96563a821ef0ac629b80160a1da89f Mon Sep 17 00:00:00 2001 From: birgits Date: Thu, 8 Aug 2024 15:44:42 +0200 Subject: [PATCH 136/141] Add changes to whatsnew --- doc/whatsnew/v0-3-0.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/whatsnew/v0-3-0.rst b/doc/whatsnew/v0-3-0.rst index 7cb375542..7cf4110a1 100644 --- a/doc/whatsnew/v0-3-0.rst +++ b/doc/whatsnew/v0-3-0.rst @@ -27,3 +27,4 @@ Changes * Added a new reinforcement method that separate lv grids when the overloading is very high `#380 `_ * Move function to assign feeder to Topology class and add methods to the Grid class to get information on the feeders `#360 `_ * Added a storage operation strategy where the storage is charged when PV feed-in is higher than electricity demand of the household and discharged when electricity demand exceeds PV generation `#386 `_ +* Added an estimation of the voltage deviation over a cable when selecting a suitable cable to connect a new component `#411 `_ From d1b5aa24ecd012352fd3bfa1726e9b0aae436cbd Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 14 Aug 2024 11:46:33 +0200 Subject: [PATCH 137/141] add default holiday scaling factors for demandlib --- edisgo/config/config_timeseries_default.cfg | 2 ++ edisgo/io/timeseries_import.py | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/edisgo/config/config_timeseries_default.cfg 
b/edisgo/config/config_timeseries_default.cfg index bfb97351c..a9df61b7e 100644 --- a/edisgo/config/config_timeseries_default.cfg +++ b/edisgo/config/config_timeseries_default.cfg @@ -129,6 +129,8 @@ week_day = 0.8 week_night = 0.6 weekend_day = 0.6 weekend_night = 0.6 +holiday_day = 0.6 +holiday_night = 0.6 # tuple specifying the beginning/end of a workday (e.g. 18:00) day_start = 6:00 day_end = 22:00 diff --git a/edisgo/io/timeseries_import.py b/edisgo/io/timeseries_import.py index 64facf904..6c66f33af 100644 --- a/edisgo/io/timeseries_import.py +++ b/edisgo/io/timeseries_import.py @@ -298,8 +298,8 @@ def load_time_series_demandlib(edisgo_obj, timeindex=None): "night": edisgo_obj.config["demandlib"]["weekend_night"], }, "holiday": { - "day": edisgo_obj.config["demandlib"]["weekend_day"], - "night": edisgo_obj.config["demandlib"]["weekend_night"], + "day": edisgo_obj.config["demandlib"]["holiday_day"], + "night": edisgo_obj.config["demandlib"]["holiday_night"], }, }, ) From f001ff3b173152526e999d655e1dd457db4292e4 Mon Sep 17 00:00:00 2001 From: joda9 Date: Wed, 14 Aug 2024 12:59:15 +0200 Subject: [PATCH 138/141] fix wrong testvalues --- tests/io/test_timeseries_import.py | 10 +++++++--- tests/network/test_timeseries.py | 8 ++++---- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/tests/io/test_timeseries_import.py b/tests/io/test_timeseries_import.py index 17340163b..c6abb872e 100644 --- a/tests/io/test_timeseries_import.py +++ b/tests/io/test_timeseries_import.py @@ -89,16 +89,20 @@ def test_feedin_oedb(self): def test_load_time_series_demandlib(self): edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_path) - timeindex = pd.date_range("1/1/2018", periods=7000, freq="H") + timeindex = pd.date_range("1/1/2018", periods=8760, freq="H") load = timeseries_import.load_time_series_demandlib(edisgo, timeindex) assert ( load.columns == ["cts", "residential", "agricultural", "industrial"] ).all() - assert len(load) == 7000 + assert len(load) == 8760 assert 
np.isclose(load.loc[timeindex[453], "cts"], 8.33507e-05) assert np.isclose(load.loc[timeindex[13], "residential"], 1.73151e-04) assert np.isclose(load.loc[timeindex[6328], "agricultural"], 1.01346e-04) - assert np.isclose(load.loc[timeindex[4325], "industrial"], 9.91768e-05) + assert np.isclose(load.loc[timeindex[4325], "industrial"], 9.87654320e-05) + assert np.isclose(load.sum()["cts"], 1.0) + assert np.isclose(load.sum()["residential"], 1.0) + assert np.isclose(load.sum()["agricultural"], 1.0) + assert np.isclose(load.sum()["industrial"], 1.0) @pytest.mark.local def test_cop_oedb(self): diff --git a/tests/network/test_timeseries.py b/tests/network/test_timeseries.py index 4666a836a..2e8b717b4 100644 --- a/tests/network/test_timeseries.py +++ b/tests/network/test_timeseries.py @@ -1565,9 +1565,9 @@ def test_predefined_conventional_loads_by_sector(self, caplog): index=index, columns=["cts", "residential", "agricultural", "industrial"], data=[ - [0.0000597, 0.0000782, 0.0000654, 0.0000992], - [0.0000526, 0.0000563, 0.0000611, 0.0000992], - [0.0000459, 0.0000451, 0.0000585, 0.0000992], + [0.000059711, 0.0000782190, 0.00006540, 0.00009876], + [0.000052590, 0.0000563428, 0.00006110, 0.00009876], + [0.000045927, 0.0000451043, 0.00005843, 0.00009876], ], ) @@ -1656,7 +1656,7 @@ def test_predefined_conventional_loads_by_sector(self, caplog): self.edisgo.timeseries.loads_active_power[ "Load_industrial_LVGrid_6_1" ].values, - [0.05752256] * 3, + [0.05728395] * 3, ).all() assert np.isclose( self.edisgo.timeseries.loads_active_power.loc[ From 60be4ff36502f13b3f01b6bdb9f6ec82731b39ae Mon Sep 17 00:00:00 2001 From: joda9 <66819219+joda9@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:04:55 +0200 Subject: [PATCH 139/141] Update demandlib version --- rtd_requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rtd_requirements.txt b/rtd_requirements.txt index 3900ea862..3344a499e 100644 --- a/rtd_requirements.txt +++ b/rtd_requirements.txt @@ -1,5 
+1,5 @@ dash < 2.9.0 -demandlib < 0.2.0 +demandlib egoio >= 0.4.7 geopy >= 2.0.0 jupyter_dash From bfe8d13e708a666085b4f967898f2ad19e7f3696 Mon Sep 17 00:00:00 2001 From: joda9 Date: Thu, 15 Aug 2024 19:27:28 +0200 Subject: [PATCH 140/141] move find_meshes to topology.py --- edisgo/network/topology.py | 29 +++++++++++++++++++++++++++ edisgo/opf/powermodels_opf.py | 33 +++---------------------------- tests/network/test_topology.py | 25 +++++++++++++++++++++++ tests/opf/test_powermodels_opf.py | 27 +------------------------ 4 files changed, 58 insertions(+), 56 deletions(-) diff --git a/edisgo/network/topology.py b/edisgo/network/topology.py index 2462314f0..3569a27c5 100755 --- a/edisgo/network/topology.py +++ b/edisgo/network/topology.py @@ -3092,6 +3092,9 @@ def check_integrity(self): f"optimisation." ) + # check for meshed grid + self.find_meshes() + def assign_feeders(self, mode: str = "grid_feeder"): """ Assigns MV or LV feeder to each bus and line, depending on the `mode`. @@ -3160,3 +3163,29 @@ def aggregate_lv_grid_at_station(self, lv_grid_id: int | str) -> None: def __repr__(self): return f"Network topology {self.id}" + + def find_meshes(edisgo_obj) -> list[list[int]] | None: + """ + Find all meshes in the grid. + + Parameters + ---------- + edisgo_obj : EDisGo + EDisGo object. + + Returns + ------- + Optional[List[List[int]]] + List of all meshes in the grid. + Each mesh is represented as a list of node indices. + If no meshes are found, None is returned. + """ + meshes = nx.cycle_basis(edisgo_obj.to_graph()) + if meshes: + logger.warning( + "Grid contains mesh(es). This might cause problems in " + "the power flow or optimisation." 
+ ) + return meshes + else: + return None diff --git a/edisgo/opf/powermodels_opf.py b/edisgo/opf/powermodels_opf.py index 52299c80c..85da160a8 100644 --- a/edisgo/opf/powermodels_opf.py +++ b/edisgo/opf/powermodels_opf.py @@ -4,44 +4,17 @@ import subprocess import sys -from typing import List, Optional +from typing import Optional -import networkx as nx import numpy as np from edisgo.flex_opt import exceptions from edisgo.io.powermodels_io import from_powermodels +from edisgo.network.topology import Topology logger = logging.getLogger(__name__) -def find_meshes(edisgo_obj) -> Optional[List[List[int]]]: - """ - Find all meshes in the grid. - - Parameters - ---------- - edisgo_obj : EDisGo - EDisGo object. - - Returns - ------- - Optional[List[List[int]]] - List of all meshes in the grid. - Each mesh is represented as a list of node indices. - If no meshes are found, None is returned. - """ - meshes = nx.cycle_basis(edisgo_obj.to_graph()) - if meshes: - logger.warning( - "Grid contains mesh(es). This might cause problems in " - "the power flow or optimisation." - ) - return meshes - else: - return None - - def pm_optimize( edisgo_obj, s_base: int = 1, @@ -135,7 +108,7 @@ def pm_optimize( Default: True. 
""" - find_meshes(edisgo_obj) + Topology.find_meshes(edisgo_obj) opf_dir = os.path.dirname(os.path.abspath(__file__)) solution_dir = os.path.join(opf_dir, "opf_solutions") pm, hv_flex_dict = edisgo_obj.to_powermodels( diff --git a/tests/network/test_topology.py b/tests/network/test_topology.py index 0baf02f34..3ef11d949 100644 --- a/tests/network/test_topology.py +++ b/tests/network/test_topology.py @@ -1909,3 +1909,28 @@ def test_check_integrity(self, caplog): assert "There are lines with very short line lengths" in caplog.text assert "Very small values for impedance of lines" and line in caplog.text caplog.clear() + + def test_find_meshes(self, caplog: pytest.LogCaptureFixture): + meshes = Topology.find_meshes(self.edisgo) + assert not meshes + self.edisgo.topology.add_line( + "Bus_GeneratorFluctuating_2", + "Bus_GeneratorFluctuating_6", + 0.1, + x=0.1, + r=0.1, + ) + meshes = Topology.find_meshes(self.edisgo) + assert len(meshes) == 1 + assert "Bus_GeneratorFluctuating_2" in meshes[0] + assert "Bus_GeneratorFluctuating_6" in meshes[0] + self.edisgo.topology.add_line( + "Bus_BranchTee_LVGrid_2_3", "Bus_BranchTee_LVGrid_3_3", 0.1, x=0.1, r=0.1 + ) + meshes = Topology.find_meshes(self.edisgo) + assert len(meshes) == 2 + assert "Bus_BranchTee_LVGrid_2_3" in meshes[1] + assert ( + "Grid contains mesh(es). This might cause problems" + " in the power flow or optimisation." 
in caplog.text + ) diff --git a/tests/opf/test_powermodels_opf.py b/tests/opf/test_powermodels_opf.py index 2efb7d11c..4f6482f97 100644 --- a/tests/opf/test_powermodels_opf.py +++ b/tests/opf/test_powermodels_opf.py @@ -3,7 +3,7 @@ import pytest from edisgo import EDisGo -from edisgo.opf.powermodels_opf import find_meshes, pm_optimize +from edisgo.opf.powermodels_opf import pm_optimize from edisgo.tools.tools import aggregate_district_heating_components @@ -337,28 +337,3 @@ def test_pm_optimize(self): ) ) ) - - def test_find_meshes(self, caplog: pytest.LogCaptureFixture): - meshes = find_meshes(self.edisgo) - assert not meshes - self.edisgo.topology.add_line( - "Bus_GeneratorFluctuating_2", - "Bus_GeneratorFluctuating_6", - 0.1, - x=0.1, - r=0.1, - ) - meshes = find_meshes(self.edisgo) - assert len(meshes) == 1 - assert "Bus_GeneratorFluctuating_2" in meshes[0] - assert "Bus_GeneratorFluctuating_6" in meshes[0] - self.edisgo.topology.add_line( - "Bus_BranchTee_LVGrid_2_3", "Bus_BranchTee_LVGrid_3_3", 0.1, x=0.1, r=0.1 - ) - meshes = find_meshes(self.edisgo) - assert len(meshes) == 2 - assert "Bus_BranchTee_LVGrid_2_3" in meshes[1] - assert ( - "Grid contains mesh(es). This might cause problems" - " in the power flow or optimisation." in caplog.text - ) From fc52d08247040db75ceaa273c850cc699dccd642 Mon Sep 17 00:00:00 2001 From: birgits Date: Wed, 21 Aug 2024 14:53:00 +0200 Subject: [PATCH 141/141] Adapt warning message --- edisgo/network/topology.py | 6 ++++-- tests/network/test_topology.py | 5 +---- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/edisgo/network/topology.py b/edisgo/network/topology.py index 3569a27c5..d489641b0 100755 --- a/edisgo/network/topology.py +++ b/edisgo/network/topology.py @@ -3183,8 +3183,10 @@ def find_meshes(edisgo_obj) -> list[list[int]] | None: meshes = nx.cycle_basis(edisgo_obj.to_graph()) if meshes: logger.warning( - "Grid contains mesh(es). This might cause problems in " - "the power flow or optimisation." 
+ "Grid contains mesh(es). Be aware, that the grid expansion methodology " + "is currently not able to handle meshes. Further, the optimisation of " + "flexibility dispatch is not exact in case of meshed grids, but can " + "still be used." ) return meshes else: diff --git a/tests/network/test_topology.py b/tests/network/test_topology.py index 3ef11d949..1f0c2e9ad 100644 --- a/tests/network/test_topology.py +++ b/tests/network/test_topology.py @@ -1930,7 +1930,4 @@ def test_find_meshes(self, caplog: pytest.LogCaptureFixture): meshes = Topology.find_meshes(self.edisgo) assert len(meshes) == 2 assert "Bus_BranchTee_LVGrid_2_3" in meshes[1] - assert ( - "Grid contains mesh(es). This might cause problems" - " in the power flow or optimisation." in caplog.text - ) + assert "Grid contains mesh(es)." in caplog.text