
Commit d569509

adjust tests to new frequency conventions and address some more downcasting warnings

martin-springer committed Dec 18, 2024
1 parent 43dffd4 commit d569509
Showing 6 changed files with 39 additions and 33 deletions.
11 changes: 6 additions & 5 deletions rdtools/analysis_chains.py
@@ -562,17 +562,16 @@ def _call_clearsky_filter(filter_string):
warnings.warn(
"ad_hoc_filter contains NaN values; setting to False (excluding)"
)
- ad_hoc_filter = ad_hoc_filter.fillna(False)
+ ad_hoc_filter.loc[ad_hoc_filter.isnull()] = False

if not filter_components.index.equals(ad_hoc_filter.index):
warnings.warn(
"ad_hoc_filter index does not match index of other filters; missing "
"values will be set to True (kept). Align the index with the index "
"of the filter_components attribute to prevent this warning"
)
- ad_hoc_filter = ad_hoc_filter.reindex(filter_components.index).fillna(
-     True
- )
+ ad_hoc_filter = ad_hoc_filter.reindex(filter_components.index)
+ ad_hoc_filter.loc[ad_hoc_filter.isnull()] = True

filter_components["ad_hoc_filter"] = ad_hoc_filter

@@ -654,6 +653,7 @@ def _aggregated_filter(self, aggregated, case):
"aggregated ad_hoc_filter contains NaN values; setting to False (excluding)"
)
- ad_hoc_filter_aggregated = ad_hoc_filter_aggregated.fillna(False)
+ ad_hoc_filter_aggregated.loc[ad_hoc_filter_aggregated.isnull()] = False

if not filter_components_aggregated.index.equals(
ad_hoc_filter_aggregated.index
@@ -666,7 +666,8 @@ def _aggregated_filter(self, aggregated, case):
)
ad_hoc_filter_aggregated = ad_hoc_filter_aggregated.reindex(
filter_components_aggregated.index
- ).fillna(True)
+ )
+ ad_hoc_filter_aggregated.loc[ad_hoc_filter_aggregated.isnull()] = True

filter_components_aggregated["ad_hoc_filter"] = ad_hoc_filter_aggregated

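Note: the fillna -> .loc changes above work around a pandas 2.2+ deprecation: filling missing values in an object-dtype boolean series and letting the result be silently downcast emits a FutureWarning. A minimal sketch of the difference (illustrative only, not part of the commit):

    import numpy as np
    import pandas as pd

    # A boolean filter containing NaN is stored with object dtype.
    ad_hoc = pd.Series([True, np.nan, False])

    # pandas >= 2.2 emits "FutureWarning: Downcasting object dtype arrays
    # on .fillna, .ffill, .bfill is deprecated" because the filled result
    # is silently cast from object to bool.
    filled = ad_hoc.fillna(False)

    # Assigning through .loc avoids the deprecated fillna downcasting path.
    ad_hoc.loc[ad_hoc.isnull()] = False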
2 changes: 1 addition & 1 deletion rdtools/availability.py
@@ -269,7 +269,7 @@ def _calc_loss_subsystem(self, low_threshold, relative_sizes,
subsystem_fraction = relative_sizes / relative_sizes.sum()
smallest_delta = (
power_subsystem.le(low_threshold)
- .replace(False, np.nan)
+ .replace(False, None)
.multiply(subsystem_fraction)
.min(axis=1)
.astype(float)
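Note: the np.nan -> None swap has the same motivation. On pandas 2.2+, replace emits "FutureWarning: Downcasting behavior in `replace` is deprecated" when its result is silently downcast. A rough sketch of the pattern (variable names are illustrative, not from the repo):

    import numpy as np
    import pandas as pd

    below_threshold = pd.Series([True, False, True])

    # pandas >= 2.2 warns here: the bool -> float64 conversion happens
    # implicitly inside replace.
    implicit = below_threshold.replace(False, np.nan)

    # Replacing with None keeps an object-dtype intermediate, and the
    # numeric conversion is made explicit with .astype(float), as in the
    # diff above.
    explicit = below_threshold.replace(False, None).astype(float)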
8 changes: 4 additions & 4 deletions rdtools/test/analysis_chains_test.py
@@ -41,7 +41,7 @@ def degradation_trend(basic_parameters, cs_input):
from degradation_test import DegradationTestCase

rd = -0.05
input_freq = "H"
input_freq = "h"
degradation_trend = DegradationTestCase.get_corr_energy(rd, input_freq)
tz = cs_input["pvlib_location"].tz
return degradation_trend.tz_localize(tz)
@@ -56,7 +56,7 @@ def sensor_parameters(basic_parameters, degradation_trend):
basic_parameters["pv"] = power
basic_parameters["poa_global"] = poa_global
basic_parameters["temperature_ambient"] = temperature_ambient
basic_parameters["interp_freq"] = "H"
basic_parameters["interp_freq"] = "h"
return basic_parameters


@@ -143,7 +143,7 @@ def test_interpolation(basic_parameters, degradation_trend):
basic_parameters["temperature_cell"] = dummy_series
basic_parameters["windspeed"] = dummy_series
basic_parameters["power_expected"] = dummy_series
basic_parameters["interp_freq"] = "H"
basic_parameters["interp_freq"] = "h"

rd_analysis = TrendAnalysis(**basic_parameters)

@@ -404,7 +404,7 @@ def test_filter_ad_hoc_warnings(workflow, sensor_parameters):

# warning about NaNs
ad_hoc_filter = pd.Series(True, index=sensor_parameters["pv"].index)
- ad_hoc_filter.iloc[10] = np.nan
+ ad_hoc_filter.iloc[10] = None  # np.nan
rd_analysis.filter_params["ad_hoc_filter"] = ad_hoc_filter
with pytest.warns(
UserWarning, match="ad_hoc_filter contains NaN values; setting to False"
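Note: the "H" -> "h" renames in this file track pandas 2.2's deprecation of the uppercase hourly offset alias. A standalone illustration (not from the repo):

    import pandas as pd

    # pandas >= 2.2: "FutureWarning: 'H' is deprecated and will be removed
    # in a future version, please use 'h' instead."
    deprecated = pd.date_range("2019-01-01", periods=3, freq="H")

    # The lowercase alias is the supported spelling going forward.
    preferred = pd.date_range("2019-01-01", periods=3, freq="h")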
31 changes: 17 additions & 14 deletions rdtools/test/degradation_test.py
@@ -53,20 +53,20 @@ def setUpClass(cls):
# define module constants and parameters

# All frequencies
cls.list_all_input_freq = ["MS", "M", "W", "D", "h", "min", "s", "Irregular_D"]
cls.list_all_input_freq = ["MS", "ME", "W", "D", "h", "min", "s", "Irregular_D"]

# Allowed frequencies for degradation_ols
cls.list_ols_input_freq = ["MS", "M", "W", "D", "h", "min", "s", "Irregular_D"]
cls.list_ols_input_freq = ["MS", "ME", "W", "D", "h", "min", "s", "Irregular_D"]

'''
Allowed frequencies for degradation_classical_decomposition
in principle CD works on higher frequency data but that makes the
tests painfully slow
'''
- cls.list_CD_input_freq = ['MS', 'M', 'W', 'D']
+ cls.list_CD_input_freq = ["MS", "ME", "W", "D"]

# Allowed frequencies for degradation_year_on_year
- cls.list_YOY_input_freq = ['MS', 'M', 'W', 'D', 'Irregular_D']
+ cls.list_YOY_input_freq = ["MS", "ME", "W", "D", "Irregular_D"]

cls.rd = -0.005

@@ -184,16 +184,19 @@ def test_usage_of_points(self):
self.assertTrue((np.sum(rd_result[2]['usage_of_points'])) == 1462)


- @pytest.mark.parametrize('start,end,freq', [
- ('2014-01-01', '2015-12-31', 'D'), # no leap day
- ('2015-01-01', '2016-12-31', 'D'), # leap day included in index
- ('2015-01-01', '2016-12-29', '7D'), # leap day in period but not in index
- ('2016-06-01', '2018-05-31', 'D'), # leap year, but no leap day in period
- # ('2016-02-29', '2018-02-28', 'd'), # starts on leap day (doesn't work)
- ('2014-03-01', '2016-02-29', 'D'), # ends on leap day
- ('2015-01-01', '2016-12-31', 'M'), # month end
- ('2015-01-01', '2016-12-31', 'MS'), # month start
- ])
+ @pytest.mark.parametrize(
+ "start,end,freq",
+ [
+ ("2014-01-01", "2015-12-31", "D"), # no leap day
+ ("2015-01-01", "2016-12-31", "D"), # leap day included in index
+ ("2015-01-01", "2016-12-29", "7D"), # leap day in period but not in index
+ ("2016-06-01", "2018-05-31", "D"), # leap year, but no leap day in period
+ # ('2016-02-29', '2018-02-28', 'd'), # starts on leap day (doesn't work)
+ ("2014-03-01", "2016-02-29", "D"), # ends on leap day
+ ("2015-01-01", "2016-12-31", "ME"), # month end
+ ("2015-01-01", "2016-12-31", "MS"), # month start
+ ],
+ )
def test_yoy_two_years_error(start, end, freq):
# GH 339
times = pd.date_range(start, end, freq=freq)
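Note: the frequency lists above change because pandas 2.2 renamed the month-end alias "M" to "ME"; the month-start alias "MS" is unchanged. For example (illustrative, not from the repo):

    import pandas as pd

    # "M" is deprecated in pandas >= 2.2; "ME" is the new month-end alias.
    month_end = pd.date_range("2015-01-01", "2016-12-31", freq="ME")

    # Month-start keeps its spelling.
    month_start = pd.date_range("2015-01-01", "2016-12-31", freq="MS")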
2 changes: 1 addition & 1 deletion rdtools/test/energy_from_power_test.py
@@ -103,7 +103,7 @@ def test_energy_from_power_single_value_with_target():
times = pd.date_range("2019-01-01", freq="15min", periods=1)
power = pd.Series([100.0], index=times)
expected_result = pd.Series([100.0], index=times, name="energy_Wh")
result = energy_from_power(power, target_frequency="H")
result = energy_from_power(power, target_frequency="h")
pd.testing.assert_series_equal(result, expected_result)


18 changes: 10 additions & 8 deletions rdtools/test/filtering_test.py
@@ -129,19 +129,22 @@ def generate_power_time_series_no_clipping():
def generate_power_time_series_irregular_intervals():
power_datetime_index = pd.Series(np.arange(1, 62))
# Add datetime index to second series
- time_range_1 = pd.date_range('2016-12-02T11:00:00.000Z',
- '2017-06-06T07:00:00.000Z', freq='1T')
+ time_range_1 = pd.date_range(
+ "2016-12-02T11:00:00.000Z", "2017-06-06T07:00:00.000Z", freq="1min"
+ )
power_datetime_index.index = pd.to_datetime(time_range_1[:61])
power_datetime_index_2 = pd.Series(np.arange(100, 200))
- time_range_2 = pd.date_range(power_datetime_index.index.max(),
- '2017-06-06T07:00:00.000Z', freq='15T')
+ time_range_2 = pd.date_range(
+ power_datetime_index.index.max(), "2017-06-06T07:00:00.000Z", freq="15min"
+ )
power_datetime_index_2.index = pd.to_datetime(time_range_2[:100])
power_datetime_index_2 = power_datetime_index_2.iloc[1:]
power_datetime_index = pd.concat([power_datetime_index,
power_datetime_index_2])
power_datetime_index_3 = pd.Series(list(reversed(np.arange(100, 200))))
- time_range_3 = pd.date_range(power_datetime_index.index.max(),
- '2017-06-06T07:00:00.000Z', freq='5T')
+ time_range_3 = pd.date_range(
+ power_datetime_index.index.max(), "2017-06-06T07:00:00.000Z", freq="5min"
+ )
power_datetime_index_3.index = pd.to_datetime(time_range_3[:100])
power_datetime_index_3 = power_datetime_index_3.iloc[1:]
power_datetime_index = pd.concat([power_datetime_index,
@@ -157,8 +160,7 @@ def generate_power_time_series_one_min_intervals():
power_datetime_index = pd.concat([power_datetime_index,
power_datetime_index[::-1]])
# Add datetime index to second series
- time_range = pd.date_range('2016-12-02T11:00:00.000Z',
- '2017-06-06T07:00:00.000Z', freq='1T')
+ time_range = pd.date_range("2016-12-02T11:00:00.000Z", "2017-06-06T07:00:00.000Z", freq="1min")
power_datetime_index.index = pd.to_datetime(time_range[:100])
# Note: Power is expected to be Series object with a datetime index.
return power_datetime_index
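Note: the '1T'/'15T'/'5T' -> "1min"/"15min"/"5min" changes follow the same convention shift; pandas 2.2 deprecates the "T" minute alias in favor of "min". A small standalone example (not from the repo):

    import pandas as pd

    # pandas >= 2.2: "FutureWarning: 'T' is deprecated and will be removed
    # in a future version, please use 'min' instead."
    deprecated = pd.date_range("2016-12-02T11:00:00Z", periods=4, freq="15T")

    # "min" is the supported alias for minute frequencies.
    preferred = pd.date_range("2016-12-02T11:00:00Z", periods=4, freq="15min")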
