diff --git a/.github/workflows/pytest.yaml b/.github/workflows/pytest.yaml
index 257514e4..c0cb5cb7 100644
--- a/.github/workflows/pytest.yaml
+++ b/.github/workflows/pytest.yaml
@@ -43,4 +43,4 @@ jobs:
         pip install ${{ matrix.env }}
     - name: Test with pytest ${{ matrix.env }}
       run: |
-        pytest
\ No newline at end of file
+        pytest
diff --git a/docs/sphinx/source/changelog.rst b/docs/sphinx/source/changelog.rst
index 3daa0938..2248c8f7 100644
--- a/docs/sphinx/source/changelog.rst
+++ b/docs/sphinx/source/changelog.rst
@@ -1,5 +1,6 @@
 RdTools Change Log
 ==================
+.. include:: changelog/v2.1.5.rst
 .. include:: changelog/v2.1.4.rst
 .. include:: changelog/v2.1.3.rst
 .. include:: changelog/v2.1.2.rst
diff --git a/docs/sphinx/source/changelog/v2.1.5.rst b/docs/sphinx/source/changelog/v2.1.5.rst
new file mode 100644
index 00000000..2cfa8fa3
--- /dev/null
+++ b/docs/sphinx/source/changelog/v2.1.5.rst
@@ -0,0 +1,13 @@
+*************************
+v2.1.5 (May 16, 2023)
+*************************
+
+Bug Fixes
+---------
+* Add support for pandas 2.0 (:issue:`361`, :pull:`362`)
+
+
+Contributors
+------------
+* Kevin Anderson (:ghuser:`kanderso-nrel`)
+* Michael Deceglie (:ghuser:`mdeceglie`)
diff --git a/rdtools/degradation.py b/rdtools/degradation.py
index 7a6bee70..b97bc519 100644
--- a/rdtools/degradation.py
+++ b/rdtools/degradation.py
@@ -36,7 +36,7 @@ def degradation_ols(energy_normalized, confidence_level=68.2):
 
     # calculate a years column as x value for regression, ignoring leap years
     day_diffs = (df.index - df.index[0])
-    df['days'] = day_diffs.astype('timedelta64[s]') / (60 * 60 * 24)
+    df['days'] = day_diffs / pd.Timedelta('1d')
     df['years'] = df.days / 365.0
 
     # add intercept-constant to the exogeneous variable
@@ -121,7 +121,7 @@ def degradation_classical_decomposition(energy_normalized,
 
     # calculate a years column as x value for regression, ignoring leap years
     day_diffs = (df.index - df.index[0])
-    df['days'] = day_diffs.astype('timedelta64[s]') / (60 * 60 * 24)
+    df['days'] = day_diffs / pd.Timedelta('1d')
     df['years'] = df.days / 365.0
 
     # Compute yearly rolling mean to isolate trend component using
@@ -266,7 +266,7 @@ def degradation_year_on_year(energy_normalized, recenter=True,
                        tolerance=pd.Timedelta('8D')
                        )
 
-    df['time_diff_years'] = (df.dt - df.dt_right).astype('timedelta64[h]') / 8760.0
+    df['time_diff_years'] = (df.dt - df.dt_right) / pd.Timedelta('365d')
     df['yoy'] = 100.0 * (df.energy - df.energy_right) / (df.time_diff_years)
     df.index = df.dt
 
diff --git a/rdtools/filtering.py b/rdtools/filtering.py
index c72f0667..1fa662f6 100644
--- a/rdtools/filtering.py
+++ b/rdtools/filtering.py
@@ -424,8 +424,9 @@ def logic_clip_filter(power_ac,
     # series sampling frequency is less than 95% consistent.
     _check_data_sampling_frequency(power_ac)
     # Get the sampling frequency of the time series
-    time_series_sampling_frequency = power_ac.index.to_series().diff()\
-        .astype('timedelta64[m]').mode()[0]
+    time_series_sampling_frequency = (
+        power_ac.index.to_series().diff() / pd.Timedelta('60s')
+    ).mode()[0]
     # Make copies of the original inputs for the cases that the data is
     # changes for clipping evaluation
     original_time_series_sampling_frequency = time_series_sampling_frequency
@@ -651,8 +652,7 @@ def xgboost_clip_filter(power_ac,
     # series sampling frequency is less than 95% consistent.
     _check_data_sampling_frequency(power_ac)
     # Get the most common sampling frequency
-    sampling_frequency = int(power_ac.index.to_series().diff()
-                             .astype('timedelta64[m]').mode()[0])
+    sampling_frequency = int((power_ac.index.to_series().diff() / pd.Timedelta('60s')).mode()[0])
     freq_string = str(sampling_frequency) + "T"
     # Min-max normalize
     # Resample the series based on the most common sampling frequency
diff --git a/rdtools/test/availability_test.py b/rdtools/test/availability_test.py
index 7197608f..2b6608c2 100644
--- a/rdtools/test/availability_test.py
+++ b/rdtools/test/availability_test.py
@@ -162,7 +162,7 @@ def difficult_data():
 
     # generate a plausible clear-sky power signal
     times = pd.date_range('2019-01-01', '2019-01-06', freq='15min',
-                          tz='US/Eastern', closed='left')
+                          tz='US/Eastern')
     location = pvlib.location.Location(40, -80)
     clearsky = location.get_clearsky(times, model='haurwitz')
     # just scale GHI to power for simplicity
diff --git a/rdtools/test/degradation_test.py b/rdtools/test/degradation_test.py
index 9bfb1e6c..a66e4cdd 100644
--- a/rdtools/test/degradation_test.py
+++ b/rdtools/test/degradation_test.py
@@ -34,7 +34,7 @@ def get_corr_energy(cls, rd, input_freq):
             freq = input_freq
 
         x = pd.date_range(start=start, end=end, freq=freq)
-        day_deltas = (x - x[0]).astype('timedelta64[s]') / (60.0 * 60.0 * 24)
+        day_deltas = (x - x[0]) / pd.Timedelta('1d')
         noise = (np.random.rand(len(day_deltas)) - 0.5) / 1e3
         y = 1 + daily_rd * day_deltas + noise
 
diff --git a/setup.py b/setup.py
index 12951aa0..a396b793 100644
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,7 @@
     'pytest >= 3.6.3',
     'coverage',
     'flake8',
-    'nbval',
+    'nbval==0.9.6',  # https://github.com/computationalmodelling/nbval/issues/194
     'pytest-mock',
 ]
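
Note on the recurring pattern in this diff (this note and the sketch below are commentary, not part of the patch): each .astype('timedelta64[...]') cast on timedelta data is replaced with division by a pd.Timedelta, since under pandas 2.0 those casts no longer return the plain float values the old code relied on (the behavior change behind issue 361 / PR 362 cited in the changelog). The minimal sketch below uses made-up sample data to illustrate the replacement idiom; it runs unchanged on pandas 1.x and 2.x.

import pandas as pd

# Hypothetical sample index (not from RdTools): four timestamps 12 hours apart
index = pd.date_range('2019-01-01', periods=4, freq='12h')

# Elapsed time since the first timestamp, as a TimedeltaIndex
day_diffs = index - index[0]

# Dividing timedeltas by a pd.Timedelta yields plain floats (here, fractional
# days), which is the idiom the patched code uses in place of astype casts
days = day_diffs / pd.Timedelta('1d')
print(list(days))  # [0.0, 0.5, 1.0, 1.5]

The divisor makes the target unit explicit: dividing by pd.Timedelta('60s') gives minutes (as in the filtering.py changes) and dividing by pd.Timedelta('1d') gives days (as in degradation.py).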