Skip to content

Commit

Permalink
formatting conftest.py and soiling_test.py
Browse files Browse the repository at this point in the history
  • Loading branch information
nmoyer committed Aug 2, 2024
1 parent c08a0e9 commit 35a3ec9
Show file tree
Hide file tree
Showing 2 changed files with 751 additions and 366 deletions.
118 changes: 69 additions & 49 deletions rdtools/test/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,7 @@

import rdtools

# Base version of the installed rdtools package (dev/rc suffixes stripped)
# so version-gated tests compare against a clean release number.
# (Deduplicated: the diff rendering repeated the old and new assignment.)
rdtools_base_version = parse_version(parse_version(rdtools.__version__).base_version)


# decorator takes one argument: the base version for which it should fail
Expand All @@ -26,17 +25,20 @@ def wrapper(func):
def inner(*args, **kwargs):
# fail if the version is too high
if rdtools_base_version >= parse_version(version):
pytest.fail('the tested function is scheduled to be '
'removed in %s' % version)
pytest.fail(
"the tested function is scheduled to be " "removed in %s" % version
)
# otherwise return the function to be executed
else:
return func(*args, **kwargs)

return inner

return wrapper


def assert_isinstance(obj, klass):
    """Assert that ``obj`` is an instance of ``klass`` with a readable message."""
    # Deduplicated: the diff rendering repeated the old and new assert line.
    assert isinstance(obj, klass), f"got {type(obj)}, expected {klass}"


def assert_warnings(messages, record):
Expand All @@ -58,17 +60,19 @@ def assert_warnings(messages, record):
assert found_match, f"warning '{pattern}' not in {warning_messages}"


# Skip marker for tests exercising behavior removed/changed after pvlib 0.8.1.
# (Deduplicated: the diff rendering repeated the old and new assignment.)
requires_pvlib_below_090 = pytest.mark.skipif(
    parse_version(pvlib.__version__) > parse_version("0.8.1"),
    reason="requires pvlib <= 0.8.1",
)


# %% Soiling fixtures


@pytest.fixture()
def soiling_times():
    """Daily, fixed-offset-timezone index (75 days) shared by the soiling fixtures."""
    # Deduplicated: the diff rendering repeated the old and new body lines.
    tz = "Etc/GMT+7"
    times = pd.date_range("2019/01/01", "2019/03/16", freq="D", tz=tz)
    return times


Expand All @@ -85,35 +89,42 @@ def soiling_normalized_daily(soiling_times):

return normalized_daily


@pytest.fixture()
def soiling_normalized_daily_with_neg_shifts(soiling_times):
    """Synthetic normalized daily energy with downward shifts inside soiling intervals.

    Three declining intervals; intervals 1 and 3 each contain a negative step
    partway through. Noise is seeded for reproducibility.
    (Deduplicated: the diff rendering repeated the old and new ``profile`` assignment.)
    """
    interval_1_v1 = 1 - 0.005 * np.arange(0, 15, 1)
    interval_1_v2 = (0.9 - 0.005 * 15) - 0.005 * np.arange(0, 10, 1)
    interval_2 = 1 - 0.002 * np.arange(0, 25, 1)
    interval_3_v1 = 1 - 0.001 * np.arange(0, 10, 1)
    interval_3_v2 = (0.95 - 0.001 * 10) - 0.001 * np.arange(0, 15, 1)
    profile = np.concatenate(
        (interval_1_v1, interval_1_v2, interval_2, interval_3_v1, interval_3_v2)
    )
    np.random.seed(1977)  # deterministic noise for reproducible tests
    noise = 0.01 * np.random.rand(75)
    normalized_daily = pd.Series(data=profile, index=soiling_times)
    normalized_daily = normalized_daily + noise

    return normalized_daily


@pytest.fixture()
def soiling_normalized_daily_with_piecewise_slope(soiling_times):
    """Synthetic normalized daily energy whose soiling rate changes mid-interval.

    Two intervals, each with a slope break partway through, plus seeded noise.
    (Deduplicated: the diff rendering repeated the old and new ``profile`` assignment.)
    """
    interval_1_v1 = 1 - 0.002 * np.arange(0, 20, 1)
    interval_1_v2 = (1 - 0.002 * 20) - 0.007 * np.arange(0, 20, 1)
    interval_2_v1 = 1 - 0.01 * np.arange(0, 20, 1)
    interval_2_v2 = (1 - 0.01 * 20) - 0.001 * np.arange(0, 15, 1)
    profile = np.concatenate(
        (interval_1_v1, interval_1_v2, interval_2_v1, interval_2_v2)
    )
    np.random.seed(1977)  # deterministic noise for reproducible tests
    noise = 0.01 * np.random.rand(75)
    normalized_daily = pd.Series(data=profile, index=soiling_times)
    normalized_daily = normalized_daily + noise

    return normalized_daily


@pytest.fixture()
def soiling_insolation(soiling_times):
insolation = np.empty((75,))
Expand All @@ -128,8 +139,8 @@ def soiling_insolation(soiling_times):

@pytest.fixture()
def cods_times():
    """Two-year daily, fixed-offset-timezone index used by the CODS fixtures."""
    # Deduplicated: the diff rendering repeated the old and new body lines.
    tz = "Etc/GMT+7"
    cods_times = pd.date_range("2019/01/01", "2021/01/01", freq="D", tz=tz)
    return cods_times


Expand All @@ -141,7 +152,9 @@ def cods_normalized_daily_wo_noise(cods_times):
interval_3 = 1 - 0.001 * np.arange(0, 25, 1)
profile = np.concatenate((interval_1, interval_2, interval_3))
repeated_profile = np.concatenate([profile for _ in range(int(np.ceil(N / 75)))])
cods_normalized_daily_wo_noise = pd.Series(data=repeated_profile[:N], index=cods_times)
cods_normalized_daily_wo_noise = pd.Series(
data=repeated_profile[:N], index=cods_times
)
return cods_normalized_daily_wo_noise


Expand All @@ -159,18 +172,21 @@ def cods_normalized_daily_small_soiling(cods_normalized_daily_wo_noise):
N = len(cods_normalized_daily_wo_noise)
np.random.seed(1977)
noise = 1 + 0.02 * (np.random.rand(N) - 0.5)
cods_normalized_daily_small_soiling = cods_normalized_daily_wo_noise.apply(
lambda row: 1-(1-row)*0.1) * noise
cods_normalized_daily_small_soiling = (
cods_normalized_daily_wo_noise.apply(lambda row: 1 - (1 - row) * 0.1) * noise
)
return cods_normalized_daily_small_soiling


# %% Availability fixtures

# Cartesian product of outage scenarios for the availability tests:
# (value reported for power during the outage, value reported for cumulative
# energy — None means the real value is kept — and the fraction of the comms
# outage that is also a genuine power outage).
# (Deduplicated: the diff rendering repeated the old and new assignment.)
ENERGY_PARAMETER_SPACE = list(
    itertools.product(
        [0, np.nan],  # outage value for power
        [0, np.nan, None],  # value for cumulative energy (None means real value)
        [0, 0.25, 0.5, 0.75, 1.0],  # fraction of comms outage that is power outage
    )
)
# display names for the test cases. default is just 0..N
ENERGY_PARAMETER_IDS = ["_".join(map(str, p)) for p in ENERGY_PARAMETER_SPACE]

Expand All @@ -180,20 +196,23 @@ def _generate_energy_data(power_value, energy_value, outage_fraction):
Generate an artificial mixed communication/power outage.
"""
# a few days of clearsky irradiance for creating a plausible power signal
times = pd.date_range('2019-01-01', '2019-01-15 23:59', freq='15min',
tz='US/Eastern')
times = pd.date_range(
"2019-01-01", "2019-01-15 23:59", freq="15min", tz="US/Eastern"
)
location = pvlib.location.Location(40, -80)
# use haurwitz to avoid dependency on `tables`
clearsky = location.get_clearsky(times, model='haurwitz')
clearsky = location.get_clearsky(times, model="haurwitz")

# just set base inverter power = ghi+clipping for simplicity
base_power = clearsky['ghi'].clip(upper=0.8*clearsky['ghi'].max())

inverter_power = pd.DataFrame({
'inv0': base_power,
'inv1': base_power*0.7,
'inv2': base_power*1.3,
})
base_power = clearsky["ghi"].clip(upper=0.8 * clearsky["ghi"].max())

inverter_power = pd.DataFrame(
{
"inv0": base_power,
"inv1": base_power * 0.7,
"inv2": base_power * 1.3,
}
)
expected_power = inverter_power.sum(axis=1)
# dawn/dusk points
expected_power[expected_power < 10] = 0
Expand All @@ -202,10 +221,10 @@ def _generate_energy_data(power_value, energy_value, outage_fraction):
expected_power *= 1.05 + np.random.normal(0, scale=0.05, size=len(times))

# calculate what part of the comms outage is a power outage
comms_outage = slice('2019-01-03 00:00', '2019-01-06 00:00')
comms_outage = slice("2019-01-03 00:00", "2019-01-06 00:00")
start = times.get_loc(comms_outage.start)
stop = times.get_loc(comms_outage.stop)
power_outage = slice(start, int(start + outage_fraction * (stop-start)))
power_outage = slice(start, int(start + outage_fraction * (stop - start)))
expected_loss = inverter_power.iloc[power_outage, :].sum().sum() / 4
inverter_power.iloc[power_outage, :] = 0
meter_power = inverter_power.sum(axis=1)
Expand All @@ -219,14 +238,16 @@ def _generate_energy_data(power_value, energy_value, outage_fraction):
meter_energy[comms_outage] = energy_value
inverter_power.loc[comms_outage, :] = power_value

expected_type = 'real' if outage_fraction > 0 else 'comms'
expected_type = "real" if outage_fraction > 0 else "comms"

return (meter_power,
meter_energy,
inverter_power,
expected_power,
expected_loss,
expected_type)
return (
meter_power,
meter_energy,
inverter_power,
expected_power,
expected_loss,
expected_type,
)


@pytest.fixture(params=ENERGY_PARAMETER_SPACE, ids=ENERGY_PARAMETER_IDS)
Expand Down Expand Up @@ -254,13 +275,12 @@ def energy_data_comms_single():

@pytest.fixture
def availability_analysis_object(energy_data_outage_single):
    """Return an AvailabilityAnalysis already run on one synthetic outage scenario.

    Unpacks the (meter power, meter energy, inverter power, expected power,
    expected loss, expected type) tuple from ``energy_data_outage_single`` and
    feeds the first four into the analysis; loss/type are unused here.
    (Deduplicated: the diff rendering repeated the old and new unpack and
    constructor lines.)
    """
    (meter_power, meter_energy, inverter_power, expected_power, _, _) = (
        energy_data_outage_single
    )

    aa = rdtools.availability.AvailabilityAnalysis(
        meter_power, inverter_power, meter_energy, expected_power
    )
    aa.run()
    return aa
Loading

0 comments on commit 35a3ec9

Please sign in to comment.