Merge pull request #25 from davidusb-geek/dev/mockgetrequest
Dev/mockgetrequest
davidusb-geek authored Oct 3, 2022
2 parents 61def2d + cb2988e commit d168cbf
Showing 10 changed files with 105 additions and 14 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -5,6 +5,8 @@
- Added more detailed examples to the forecast module documentation.
- Improved handling of datetime indexes in DataFrames in the forecast module.
- Added warning messages if passed list values contain non-numeric items.
- Added missing unit tests for the forecast module, mocking the requests.get dependencies with MagicMock.
- Added the Solar.Forecast method.

## [0.3.19] - 2022-09-14
### Fix
2 changes: 2 additions & 0 deletions README.md
@@ -308,6 +308,8 @@ Here is the list of the other additional dictionary keys that can be passed at

- `solcast_rooftop_id` for the ID of your rooftop for the SolCast service implementation.

- `solar_forecast_kwp` for the PV peak installed power in kW used for the solar.forecast API call.

### A naive Model Predictive Controller

An MPC controller was introduced in v0.3.0. This is an informal/naive representation of an MPC controller.
Binary file added data/test_response_scrapper_method.pbz2
Binary file added data/test_response_solarforecast_method.pbz2
Binary file added data/test_response_solcast_method.pbz2
7 changes: 7 additions & 0 deletions docs/forecasts.md
@@ -27,6 +27,13 @@ For example:
curl -i -H 'Content-Type:application/json' -X POST -d '{"solcast_rooftop_id":"<your_system_id>","solcast_api_key":"<your_secret_api_key>"}' http://localhost:5000/action/dayahead-optim
```

A third method uses the Solar.Forecast service. You will need to set `method=solar.forecast` and pass a single parameter, `solar_forecast_kwp` (the PV peak installed power in kW), at runtime.

For example, for a 5 kWp installation:
```
curl -i -H 'Content-Type:application/json' -X POST -d '{"solar_forecast_kwp":5}' http://localhost:5000/action/dayahead-optim
```

## Load power forecast

The default method for the load forecast is a naive method, also called persistence. It is selected with `method=naive`. This method simply assumes that the forecast for a future period will be equal to the observed values in a past period. The length of that past period is set with the parameter `delta_forecast`, whose default value is 24h.
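
As an illustration only (a minimal sketch with pandas, not the actual EMHASS implementation), a persistence forecast of this kind can be written as follows, assuming a tz-aware pandas Series `load` holding the observed load power:
```
import pandas as pd

def naive_load_forecast(load: pd.Series, delta_forecast: pd.Timedelta = pd.Timedelta("24h")) -> pd.Series:
    # Each forecasted timestamp simply takes the value observed delta_forecast earlier
    forecast = load.copy()
    forecast.index = forecast.index + delta_forecast
    return forecast
```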
3 changes: 2 additions & 1 deletion secrets_emhass(example).yaml
@@ -8,4 +8,5 @@ lat: 45.83
lon: 6.86
alt: 4807.8
solcast_api_key: yoursecretsolcastapikey
solcast_rooftop_id: yourrooftopid
solcast_rooftop_id: yourrooftopid
solar_forecast_kwp: 5
37 changes: 36 additions & 1 deletion src/emhass/forecast.py
@@ -197,6 +197,10 @@ def get_weather_forecast(self, method: Optional[str] = 'scrapper',
data['relative_humidity'] = raw_data['Relative Humidity (%)']
data['precipitable_water'] = pvlib.atmosphere.gueymard94_pw(
data['temp_air'], data['relative_humidity'])
'''import bz2 # Uncomment to save a serialized data for tests
import _pickle as cPickle
with bz2.BZ2File("test_response_scrapper_method.pbz2", "w") as f:
cPickle.dump(data, f)'''
elif method == 'solcast': # using solcast API
# Retrieve data from the solcast API
headers = {
@@ -220,6 +224,36 @@
data = pd.DataFrame.from_dict(data_dict)
# Define index
data.set_index('ts', inplace=True)
'''import bz2 # Uncomment to save a serialized data for tests
import _pickle as cPickle
with bz2.BZ2File("test_response_solcast_method.pbz2", "w") as f:
cPickle.dump(data, f)'''
elif method == 'solar.forecast': # using the solar.forecast API
# Retrieve data from the solar.forecast API
headers = {
"Accept": "application/json"
}
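# Forecast.Solar endpoint: /estimate/<lat>/<lon>/<surface_tilt>/<azimuth>/<kwp> (the configured surface_azimuth is shifted by -180 for the API)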
url = "https://api.forecast.solar/estimate/"+str(round(self.lat, 2))+"/"+str(round(self.lon, 2))+\
"/"+str(self.plant_conf["surface_tilt"])+"/"+str(self.plant_conf["surface_azimuth"]-180)+\
"/"+str(self.retrieve_hass_conf["solar_forecast_kwp"])
response = get(url, headers=headers)
data_raw = response.json()
data_dict = {'ts':list(data_raw['result']['watts'].keys()), 'yhat':list(data_raw['result']['watts'].values())}
# Form the final DataFrame
data = pd.DataFrame.from_dict(data_dict)
data.set_index('ts', inplace=True)
data.index = pd.to_datetime(data.index)
data = data.tz_localize(self.forecast_dates.tz)
data = data.reindex(index=self.forecast_dates)
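# Flag leading/trailing NaNs left by the reindex (ffill/bfill cannot reach them); interior gaps are interpolated and the flagged edges are set to zero below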
mask_up_data_df = data.copy(deep=True).fillna(method = "ffill").isnull()
mask_down_data_df = data.copy(deep=True).fillna(method = "bfill").isnull()
data.interpolate(inplace=True)
data.loc[data.index[mask_up_data_df['yhat']==True],:] = 0.0
data.loc[data.index[mask_down_data_df['yhat']==True],:] = 0.0
'''import bz2 # Uncomment to save a serialized data for tests
import _pickle as cPickle
with bz2.BZ2File("test_response_solarforecast_method.pbz2", "w") as f:
cPickle.dump(data, f)'''
elif method == 'csv': # reading from a csv file
weather_csv_file_path = self.root + csv_path
# Loading the csv file, we will consider that this is the PV power in W
@@ -313,7 +347,8 @@ def get_power_from_weather(self, df_weather: pd.DataFrame,
"""
# For the solcast, solar.forecast, csv and list methods, yhat is already the PV power in W
if self.weather_forecast_method == 'solcast' or self.weather_forecast_method == 'csv' or self.weather_forecast_method == 'list':
if self.weather_forecast_method == 'solcast' or self.weather_forecast_method == 'solar.forecast' or \
self.weather_forecast_method == 'csv' or self.weather_forecast_method == 'list':
P_PV_forecast = df_weather['yhat']
P_PV_forecast.name = None
else: # We will transform the weather data into electrical power
5 changes: 5 additions & 0 deletions src/emhass/utils.py
@@ -197,8 +197,13 @@ def treat_runtimeparams(runtimeparams: str, params:str, retrieve_hass_conf: dict
optim_conf['set_def_constant'] = runtimeparams['set_def_constant']
if 'solcast_api_key' in runtimeparams.keys():
retrieve_hass_conf['solcast_api_key'] = runtimeparams['solcast_api_key']
optim_conf['weather_forecast_method'] = 'solcast'
if 'solcast_rooftop_id' in runtimeparams.keys():
retrieve_hass_conf['solcast_rooftop_id'] = runtimeparams['solcast_rooftop_id']
optim_conf['weather_forecast_method'] = 'solcast'
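# Passing solar_forecast_kwp at runtime switches the weather forecast method to the solar.forecast API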
if 'solar_forecast_kwp' in runtimeparams.keys():
retrieve_hass_conf['solar_forecast_kwp'] = runtimeparams['solar_forecast_kwp']
optim_conf['weather_forecast_method'] = 'solar.forecast'
params = json.dumps(params)
return params, retrieve_hass_conf, optim_conf

63 changes: 51 additions & 12 deletions tests/test_forecast.py
@@ -2,8 +2,11 @@
# -*- coding: utf-8 -*-

import unittest
from unittest.mock import MagicMock
import pandas as pd
import pathlib, pickle, json, copy, yaml
import bz2
import _pickle as cPickle

from emhass.retrieve_hass import retrieve_hass
from emhass.forecast import forecast
@@ -44,7 +47,7 @@ def setUp(self):
self.fcst = forecast(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
params, root, logger, get_data_from_file=self.get_data_from_file)
# The default for test is csv read
self.df_weather_scrap = self.fcst.get_weather_forecast(method='csv') # Still need to unittest these methods: 'scrapper','solcast','forecast.solar'
self.df_weather_scrap = self.fcst.get_weather_forecast(method='csv')
self.P_PV_forecast = self.fcst.get_power_from_weather(self.df_weather_scrap)
self.P_load_forecast = self.fcst.get_load_forecast(method=optim_conf['load_forecast_method'])
self.df_input_data_dayahead = pd.concat([self.P_PV_forecast, self.P_load_forecast], axis=1)
@@ -66,15 +69,6 @@ def setUp(self):
}

def test_get_weather_forecast(self):
# self.assertTrue(self.df_input_data.isnull().sum().sum()==0)
# self.assertIsInstance(self.df_weather_scrap, type(pd.DataFrame()))
# self.assertTrue(col in self.df_weather_scrap.columns for col in ['ghi', 'dni', 'dhi', 'temp_air'])
# self.assertIsInstance(self.df_weather_scrap.index, pd.core.indexes.datetimes.DatetimeIndex)
# self.assertIsInstance(self.df_weather_scrap.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)
# self.assertEqual(self.df_weather_scrap.index.tz, self.fcst.time_zone)
# self.assertTrue(self.fcst.start_forecast < ts for ts in self.df_weather_scrap.index)
# self.assertEqual(len(self.df_weather_scrap),
# int(self.optim_conf['delta_forecast'].total_seconds()/3600/self.fcst.timeStep))
self.df_weather_csv = self.fcst.get_weather_forecast(method='csv')
self.assertEqual(self.fcst.weather_forecast_method, 'csv')
self.assertIsInstance(self.df_weather_csv, type(pd.DataFrame()))
@@ -91,6 +85,51 @@
self.assertEqual(P_PV_forecast.index.tz, self.fcst.time_zone)
self.assertEqual(len(self.df_weather_csv), len(P_PV_forecast))

def test_get_weather_forecast_scrapper_method(self):
data = bz2.BZ2File(str(pathlib.Path(root+'/data/test_response_scrapper_method.pbz2')), "rb")
data = cPickle.load(data)
self.fcst.get_weather_forecast = MagicMock(return_value=data)
df_weather_scrap = self.fcst.get_weather_forecast(method='scrapper')
self.fcst.get_weather_forecast.assert_called_with(method='scrapper')
self.fcst.get_weather_forecast.assert_called_once()
self.assertIsInstance(df_weather_scrap, type(pd.DataFrame()))
self.assertIsInstance(df_weather_scrap.index, pd.core.indexes.datetimes.DatetimeIndex)
self.assertIsInstance(df_weather_scrap.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)
self.assertEqual(df_weather_scrap.index.tz, self.fcst.time_zone)
self.assertTrue(self.fcst.start_forecast < ts for ts in df_weather_scrap.index)
self.assertEqual(len(df_weather_scrap),
int(self.optim_conf['delta_forecast'].total_seconds()/3600/self.fcst.timeStep))

def test_get_weather_forecast_solcast_method(self):
data = bz2.BZ2File(str(pathlib.Path(root+'/data/test_response_solcast_method.pbz2')), "rb")
data = cPickle.load(data)
self.fcst.get_weather_forecast = MagicMock(return_value=data)
df_weather_solcast = self.fcst.get_weather_forecast(method='solcast')
self.fcst.get_weather_forecast.assert_called_with(method='solcast')
self.fcst.get_weather_forecast.assert_called_once()
self.assertIsInstance(df_weather_solcast, type(pd.DataFrame()))
self.assertIsInstance(df_weather_solcast.index, pd.core.indexes.datetimes.DatetimeIndex)
self.assertIsInstance(df_weather_solcast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)
self.assertEqual(df_weather_solcast.index.tz, self.fcst.time_zone)
self.assertTrue(self.fcst.start_forecast < ts for ts in df_weather_solcast.index)
self.assertEqual(len(df_weather_solcast),
int(self.optim_conf['delta_forecast'].total_seconds()/3600/self.fcst.timeStep))

def test_get_weather_forecast_solarforecast_method(self):
data = bz2.BZ2File(str(pathlib.Path(root+'/data/test_response_solarforecast_method.pbz2')), "rb")
data = cPickle.load(data)
self.fcst.get_weather_forecast = MagicMock(return_value=data)
df_weather_solarforecast = self.fcst.get_weather_forecast(method='solar.forecast')
self.fcst.get_weather_forecast.assert_called_with(method='solar.forecast')
self.fcst.get_weather_forecast.assert_called_once()
self.assertIsInstance(df_weather_solarforecast, type(pd.DataFrame()))
self.assertIsInstance(df_weather_solarforecast.index, pd.core.indexes.datetimes.DatetimeIndex)
self.assertIsInstance(df_weather_solarforecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)
self.assertEqual(df_weather_solarforecast.index.tz, self.fcst.time_zone)
self.assertTrue(self.fcst.start_forecast < ts for ts in df_weather_solarforecast.index)
self.assertEqual(len(df_weather_solarforecast),
int(self.optim_conf['delta_forecast'].total_seconds()/3600/self.fcst.timeStep))

def test_get_forecasts_with_lists(self):
with open(root+'/config_emhass.yaml', 'r') as file:
params = yaml.load(file, Loader=yaml.FullLoader)
@@ -181,7 +220,7 @@ def test_get_power_from_weather(self):
self.plant_conf['strings_per_inverter'] = [1, 1]
self.fcst = forecast(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
None, root, logger, get_data_from_file=self.get_data_from_file)
df_weather_scrap = self.fcst.get_weather_forecast(method='scrapper')
df_weather_scrap = self.fcst.get_weather_forecast(method='csv')
P_PV_forecast = self.fcst.get_power_from_weather(df_weather_scrap)
self.assertIsInstance(P_PV_forecast, pd.core.series.Series)
self.assertIsInstance(P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex)
@@ -193,7 +232,7 @@
df_input_data = self.input_data_dict['rh'].df_final.copy()
self.fcst = forecast(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
params, root, logger, get_data_from_file=self.get_data_from_file)
df_weather_scrap = self.fcst.get_weather_forecast(method='scrapper')
df_weather_scrap = self.fcst.get_weather_forecast(method='csv')
P_PV_forecast = self.fcst.get_power_from_weather(df_weather_scrap, set_mix_forecast=True, df_now=df_input_data)
self.assertIsInstance(P_PV_forecast, pd.core.series.Series)
self.assertIsInstance(P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex)
