Merge pull request #80 from davidusb-geek/dev/skforecast_bump_optim_dynamics

Dev/skforecast bump optim dynamics
davidusb-geek authored May 27, 2023
2 parents 1ac97c2 + df86537 commit 7ce17fa
Showing 16 changed files with 233 additions and 148 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,13 @@
# Changelog

## Unreleased
### Improvement
- Added new constraints to limit the dynamics (kW/sec) of deferrable loads and battery power. The LP formulation works correctly; further work is needed to integrate the user input parameters that control this functionality (see the constraint sketch after the config_emhass.yaml diff below).
- Added a new constraint to avoid the battery discharging to the grid.
### Fix
- Bumped the skforecast version from 0.6.0 to 0.8.0. This mainly implies changing how the exogenous data is passed to the fit and predict methods (a minimal sketch follows this CHANGELOG diff).
- Fixed the wrong path for CSV files when using the load cost and production price forecasts.

## [0.4.10] - 2023-05-21
### Fix
- Fixed wrong name of new cost sensor.
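
For context on the skforecast entry above: in 0.8.x the exogenous variables are passed explicitly to both fit() and predict(), and the exog given to predict() must describe the period being forecast rather than the training window. A minimal, self-contained sketch of that pattern, assuming a ForecasterAutoreg wrapping a scikit-learn regressor; the synthetic data, column names and lag count are illustrative, not the exact EMHASS attributes.

```python
import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression
from skforecast.ForecasterAutoreg import ForecasterAutoreg  # 0.8.x import path

# Synthetic stand-in for EMHASS' data_exo: target plus simple date features,
# on a fixed-frequency DatetimeIndex.
idx = pd.date_range('2023-05-01', periods=10 * 48, freq='30min')
data_exo = pd.DataFrame({'load': np.random.rand(len(idx)),
                         'hour': idx.hour,
                         'day_of_week': idx.dayofweek}, index=idx)
var_model = 'load'

# Train/test split keeping the last 48 h for testing; subtracting one freq from
# the split keeps the split timestamp out of the training set (as fixed in this PR).
date_split = data_exo.index[-1] - pd.Timedelta('48h') + data_exo.index.freq
data_train = data_exo.loc[:date_split - data_exo.index.freq, :]
data_test = data_exo.loc[date_split:, :]

forecaster = ForecasterAutoreg(regressor=LinearRegression(), lags=48)
# skforecast 0.8.x: pass the exogenous columns explicitly to fit ...
forecaster.fit(y=data_train[var_model], exog=data_train.drop(var_model, axis=1))
# ... and predict against the exog of the period being forecast (the test set);
# the PR switches these calls from data_train to data_test.
predictions = forecaster.predict(steps=len(data_test),
                                 exog=data_test.drop(var_model, axis=1))
```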
4 changes: 4 additions & 0 deletions config_emhass.yaml
@@ -48,6 +48,10 @@ optim_conf:
- lp_solver: 'PULP_CBC_CMD' # set the name of the linear programming solver that will be used
- lp_solver_path: 'empty' # set the path to the LP solver
- set_nocharge_from_grid: False # avoid battery charging from the grid
- set_nodischarge_to_grid: True # avoid battery discharging to the grid
  - set_battery_dynamic: False # add a constraint to limit the dynamics of the battery power (power variation per time step)
  - battery_dynamic_max: 0.9 # maximum positive power variation per time step, as a fraction of the maximum battery power
  - battery_dynamic_min: -0.9 # minimum negative power variation per time step, as a fraction of the maximum battery power

plant_conf:
- P_grid_max: 9000 # The maximum power that can be supplied by the utility grid in Watts
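
To make the new options concrete, below is a hedged sketch of how they can translate into linear-programming constraints with PuLP (already the project's solver interface). The variable names (P_sto_pos, P_sto_neg, P_grid_neg), the sign conventions and the exact formulation are assumptions for this sketch, not necessarily what src/emhass/optimization.py implements.

```python
import pulp as plp

# Illustrative data for a few timesteps (not taken from EMHASS)
n = 4
P_PV = [0.0, 1500.0, 2500.0, 800.0]  # PV power forecast in W
Pd_max = 1000.0                      # assumed battery maximum discharge power in W
Pc_max = 1000.0                      # assumed battery maximum charge power in W
P_grid_max = 9000.0                  # maximum grid power, as in plant_conf above
battery_dynamic_max = 0.9            # fraction of max battery power per time step
battery_dynamic_min = -0.9

opt_model = plp.LpProblem("emhass_like_sketch", plp.LpMinimize)

# Decision variables (sign conventions assumed for this sketch):
# P_sto_pos >= 0 battery discharge, P_sto_neg <= 0 battery charge,
# P_grid_neg <= 0 power injected into the grid.
P_sto_pos = [plp.LpVariable(f"P_sto_pos_{i}", lowBound=0, upBound=Pd_max) for i in range(n)]
P_sto_neg = [plp.LpVariable(f"P_sto_neg_{i}", lowBound=-Pc_max, upBound=0) for i in range(n)]
P_grid_neg = [plp.LpVariable(f"P_grid_neg_{i}", lowBound=-P_grid_max, upBound=0) for i in range(n)]

# set_nodischarge_to_grid: one plausible formulation caps grid injection at the
# PV production, so the battery cannot be the source of exported power.
for i in range(n):
    opt_model += (-P_grid_neg[i] <= P_PV[i]), f"nodischarge_to_grid_{i}"

# set_battery_dynamic: bound the variation of the battery discharge power between
# consecutive timesteps as a fraction of the maximum battery power (an analogous
# pair of constraints would apply to the charge power P_sto_neg).
for i in range(n - 1):
    opt_model += (P_sto_pos[i + 1] - P_sto_pos[i] <= battery_dynamic_max * Pd_max), f"batt_dyn_up_{i}"
    opt_model += (P_sto_pos[i + 1] - P_sto_pos[i] >= battery_dynamic_min * Pd_max), f"batt_dyn_down_{i}"

# Dummy objective so the sketch solves on its own; EMHASS optimizes cost/profit instead.
opt_model += plp.lpSum(P_sto_pos) - plp.lpSum(P_grid_neg)
opt_model.solve(plp.PULP_CBC_CMD(msg=False))
```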
2 changes: 1 addition & 1 deletion docs/conf.py
@@ -22,7 +22,7 @@
author = 'David HERNANDEZ'

# The full version, including alpha/beta/rc tags
release = '0.4.10'
release = '0.4.11'

# -- General configuration ---------------------------------------------------

2 changes: 1 addition & 1 deletion docs/requirements.txt
@@ -12,7 +12,7 @@ pulp>=2.4
pyyaml>=5.4.1
netcdf4>=1.5.3
tables==3.7.0
skforecast==0.6.0
skforecast==0.8.0
markupsafe==2.1.2
Jinja2<3.2
sphinx==5.3.0
2 changes: 1 addition & 1 deletion requirements.txt
@@ -12,4 +12,4 @@ pulp>=2.4
pyyaml>=5.4.1
netcdf4>=1.5.3
tables==3.7.0
skforecast==0.6.0
skforecast==0.8.0
2 changes: 1 addition & 1 deletion requirements_webserver.txt
@@ -12,7 +12,7 @@ pulp>=2.4
pyyaml>=5.4.1
netcdf4>=1.5.3
tables==3.7.0
skforecast==0.6.0
skforecast==0.8.0
flask>=2.0.3
waitress>=2.1.1
plotly>=5.6.0
41 changes: 25 additions & 16 deletions scripts/use_cases_analysis.py
@@ -43,6 +43,7 @@ def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf,
if __name__ == '__main__':
get_data_from_file = False
params = None
save_figures = False
retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(pathlib.Path(root+'/config_emhass.yaml'), use_secrets=True)
rh = retrieve_hass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
@@ -71,25 +72,28 @@ def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf,
fig_inputs1.update_yaxes(title_text = "Powers (W)")
fig_inputs1.update_xaxes(title_text = "Time")
fig_inputs1.show()
fig_inputs1.write_image(root + "/docs/images/inputs_power.svg",
width=1080, height=0.8*1080)
if save_figures:
fig_inputs1.write_image(root + "/docs/images/inputs_power.svg",
width=1080, height=0.8*1080)

fig_inputs2 = df_input_data[['unit_load_cost',
'unit_prod_price']].plot()
fig_inputs2.layout.template = template
fig_inputs2.update_yaxes(title_text = "Load cost and production sell price (EUR)")
fig_inputs2.update_xaxes(title_text = "Time")
fig_inputs2.show()
fig_inputs2.write_image(root + "/docs/images/inputs_cost_price.svg",
width=1080, height=0.8*1080)
if save_figures:
fig_inputs2.write_image(root + "/docs/images/inputs_cost_price.svg",
width=1080, height=0.8*1080)

fig_inputs_dah = df_input_data_dayahead.plot()
fig_inputs_dah.layout.template = template
fig_inputs_dah.update_yaxes(title_text = "Powers (W)")
fig_inputs_dah.update_xaxes(title_text = "Time")
fig_inputs_dah.show()
fig_inputs_dah.write_image(root + "/docs/images/inputs_dayahead.svg",
width=1080, height=0.8*1080)
if save_figures:
fig_inputs_dah.write_image(root + "/docs/images/inputs_dayahead.svg",
width=1080, height=0.8*1080)

# Let's first perform a perfect optimization
opt_res = opt.perform_perfect_forecast_optim(df_input_data, days_list)
@@ -98,8 +102,9 @@ def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf,
fig_res.update_yaxes(title_text = "Powers (W)")
fig_res.update_xaxes(title_text = "Time")
fig_res.show()
fig_res.write_image(root + "/docs/images/optim_results_PV_defLoads_perfectOptim.svg",
width=1080, height=0.8*1080)
if save_figures:
fig_res.write_image(root + "/docs/images/optim_results_PV_defLoads_perfectOptim.svg",
width=1080, height=0.8*1080)

print("System with: PV, two deferrable loads, perfect optimization, profit >> total cost function sum: "+\
str(opt_res['cost_profit'].sum()))
@@ -113,8 +118,9 @@ def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf,
fig_res_dah.update_yaxes(title_text = "Powers (W)")
fig_res_dah.update_xaxes(title_text = "Time")
fig_res_dah.show()
fig_res_dah.write_image(root + "/docs/images/optim_results_PV_defLoads_dayaheadOptim.svg",
width=1080, height=0.8*1080)
if save_figures:
fig_res_dah.write_image(root + "/docs/images/optim_results_PV_defLoads_dayaheadOptim.svg",
width=1080, height=0.8*1080)

print("System with: PV, two deferrable loads, dayahead optimization, profit >> total cost function sum: "+\
str(opt_res_dah['cost_profit'].sum()))
@@ -132,8 +138,9 @@ def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf,
fig_res_dah.update_yaxes(title_text = "Powers (W)")
fig_res_dah.update_xaxes(title_text = "Time")
fig_res_dah.show()
fig_res_dah.write_image(root + "/docs/images/optim_results_defLoads_dayaheadOptim.svg",
width=1080, height=0.8*1080)
if save_figures:
fig_res_dah.write_image(root + "/docs/images/optim_results_defLoads_dayaheadOptim.svg",
width=1080, height=0.8*1080)

print("System with: two deferrable loads, dayahead optimization, profit >> total cost function sum: "+\
str(opt_res_dah['cost_profit'].sum()))
@@ -152,15 +159,17 @@ def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf,
fig_res_dah.update_yaxes(title_text = "Powers (W)")
fig_res_dah.update_xaxes(title_text = "Time")
fig_res_dah.show()
fig_res_dah.write_image(root + "/docs/images/optim_results_PV_Batt_defLoads_dayaheadOptim.svg",
width=1080, height=0.8*1080)
if save_figures:
fig_res_dah.write_image(root + "/docs/images/optim_results_PV_Batt_defLoads_dayaheadOptim.svg",
width=1080, height=0.8*1080)
fig_res_dah = opt_res_dah[['SOC_opt']].plot()
fig_res_dah.layout.template = template
fig_res_dah.update_yaxes(title_text = "Battery State of Charge (%)")
fig_res_dah.update_xaxes(title_text = "Time")
fig_res_dah.show()
fig_res_dah.write_image(root + "/docs/images/optim_results_PV_Batt_defLoads_dayaheadOptim_SOC.svg",
width=1080, height=0.8*1080)
if save_figures:
fig_res_dah.write_image(root + "/docs/images/optim_results_PV_Batt_defLoads_dayaheadOptim_SOC.svg",
width=1080, height=0.8*1080)

print("System with: PV, Battery, two deferrable loads, dayahead optimization, profit >> total cost function sum: "+\
str(opt_res_dah['cost_profit'].sum()))
4 changes: 2 additions & 2 deletions setup.py
@@ -19,7 +19,7 @@

setup(
name='emhass', # Required
version='0.4.10', # Required
version='0.4.11', # Required
description='An Energy Management System for Home Assistant', # Optional
long_description=long_description, # Optional
long_description_content_type='text/markdown', # Optional (see note above)
@@ -57,7 +57,7 @@
'pyyaml>=5.4.1',
'netcdf4>=1.5.3',
'tables==3.7.0',
'skforecast==0.6.0',
'skforecast==0.8.0',
], # Optional
entry_points={ # Optional
'console_scripts': [
3 changes: 2 additions & 1 deletion src/emhass/command_line.py
@@ -229,7 +229,8 @@ def dayahead_forecast_optim(input_data_dict: dict, logger: logging.Logger,
input_data_dict['df_input_data_dayahead'],
method=input_data_dict['fcst'].optim_conf['load_cost_forecast_method'])
df_input_data_dayahead = input_data_dict['fcst'].get_prod_price_forecast(
df_input_data_dayahead, method=input_data_dict['fcst'].optim_conf['prod_price_forecast_method'])
df_input_data_dayahead,
method=input_data_dict['fcst'].optim_conf['prod_price_forecast_method'])
opt_res_dayahead = input_data_dict['opt'].perform_dayahead_forecast_optim(
df_input_data_dayahead, input_data_dict['P_PV_forecast'], input_data_dict['P_load_forecast'])
# Save CSV file for publish_data
4 changes: 2 additions & 2 deletions src/emhass/forecast.py
@@ -658,7 +658,7 @@ def get_load_forecast(self, days_min_load_forecast: Optional[int] = 3, method: O
return P_Load_forecast

def get_load_cost_forecast(self, df_final: pd.DataFrame, method: Optional[str] = 'hp_hc_periods',
csv_path: Optional[str] = "/data/data_load_cost_forecast.csv") -> pd.DataFrame:
csv_path: Optional[str] = "data_load_cost_forecast.csv") -> pd.DataFrame:
r"""
Get the unit cost for the load consumption based on multiple tariff \
periods. This is the cost of the energy from the utility in a vector \
@@ -671,7 +671,7 @@ def get_load_cost_forecast(self, df_final: pd.DataFrame, method: Optional[str] =
and 'csv' to load a CSV file, defaults to 'hp_hc_periods'
:type method: str, optional
:param csv_path: The path to the CSV file used when method = 'csv', \
defaults to "/data/data_load_cost_forecast.csv"
defaults to "data_load_cost_forecast.csv"
:type csv_path: str, optional
:return: The input DataFrame with one additional column appended containing
the load cost for each time observation.
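
On the changed default above: the old absolute "/data/data_load_cost_forecast.csv" pointed at a fixed location (the "wrong path" noted in the changelog), while a relative file name can be joined onto whatever data folder is configured. A tiny illustration of the idea; the data-folder path below is an assumption for the example, and the actual resolution logic lives elsewhere in forecast.py and is not shown in this diff.

```python
import pathlib

data_path = pathlib.Path("/app/data")     # assumed location of the configured data folder
csv_path = "data_load_cost_forecast.csv"  # new relative default of get_load_cost_forecast

# Joining a relative file name keeps the CSV inside the configured data folder.
print(data_path / csv_path)               # -> /app/data/data_load_cost_forecast.csv
```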
31 changes: 21 additions & 10 deletions src/emhass/machine_learning_forecaster.py
@@ -97,6 +97,17 @@ def neg_r2_score(y_true, y_pred):
"""The negative of the r2 score."""
return -r2_score(y_true, y_pred)

@staticmethod
def generate_exog(data_last_window, periods, var_name):
"""Generate the exogenous data for future timestamps."""
forecast_dates = pd.date_range(start=data_last_window.index[-1]+data_last_window.index.freq,
periods=periods,
freq=data_last_window.index.freq)
exog = pd.DataFrame({var_name:[np.nan]*periods},
index=forecast_dates)
exog = mlforecaster.add_date_features(exog)
return exog

def fit(self, split_date_delta: Optional[str] = '48h', perform_backtest: Optional[bool] = False
) -> Tuple[pd.DataFrame, pd.DataFrame]:
r"""The fit method to train the ML model.
@@ -119,7 +130,7 @@ def fit(self, split_date_delta: Optional[str] = '48h', perform_backtest: Optiona
# train/test split
self.date_train = self.data_exo.index[-1]-pd.Timedelta('5days')+self.data_exo.index.freq # The last 5 days
self.date_split = self.data_exo.index[-1]-pd.Timedelta(split_date_delta)+self.data_exo.index.freq # The last 48h
self.data_train = self.data_exo.loc[:self.date_split,:]
self.data_train = self.data_exo.loc[:self.date_split-self.data_exo.index.freq,:]
self.data_test = self.data_exo.loc[self.date_split:,:]
self.steps = len(self.data_test)
# Pick correct sklearn model
@@ -143,7 +154,7 @@ def fit(self, split_date_delta: Optional[str] = '48h', perform_backtest: Optiona
exog=self.data_train.drop(self.var_model, axis=1))
self.logger.info(f"Elapsed time for model fit: {time.time() - start_time}")
# Make a prediction to print metrics
predictions = self.forecaster.predict(steps=self.steps, exog=self.data_train.drop(self.var_model, axis=1))
predictions = self.forecaster.predict(steps=self.steps, exog=self.data_test.drop(self.var_model, axis=1))
pred_metric = r2_score(self.data_test[self.var_model],predictions)
self.logger.info(f"Prediction R2 score of fitted model on test data: {pred_metric}")
# Packing results in a DataFrame
@@ -187,18 +198,18 @@ def predict(self, data_last_window: Optional[pd.DataFrame] = None
:rtype: pd.Series
"""
if data_last_window is None:
predictions = self.forecaster.predict(steps=self.num_lags, exog=self.data_train.drop(self.var_model, axis=1))
predictions = self.forecaster.predict(steps=self.num_lags, exog=self.data_test.drop(self.var_model, axis=1))
else:
data_last_window = mlforecaster.add_date_features(data_last_window)
data_last_window = data_last_window.interpolate(method='linear', axis=0, limit=None)
if self.is_tuned:
exog = mlforecaster.generate_exog(data_last_window, self.lags_opt, self.var_model)
predictions = self.forecaster.predict(steps=self.lags_opt,
last_window=data_last_window[self.var_model],
exog=data_last_window.drop(self.var_model, axis=1))
exog=exog.drop(self.var_model, axis=1))
else:
exog = mlforecaster.generate_exog(data_last_window, self.num_lags, self.var_model)
predictions = self.forecaster.predict(steps=self.num_lags,
last_window=data_last_window[self.var_model],
exog=data_last_window.drop(self.var_model, axis=1))
exog=exog.drop(self.var_model, axis=1))
return predictions

def tune(self, debug: Optional[bool] = False) -> pd.DataFrame:
@@ -223,11 +234,11 @@ def tune(self, debug: Optional[bool] = False) -> pd.DataFrame:
if self.sklearn_model == 'LinearRegression':
if debug:
def search_space(trial):
search_space = {'fit_intercept': trial.suggest_categorical('fit_intercept', ['True'])}
search_space = {'fit_intercept': trial.suggest_categorical('fit_intercept', [True])}
return search_space
else:
def search_space(trial):
search_space = {'fit_intercept': trial.suggest_categorical('fit_intercept', ['True', 'False'])}
search_space = {'fit_intercept': trial.suggest_categorical('fit_intercept', [True, False])}
return search_space
elif self.sklearn_model == 'ElasticNet':
if debug:
@@ -276,7 +287,7 @@ def search_space(trial):
)
self.logger.info(f"Elapsed time: {time.time() - start_time}")
self.is_tuned = True
predictions_opt = self.forecaster.predict(steps=self.num_lags, exog=self.data_train.drop(self.var_model, axis=1))
predictions_opt = self.forecaster.predict(steps=self.num_lags, exog=self.data_test.drop(self.var_model, axis=1))
freq_hours = self.data_exo.index.freq.delta.seconds/3600
self.lags_opt = int(np.round(len(self.optimize_results.iloc[0]['lags'])))
self.days_needed = int(np.round(self.lags_opt*freq_hours/24))
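
The predict() changes above are the core of the skforecast 0.8.x migration: the exog passed to predict() has to describe the timestamps being forecast, which is what the new generate_exog() static method builds before the (empty) target column is dropped. A minimal sketch of that pattern, continuing from the fitted forecaster of the earlier sketch; the inlined date features stand in for mlforecaster.add_date_features() and all names remain illustrative.

```python
import numpy as np
import pandas as pd

def generate_future_exog(data_last_window: pd.DataFrame, periods: int, var_name: str) -> pd.DataFrame:
    """Sketch of generate_exog(): build exog rows for the timestamps to be forecast."""
    freq = data_last_window.index.freq
    forecast_dates = pd.date_range(start=data_last_window.index[-1] + freq,
                                   periods=periods, freq=freq)
    exog = pd.DataFrame({var_name: [np.nan] * periods}, index=forecast_dates)
    # Stand-in for add_date_features(); it must produce the same columns that
    # were used as exog when the forecaster was fitted.
    exog['hour'] = exog.index.hour
    exog['day_of_week'] = exog.index.dayofweek
    return exog

# Continuing from the `forecaster`, `data_train` and `var_model` of the earlier sketch:
# the exog handed to predict() describes the future horizon, and the target column is
# dropped because its values are unknown (hence the NaN placeholder above).
horizon = 48
exog_future = generate_future_exog(data_train, horizon, var_model)
predictions = forecaster.predict(steps=horizon,
                                 exog=exog_future.drop(var_model, axis=1))
```

In the PR itself, predict() is additionally given last_window=data_last_window[self.var_model], so forecasting starts from the most recent live data rather than from the end of the training set. As a side note on the tune() hunk, replacing the categorical strings 'True'/'False' with real booleans matters because scikit-learn expects an actual boolean there: the string 'False' is truthy in Python, and recent scikit-learn versions reject non-boolean values outright.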
