From 03562b89cf98c2e8d4e7334da75517ddb2086dc8 Mon Sep 17 00:00:00 2001 From: GeoDerp <18461782+GeoDerp@users.noreply.github.com> Date: Fri, 2 Aug 2024 12:19:34 +0000 Subject: [PATCH 01/35] init standalone and addon merge --- .devcontainer/Dockerfile | 5 - .devcontainer/devcontainer.json | 19 +-- .devcontainer/setup.sh | 27 --- .vscode/launch.json | 24 --- .vscode/tasks.json | 3 +- Dockerfile | 90 ++-------- config_emhass.yaml | 92 ----------- options.json | 18 +- src/emhass/utils.py | 280 +++++++++++++++++--------------- src/emhass/web_server.py | 232 ++++++++++---------------- 10 files changed, 263 insertions(+), 527 deletions(-) delete mode 100644 .devcontainer/Dockerfile delete mode 100755 .devcontainer/setup.sh delete mode 100644 config_emhass.yaml diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile deleted file mode 100644 index 337f928d..00000000 --- a/.devcontainer/Dockerfile +++ /dev/null @@ -1,5 +0,0 @@ -FROM mcr.microsoft.com/devcontainers/python:0-3.11 -# EXPOSE 5000:5000 -COPY .devcontainer/setup.sh ./ -COPY requirements.txt ./ -RUN ./setup.sh diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 7b4662b8..73b448f5 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -3,8 +3,9 @@ { "name": "EMHASS", "build": { - "dockerfile": "Dockerfile", - "context": ".." + "dockerfile": "../Dockerfile", + "context": "../", + "args": { "TARGETARCH": "amd64"} }, "features": { "ghcr.io/devcontainers/features/docker-outside-of-docker:1": {} @@ -18,16 +19,4 @@ "extensions": ["ms-python.debugpy", "ms-python.python"] } } - - - // Features to add to the dev container. More info: https://containers.dev/features. - // "features": {}, - // Use 'forwardPorts' to make a list of ports inside the container available locally. - // "forwardPorts": [], - // Use 'postCreateCommand' to run commands after the container is created. - // "postCreateCommand": "pip3 install --user -r requirements.txt", - // Configure tool-specific properties. - // "customizations": {}, - // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. 
- // "remoteUser": "root" -} \ No newline at end of file +} diff --git a/.devcontainer/setup.sh b/.devcontainer/setup.sh deleted file mode 100755 index 2939a230..00000000 --- a/.devcontainer/setup.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -main() { - set -x - apt-get update - apt-get install -y --no-install-recommends \ - coinor-cbc \ - coinor-libcbc-dev \ - gcc \ - gfortran \ - libhdf5-dev \ - libhdf5-serial-dev \ - libnetcdf-dev \ - netcdf-bin - - ln -s /usr/include/hdf5/serial /usr/include/hdf5/include - export HDF5_DIR=/usr/include/hdf5 - pip install netCDF4 - - pip install -r requirements.txt - pip install requests-mock - - rm -rf "$0" -} - -main diff --git a/.vscode/launch.json b/.vscode/launch.json index 1800d75c..e0313551 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -18,30 +18,6 @@ "debug-in-terminal" ], "justMyCode": true, - "env": { - "CONFIG_PATH": "/workspaces/emhass/config_emhass.yaml", - "OPTIONS_PATH": "/workspaces/emhass/options.json", - "SECRETS_PATH": "/workspaces/emhass/secrets_emhass.yaml", - "DATA_PATH": "/workspaces/emhass/data/", - "LOGGING_LEVEL": "DEBUG" - } - }, - { - "name": "EMHASS run ADDON", - "type": "debugpy", - "request": "launch", - "module": "emhass.web_server", - "console": "integratedTerminal", - "args": [ - "--addon", - "true", - "--no_response", - "true" - ], - "purpose": [ - "debug-in-terminal" - ], - "justMyCode": true, "env": { "CONFIG_PATH": "/workspaces/emhass/config_emhass.yaml", "OPTIONS_PATH": "/workspaces/emhass/options.json", diff --git a/.vscode/tasks.json b/.vscode/tasks.json index ee23d121..f3336bb6 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -12,6 +12,7 @@ "install", "--no-deps", "--force-reinstall", + "--break-system-packages", "--editable", "." ], @@ -29,7 +30,7 @@ "isDefault": true }, "args": [ - "install", "--force-reinstall", "." + "install", "--break-system-packages", "--force-reinstall", "." ], "presentation": { "echo": true, diff --git a/Dockerfile b/Dockerfile index 5873a51a..f609335a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,15 +1,10 @@ ## EMHASS Docker ## Docker run addon testing example: - ## docker build -t emhass/docker --build-arg build_version=addon-local . + ## docker build -t emhass/docker ## docker run -it -p 5000:5000 --name emhass-container -e LAT="45.83" -e LON="6.86" -e ALT="4807.8" -e TIME_ZONE="Europe/Paris" emhass/docker --url YOURHAURLHERE --key YOURHAKEYHERE -## Docker run standalone example: - ## docker build -t emhass/docker --build-arg build_version=standalone . 
- ## docker run -it -p 5000:5000 --name emhass-container -v $(pwd)/config_emhass.yaml:/app/config_emhass.yaml -v $(pwd)/secrets_emhass.yaml:/app/secrets_emhass.yaml emhass/docker - -#build_version options are: addon, addon-pip, addon-git, addon-local, standalone (default) -ARG build_version=standalone - +#armhf,amd64,armv7,aarch64 +ARG TARGETARCH #armhf=raspbian, amd64,armv7,aarch64=debian ARG os_version=debian @@ -80,92 +75,29 @@ COPY config_emhass.yaml /app/ #make sure data directory exists RUN mkdir -p /app/data/ -#------------------------- -##EMHASS-Add-on default (this has no emhass package) -FROM base as addon - -LABEL \ - io.hass.name="emhass" \ - io.hass.description="EMHASS: Energy Management for Home Assistant" \ - io.hass.version=${BUILD_VERSION} \ - io.hass.type="addon" \ - io.hass.arch="aarch64|amd64|armhf|armv7" - -#----------- -#EMHASS-ADD-ON testing with pip emhass (EMHASS-Add-on testing reference) -FROM addon as addon-pip -#set build arg for pip version -ARG build_pip_version="" -RUN pip3 install --no-cache-dir --break-system-packages --upgrade --force-reinstall --no-deps --upgrade-strategy=only-if-needed -U emhass${build_pip_version} - -COPY options.json /app/ - -ENTRYPOINT [ "python3", "-m", "emhass.web_server","--addon", "True", "--no_response", "True"] - -#----------- -#EMHASS-Add-on testing from local files -FROM addon as addon-local +#copy required EMHASS files COPY src/emhass/ /app/src/emhass/ COPY src/emhass/templates/ /app/src/emhass/templates/ COPY src/emhass/static/ /app/src/emhass/static/ COPY src/emhass/static/img/ /app/src/emhass/static/img/ COPY src/emhass/data/ /app/src/emhass/data/ COPY data/opt_res_latest.csv /app/data/ -#add options.json, this otherwise would be generated via HA COPY options.json /app/ COPY README.md /app/ COPY setup.py /app/ -#compile EMHASS locally -RUN pip3 install --no-cache-dir --break-system-packages --no-deps --force-reinstall . -ENTRYPOINT [ "python3", "-m", "emhass.web_server","--addon", "True" , "--no_response", "True"] - - -#----------- -#EMHASS-Add-on testing with git -FROM addon as addon-git -ARG build_repo=https://github.com/davidusb-geek/emhass.git -ARG build_branch=master -WORKDIR /tmp/ -#Repo -RUN git clone $build_repo -WORKDIR /tmp/emhass -#Branch -RUN git checkout $build_branch -RUN mkdir -p /app/src/emhass/data/ -RUN cp -r /tmp/emhass/src/emhass/. /app/src/emhass/ -RUN cp /tmp/emhass/src/emhass/data/* /app/src/emhass/data/ -RUN cp /tmp/emhass/data/opt_res_latest.csv /app/data/ -RUN cp /tmp/emhass/setup.py /app/ -RUN cp /tmp/emhass/README.md /app/ -#add options.json, this otherwise would be generated via HA -RUN cp /tmp/emhass/options.json /app/ -WORKDIR /app -RUN pip3 install --no-cache-dir --break-system-packages --no-deps --force-reinstall . 
-ENTRYPOINT [ "python3", "-m", "emhass.web_server","--addon", "True" , "--no_response", "True"] - -#------------------------- -#EMHASS standalone -FROM base as standalone - -COPY src/emhass/ /app/src/emhass/ -COPY src/emhass/templates/ /app/src/emhass/templates/ -COPY src/emhass/static/ /app/src/emhass/static/ -COPY src/emhass/static/img/ /app/src/emhass/static/img/ -COPY src/emhass/data/ /app/src/emhass/data/ -COPY data/opt_res_latest.csv /app/data/ -COPY README.md /app/ -COPY setup.py /app/ #secrets file can be copied manually at docker run #set python env variables ENV PYTHONDONTWRITEBYTECODE 1 ENV PYTHONUNBUFFERED 1 +LABEL \ + io.hass.name="emhass" \ + io.hass.description="EMHASS: Energy Management for Home Assistant" \ + io.hass.version=${BUILD_VERSION} \ + io.hass.type="addon" \ + io.hass.arch="aarch64|amd64|armhf|armv7" + #build EMHASS RUN pip3 install --no-cache-dir --break-system-packages --no-deps --force-reinstall . ENTRYPOINT [ "python3", "-m", "emhass.web_server"] -#------------------------- - - -#check build arguments and build -FROM ${build_version} AS final \ No newline at end of file diff --git a/config_emhass.yaml b/config_emhass.yaml deleted file mode 100644 index ca3a47d7..00000000 --- a/config_emhass.yaml +++ /dev/null @@ -1,92 +0,0 @@ -# Configuration file for EMHASS - -retrieve_hass_conf: - freq: 30 # The time step to resample retrieved data from hass in minutes - days_to_retrieve: 2 # We will retrieve data from now and up to days_to_retrieve days - var_PV: 'sensor.power_photovoltaics' # Photovoltaic produced power sensor in Watts - var_load: 'sensor.power_load_no_var_loads' # Household power consumption sensor in Watts (deferrable loads should be substracted) - load_negative: False # Set to True if the retrived load variable is negative by convention - set_zero_min: True # A special treatment for a minimum value saturation to zero. 
Values below zero are replaced by nans - var_replace_zero: # A list of retrived variables that we would want to replace nans with zeros - - 'sensor.power_photovoltaics' - var_interp: # A list of retrived variables that we would want to interpolate nan values using linear interpolation - - 'sensor.power_photovoltaics' - - 'sensor.power_load_no_var_loads' - method_ts_round: 'nearest' # Set the method for timestamp rounding, options are: first, last and nearest - continual_publish: False # Save published sensor data and check for state change every freq minutes - -optim_conf: - set_use_battery: False # consider a battery storage - delta_forecast: 1 # days - num_def_loads: 2 - P_deferrable_nom: # Watts - - 3000.0 - - 750.0 - def_total_hours: # hours - - 5 - - 8 - def_start_timestep: # timesteps - - 0 - - 0 - def_end_timestep: # timesteps - - 0 - - 0 - treat_def_as_semi_cont: # treat this variable as semi continuous - - True - - True - set_def_constant: # set as a constant fixed value variable with just one startup for each 24h - - False - - False - def_start_penalty: # Set a penalty for each start up of a deferrable load - - 0.0 - - 0.0 - weather_forecast_method: 'scrapper' # options are 'scrapper', 'csv', 'list', 'solcast' and 'solar.forecast' - load_forecast_method: 'naive' # options are 'csv' to load a custom load forecast from a CSV file or 'naive' for a persistance model - load_cost_forecast_method: 'hp_hc_periods' # options are 'hp_hc_periods' for peak and non-peak hours contracts and 'csv' to load custom cost from CSV file - list_hp_periods: # list of different tariff periods (only needed if load_cost_forecast_method='hp_hc_periods') - - period_hp_1: - - start: '02:54' - - end: '15:24' - - period_hp_2: - - start: '17:24' - - end: '20:24' - load_cost_hp: 0.1907 # peak hours load cost in €/kWh (only needed if load_cost_forecast_method='hp_hc_periods') - load_cost_hc: 0.1419 # non-peak hours load cost in €/kWh (only needed if load_cost_forecast_method='hp_hc_periods') - prod_price_forecast_method: 'constant' # options are 'constant' for constant fixed value or 'csv' to load custom price forecast from a CSV file - prod_sell_price: 0.065 # power production selling price in €/kWh (only needed if prod_price_forecast_method='constant') - set_total_pv_sell: False # consider that all PV power is injected to the grid (self-consumption with total sell) - lp_solver: 'default' # set the name of the linear programming solver that will be used. Options are 'PULP_CBC_CMD', 'GLPK_CMD' and 'COIN_CMD'. 
- lp_solver_path: 'empty' # set the path to the LP solver, COIN_CMD default is /usr/bin/cbc - set_nocharge_from_grid: False # avoid battery charging from the grid - set_nodischarge_to_grid: True # avoid battery discharging to the grid - set_battery_dynamic: False # add a constraint to limit the dynamic of the battery power in power per time unit - battery_dynamic_max: 0.9 # maximum dynamic positive power variation in percentage of battery maximum power - battery_dynamic_min: -0.9 # minimum dynamic negative power variation in percentage of battery maximum power - weight_battery_discharge: 0.0 # weight applied in cost function to battery usage for discharge - weight_battery_charge: 0.0 # weight applied in cost function to battery usage for charge - -plant_conf: - P_from_grid_max: 9000 # The maximum power that can be supplied by the utility grid in Watts - P_to_grid_max: 9000 # The maximum power that can be supplied to the utility grid in Watts - module_model: # The PV module model - - 'CSUN_Eurasia_Energy_Systems_Industry_and_Trade_CSUN295_60M' - inverter_model: # The PV inverter model - - 'Fronius_International_GmbH__Fronius_Primo_5_0_1_208_240__240V_' - surface_tilt: # The tilt angle of your solar panels - - 30 - surface_azimuth: # The azimuth angle of your PV installation - - 205 - modules_per_string: # The number of modules per string - - 16 - strings_per_inverter: # The number of used strings per inverter - - 1 - inverter_is_hybrid: False # Set if it is a hybrid inverter (PV+batteries) or not - compute_curtailment: False # Compute a PV curtailment variable or not - Pd_max: 1000 # If your system has a battery (set_use_battery=True), the maximum discharge power in Watts - Pc_max: 1000 # If your system has a battery (set_use_battery=True), the maximum charge power in Watts - eta_disch: 0.95 # If your system has a battery (set_use_battery=True), the discharge efficiency - eta_ch: 0.95 # If your system has a battery (set_use_battery=True), the charge efficiency - Enom: 5000 # If your system has a battery (set_use_battery=True), the total capacity of the battery stack in Wh - SOCmin: 0.3 # If your system has a battery (set_use_battery=True), the minimun allowable battery state of charge - SOCmax: 0.9 # If your system has a battery (set_use_battery=True), the minimun allowable battery state of charge - SOCtarget: 0.6 # If your system has a battery (set_use_battery=True), the desired battery state of charge at the end of each optimization cycle diff --git a/options.json b/options.json index 01f83b52..736e34b7 100644 --- a/options.json +++ b/options.json @@ -1,7 +1,7 @@ { "hass_url": "empty", "long_lived_token": "empty", - "logging_level": "INFO", + "logging_level": "DEBUG", "costfun": "profit", "optimization_time_step": 30, "historic_days_to_retrieve": 2, @@ -23,6 +23,22 @@ "weight_battery_charge": 1.0, "sensor_power_photovoltaics": "sensor.power_photovoltaics", "sensor_power_load_no_var_loads": "sensor.power_load_no_var_loads", + "list_sensor_replace_zero": [ + { + "sensor_replace_zero": "sensor.power_photovoltaics" + }, + { + "sensor_replace_zero": "sensor.power_load_no_var_loads" + } + ], + "list_sensor_linear_interp": [ + { + "sensor_linear_interp": "sensor.power_photovoltaics" + }, + { + "sensor_linear_interp": "sensor.power_load_no_var_loads" + } + ], "load_negative": false, "set_zero_min": true, "number_of_deferrable_loads": 2, diff --git a/src/emhass/utils.py b/src/emhass/utils.py index 3742a7e3..3c545b0c 100644 --- a/src/emhass/utils.py +++ b/src/emhass/utils.py @@ -763,10 +763,10 @@ 
def get_injection_dict_forecast_model_tune(df_pred_optim: pd.DataFrame, mlf: MLF return injection_dict -def build_params(params: dict, params_secrets: dict, options: dict, addon: int, +def build_params(params: dict, params_secrets: dict, options: dict, logger: logging.Logger) -> dict: """ - Build the main params dictionary from the loaded options.json when using the add-on. + Build the main params dictionary from the loaded options.json. :param params: The main params dictionary :type params: dict @@ -774,143 +774,132 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int, :type params_secrets: dict :param options: The load dictionary from options.json :type options: dict - :param addon: A "bool" to select if we are using the add-on - :type addon: int :param logger: The logger object :type logger: logging.Logger :return: The builded dictionary :rtype: dict """ - if addon == 1: - # Updating variables in retrieve_hass_conf - params["retrieve_hass_conf"]["freq"] = options.get("optimization_time_step", params["retrieve_hass_conf"]["freq"]) - params["retrieve_hass_conf"]["days_to_retrieve"] = options.get("historic_days_to_retrieve", params["retrieve_hass_conf"]["days_to_retrieve"]) - params["retrieve_hass_conf"]["var_PV"] = options.get("sensor_power_photovoltaics", params["retrieve_hass_conf"]["var_PV"]) - params["retrieve_hass_conf"]["var_load"] = options.get("sensor_power_load_no_var_loads", params["retrieve_hass_conf"]["var_load"]) - params["retrieve_hass_conf"]["load_negative"] = options.get("load_negative", params["retrieve_hass_conf"]["load_negative"]) - params["retrieve_hass_conf"]["set_zero_min"] = options.get("set_zero_min", params["retrieve_hass_conf"]["set_zero_min"]) - params["retrieve_hass_conf"]["var_replace_zero"] = [options.get("sensor_power_photovoltaics", params["retrieve_hass_conf"]["var_replace_zero"])] - params["retrieve_hass_conf"]["var_interp"] = [ - options.get("sensor_power_photovoltaics", params["retrieve_hass_conf"]["var_PV"]), - options.get("sensor_power_load_no_var_loads", params["retrieve_hass_conf"]["var_load"]) - ] - params["retrieve_hass_conf"]["method_ts_round"] = options.get("method_ts_round", params["retrieve_hass_conf"]["method_ts_round"]) - params["retrieve_hass_conf"]["continual_publish"] = options.get("continual_publish", params["retrieve_hass_conf"]["continual_publish"]) - # Update params Secrets if specified - params["params_secrets"] = params_secrets - params["params_secrets"]["time_zone"] = options.get("time_zone", params_secrets["time_zone"]) - params["params_secrets"]["lat"] = options.get("Latitude", params_secrets["lat"]) - params["params_secrets"]["lon"] = options.get("Longitude", params_secrets["lon"]) - params["params_secrets"]["alt"] = options.get("Altitude", params_secrets["alt"]) - # Updating variables in optim_conf - params["optim_conf"]["set_use_battery"] = options.get("set_use_battery", params["optim_conf"]["set_use_battery"]) - params["optim_conf"]["num_def_loads"] = options.get("number_of_deferrable_loads", params["optim_conf"]["num_def_loads"]) - if options.get("list_nominal_power_of_deferrable_loads", None) != None: - params["optim_conf"]["P_deferrable_nom"] = [i["nominal_power_of_deferrable_loads"] for i in options.get("list_nominal_power_of_deferrable_loads")] - if options.get("list_operating_hours_of_each_deferrable_load", None) != None: - params["optim_conf"]["def_total_hours"] = [i["operating_hours_of_each_deferrable_load"] for i in options.get("list_operating_hours_of_each_deferrable_load")] - if 
options.get("list_treat_deferrable_load_as_semi_cont", None) != None: - params["optim_conf"]["treat_def_as_semi_cont"] = [i["treat_deferrable_load_as_semi_cont"] for i in options.get("list_treat_deferrable_load_as_semi_cont")] - if options.get("list_set_deferrable_load_single_constant", None) != None: - params["optim_conf"]["set_def_constant"] = [i["set_deferrable_load_single_constant"] for i in options.get("list_set_deferrable_load_single_constant")] - if options.get("list_set_deferrable_startup_penalty", None) != None: - params["optim_conf"]["def_start_penalty"] = [i["set_deferrable_startup_penalty"] for i in options.get("list_set_deferrable_startup_penalty")] - params["optim_conf"]["weather_forecast_method"] = options.get("weather_forecast_method", params["optim_conf"]["weather_forecast_method"]) - # Update optional param secrets - if params["optim_conf"]["weather_forecast_method"] == "solcast": - params["params_secrets"]["solcast_api_key"] = options.get("optional_solcast_api_key", params_secrets.get("solcast_api_key", "123456")) - params["params_secrets"]["solcast_rooftop_id"] = options.get("optional_solcast_rooftop_id", params_secrets.get("solcast_rooftop_id", "123456")) - elif params["optim_conf"]["weather_forecast_method"] == "solar.forecast": - params["params_secrets"]["solar_forecast_kwp"] = options.get("optional_solar_forecast_kwp", params_secrets.get("solar_forecast_kwp", 5)) - params["optim_conf"]["load_forecast_method"] = options.get("load_forecast_method", params["optim_conf"]["load_forecast_method"]) - params["optim_conf"]["delta_forecast"] = options.get("delta_forecast_daily", params["optim_conf"]["delta_forecast"]) - params["optim_conf"]["load_cost_forecast_method"] = options.get("load_cost_forecast_method", params["optim_conf"]["load_cost_forecast_method"]) - if (options.get("list_peak_hours_periods_start_hours", None) != None and options.get("list_peak_hours_periods_end_hours", None) != None): - start_hours_list = [i["peak_hours_periods_start_hours"] for i in options["list_peak_hours_periods_start_hours"]] - end_hours_list = [i["peak_hours_periods_end_hours"] for i in options["list_peak_hours_periods_end_hours"]] - num_peak_hours = len(start_hours_list) - list_hp_periods_list = [{'period_hp_'+str(i+1):[{'start':start_hours_list[i]},{'end':end_hours_list[i]}]} for i in range(num_peak_hours)] - params['optim_conf']['list_hp_periods'] = list_hp_periods_list - params['optim_conf']['load_cost_hp'] = options.get('load_peak_hours_cost', params['optim_conf']['load_cost_hp']) - params['optim_conf']['load_cost_hc'] = options.get('load_offpeak_hours_cost', params['optim_conf']['load_cost_hc']) - params['optim_conf']['prod_price_forecast_method'] = options.get('production_price_forecast_method', params['optim_conf']['prod_price_forecast_method']) - params['optim_conf']['prod_sell_price'] = options.get('photovoltaic_production_sell_price', params['optim_conf']['prod_sell_price']) - params['optim_conf']['set_total_pv_sell'] = options.get('set_total_pv_sell', params['optim_conf']['set_total_pv_sell']) - params['optim_conf']['lp_solver'] = options.get('lp_solver', params['optim_conf']['lp_solver']) - params['optim_conf']['lp_solver_path'] = options.get('lp_solver_path', params['optim_conf']['lp_solver_path']) - params['optim_conf']['set_nocharge_from_grid'] = options.get('set_nocharge_from_grid', params['optim_conf']['set_nocharge_from_grid']) - params['optim_conf']['set_nodischarge_to_grid'] = options.get('set_nodischarge_to_grid', params['optim_conf']['set_nodischarge_to_grid']) - 
params['optim_conf']['set_battery_dynamic'] = options.get('set_battery_dynamic', params['optim_conf']['set_battery_dynamic']) - params['optim_conf']['battery_dynamic_max'] = options.get('battery_dynamic_max', params['optim_conf']['battery_dynamic_max']) - params['optim_conf']['battery_dynamic_min'] = options.get('battery_dynamic_min', params['optim_conf']['battery_dynamic_min']) - params['optim_conf']['weight_battery_discharge'] = options.get('weight_battery_discharge', params['optim_conf']['weight_battery_discharge']) - params['optim_conf']['weight_battery_charge'] = options.get('weight_battery_charge', params['optim_conf']['weight_battery_charge']) - if options.get('list_start_timesteps_of_each_deferrable_load',None) != None: - params['optim_conf']['def_start_timestep'] = [i['start_timesteps_of_each_deferrable_load'] for i in options.get('list_start_timesteps_of_each_deferrable_load')] - if options.get('list_end_timesteps_of_each_deferrable_load',None) != None: - params['optim_conf']['def_end_timestep'] = [i['end_timesteps_of_each_deferrable_load'] for i in options.get('list_end_timesteps_of_each_deferrable_load')] - # Updating variables in plant_conf - params['plant_conf']['P_from_grid_max'] = options.get('maximum_power_from_grid', params['plant_conf']['P_from_grid_max']) - params['plant_conf']['P_to_grid_max'] = options.get('maximum_power_to_grid', params['plant_conf']['P_to_grid_max']) - if options.get('list_pv_module_model',None) != None: - params['plant_conf']['module_model'] = [i['pv_module_model'] for i in options.get('list_pv_module_model')] - if options.get('list_pv_inverter_model',None) != None: - params['plant_conf']['inverter_model'] = [i['pv_inverter_model'] for i in options.get('list_pv_inverter_model')] - if options.get('list_surface_tilt',None) != None: - params['plant_conf']['surface_tilt'] = [i['surface_tilt'] for i in options.get('list_surface_tilt')] - if options.get('list_surface_azimuth',None) != None: - params['plant_conf']['surface_azimuth'] = [i['surface_azimuth'] for i in options.get('list_surface_azimuth')] - if options.get('list_modules_per_string',None) != None: - params['plant_conf']['modules_per_string'] = [i['modules_per_string'] for i in options.get('list_modules_per_string')] - if options.get('list_strings_per_inverter',None) != None: - params['plant_conf']['strings_per_inverter'] = [i['strings_per_inverter'] for i in options.get('list_strings_per_inverter')] - params["plant_conf"]["inverter_is_hybrid"] = options.get("inverter_is_hybrid", params["plant_conf"]["inverter_is_hybrid"]) - params["plant_conf"]["compute_curtailment"] = options.get("compute_curtailment", params["plant_conf"]["compute_curtailment"]) - params['plant_conf']['Pd_max'] = options.get('battery_discharge_power_max', params['plant_conf']['Pd_max']) - params['plant_conf']['Pc_max'] = options.get('battery_charge_power_max', params['plant_conf']['Pc_max']) - params['plant_conf']['eta_disch'] = options.get('battery_discharge_efficiency', params['plant_conf']['eta_disch']) - params['plant_conf']['eta_ch'] = options.get('battery_charge_efficiency', params['plant_conf']['eta_ch']) - params['plant_conf']['Enom'] = options.get('battery_nominal_energy_capacity', params['plant_conf']['Enom']) - params['plant_conf']['SOCmin'] = options.get('battery_minimum_state_of_charge', params['plant_conf']['SOCmin']) - params['plant_conf']['SOCmax'] = options.get('battery_maximum_state_of_charge', params['plant_conf']['SOCmax']) - params['plant_conf']['SOCtarget'] = options.get('battery_target_state_of_charge', 
params['plant_conf']['SOCtarget']) - # Check parameter lists have the same amounts as deferrable loads - # If not, set defaults it fill in gaps - if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_start_timestep']): - logger.warning("def_start_timestep / list_start_timesteps_of_each_deferrable_load does not match number in num_def_loads, adding default values to parameter") - for x in range(len(params['optim_conf']['def_start_timestep']), params['optim_conf']['num_def_loads']): - params['optim_conf']['def_start_timestep'].append(0) - if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_end_timestep']): - logger.warning("def_end_timestep / list_end_timesteps_of_each_deferrable_load does not match number in num_def_loads, adding default values to parameter") - for x in range(len(params['optim_conf']['def_end_timestep']), params['optim_conf']['num_def_loads']): - params['optim_conf']['def_end_timestep'].append(0) - if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['set_def_constant']): - logger.warning("set_def_constant / list_set_deferrable_load_single_constant does not match number in num_def_loads, adding default values to parameter") - for x in range(len(params['optim_conf']['set_def_constant']), params['optim_conf']['num_def_loads']): - params['optim_conf']['set_def_constant'].append(False) - if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['treat_def_as_semi_cont']): - logger.warning("treat_def_as_semi_cont / list_treat_deferrable_load_as_semi_cont does not match number in num_def_loads, adding default values to parameter") - for x in range(len(params['optim_conf']['treat_def_as_semi_cont']), params['optim_conf']['num_def_loads']): - params['optim_conf']['treat_def_as_semi_cont'].append(True) - if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_start_penalty']): - logger.warning("def_start_penalty / list_set_deferrable_startup_penalty does not match number in num_def_loads, adding default values to parameter") - for x in range(len(params['optim_conf']['def_start_penalty']), params['optim_conf']['num_def_loads']): - params['optim_conf']['def_start_penalty'].append(0.0) - # days_to_retrieve should be no less then 2 - if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_total_hours']): - logger.warning("def_total_hours / list_operating_hours_of_each_deferrable_load does not match number in num_def_loads, adding default values to parameter") - for x in range(len(params['optim_conf']['def_total_hours']), params['optim_conf']['num_def_loads']): - params['optim_conf']['def_total_hours'].append(0) - if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['P_deferrable_nom']): - logger.warning("P_deferrable_nom / list_nominal_power_of_deferrable_loads does not match number in num_def_loads, adding default values to parameter") - for x in range(len(params['optim_conf']['P_deferrable_nom']), params['optim_conf']['num_def_loads']): - params['optim_conf']['P_deferrable_nom'].append(0) - # days_to_retrieve should be no less then 2 - if params["retrieve_hass_conf"]["days_to_retrieve"] < 2: - params["retrieve_hass_conf"]["days_to_retrieve"] = 2 - logger.warning("days_to_retrieve should not be lower then 2, setting days_to_retrieve to 2. 
Make sure your sensors also have at least 2 days of history")
-    else:
-        params["params_secrets"] = params_secrets
+
+    params['params_secrets'] = params_secrets
+    params['retrieve_hass_conf'] = params.get('retrieve_hass_conf',{})
+    params['optim_conf'] = params.get('optim_conf',{})
+    params['plant_conf'] = params.get('plant_conf',{})
+
+    associations = []
+    associations_dict = {}
+    associations.append(['retrieve_hass_conf', 'freq', 'optimization_time_step'])
+    associations.append(['retrieve_hass_conf', 'days_to_retrieve', 'historic_days_to_retrieve'])
+    associations.append(['retrieve_hass_conf', 'var_PV', 'sensor_power_photovoltaics'])
+    associations.append(['retrieve_hass_conf', 'var_load', 'sensor_power_load_no_var_loads'])
+    associations.append(['retrieve_hass_conf', 'load_negative', 'load_negative'])
+    associations.append(['retrieve_hass_conf', 'set_zero_min', 'set_zero_min'])
+    associations.append(['retrieve_hass_conf', 'var_replace_zero', 'list_sensor_replace_zero','sensor_replace_zero'])
+    associations.append(['retrieve_hass_conf', 'var_interp', 'list_sensor_linear_interp','sensor_linear_interp'])
+    associations.append(['retrieve_hass_conf', 'method_ts_round', 'method_ts_round'])
+    associations.append(['retrieve_hass_conf', 'continual_publish', 'continual_publish'])
+    associations.append(['params_secrets', 'time_zone', 'time_zone'])
+    associations.append(['params_secrets', 'lat', 'Latitude'])
+    associations.append(['params_secrets', 'lon', 'Longitude'])
+    associations.append(['params_secrets', 'alt', 'Altitude'])
+    associations.append(['optim_conf', 'set_use_battery', 'set_use_battery'])
+    associations.append(['optim_conf', 'num_def_loads', 'number_of_deferrable_loads'])
+    associations.append(['optim_conf', 'P_deferrable_nom', 'list_nominal_power_of_deferrable_loads','nominal_power_of_deferrable_loads'])
+    associations.append(['optim_conf', 'def_total_hours', 'list_operating_hours_of_each_deferrable_load','operating_hours_of_each_deferrable_load'])
+    associations.append(['optim_conf', 'treat_def_as_semi_cont', 'list_treat_deferrable_load_as_semi_cont','treat_deferrable_load_as_semi_cont'])
+    associations.append(['optim_conf', 'set_def_constant', 'list_set_deferrable_load_single_constant','set_deferrable_load_single_constant'])
+    associations.append(['optim_conf', 'def_start_penalty', 'list_set_deferrable_startup_penalty','set_deferrable_startup_penalty'])
+    associations.append(['optim_conf', 'delta_forecast', 'delta_forecast_daily'])
+    associations.append(['optim_conf', 'load_forecast_method', 'load_forecast_method'])
+    associations.append(['optim_conf', 'load_cost_forecast_method', 'load_cost_forecast_method'])
+    associations.append(['optim_conf', 'load_cost_hp', 'load_peak_hours_cost'])
+    associations.append(['optim_conf', 'load_cost_hc', 'load_offpeak_hours_cost'])
+    associations.append(['optim_conf', 'prod_price_forecast_method', 'production_price_forecast_method'])
+    associations.append(['optim_conf', 'prod_sell_price', 'photovoltaic_production_sell_price'])
+    associations.append(['optim_conf', 'set_total_pv_sell', 'set_total_pv_sell'])
+    associations.append(['optim_conf', 'lp_solver', 'lp_solver'])
+    associations.append(['optim_conf', 'lp_solver_path', 'lp_solver_path'])
+    associations.append(['optim_conf', 'set_nocharge_from_grid', 'set_nocharge_from_grid'])
+    associations.append(['optim_conf', 'set_nodischarge_to_grid', 'set_nodischarge_to_grid'])
+    associations.append(['optim_conf', 'set_battery_dynamic', 'set_battery_dynamic'])
+    associations.append(['optim_conf', 'battery_dynamic_max', 'battery_dynamic_max'])
+    associations.append(['optim_conf', 'battery_dynamic_min', 'battery_dynamic_min'])
+    associations.append(['optim_conf', 'weight_battery_discharge', 'weight_battery_discharge'])
+    associations.append(['optim_conf', 'weight_battery_charge', 'weight_battery_charge'])
+    associations.append(['optim_conf', 'weather_forecast_method', 'weather_forecast_method'])
+    associations.append(['optim_conf', 'def_start_timestep', 'list_start_timesteps_of_each_deferrable_load','start_timesteps_of_each_deferrable_load'])
+    associations.append(['optim_conf', 'def_end_timestep', 'list_end_timesteps_of_each_deferrable_load','end_timesteps_of_each_deferrable_load'])
+    associations.append(['plant_conf', 'P_from_grid_max', 'maximum_power_from_grid'])
+    associations.append(['plant_conf', 'P_to_grid_max', 'maximum_power_to_grid'])
+    associations.append(['plant_conf', 'module_model', 'list_pv_module_model','pv_module_model'])
+    associations.append(['plant_conf', 'inverter_model', 'list_pv_inverter_model','pv_inverter_model'])
+    associations.append(['plant_conf', 'surface_tilt', 'list_surface_tilt','surface_tilt'])
+    associations.append(['plant_conf', 'surface_azimuth', 'list_surface_azimuth','surface_azimuth'])
+    associations.append(['plant_conf', 'modules_per_string', 'list_modules_per_string','modules_per_string'])
+    associations.append(['plant_conf', 'strings_per_inverter', 'list_strings_per_inverter','strings_per_inverter'])
+    associations.append(['plant_conf', 'inverter_is_hybrid', 'inverter_is_hybrid'])
+    associations.append(['plant_conf', 'compute_curtailment', 'compute_curtailment'])
+    associations.append(['plant_conf', 'Pd_max', 'battery_discharge_power_max'])
+    associations.append(['plant_conf', 'Pc_max', 'battery_charge_power_max'])
+    associations.append(['plant_conf', 'eta_disch', 'battery_discharge_efficiency'])
+    associations.append(['plant_conf', 'eta_ch', 'battery_charge_efficiency'])
+    associations.append(['plant_conf', 'Enom', 'battery_nominal_energy_capacity'])
+    associations.append(['plant_conf', 'SOCmin', 'battery_minimum_state_of_charge'])
+    associations.append(['plant_conf', 'SOCmax', 'battery_maximum_state_of_charge'])
+    associations.append(['plant_conf', 'SOCtarget', 'battery_target_state_of_charge'])
+
+    logger.debug("Overriding config parameters with optional parameters with associations:")
+    for i in associations:
+        #logger.debug(str(i[1]) +":" + str(params[i[0]][i[1]]))
+        #check if params parameter has multiple options
+        if type(i[2]) is list:
+            params[i[0]][i[1]] = []
+            for j in i[2]:
+                params[i[0]][i[1]].append(options[j])
+        # Check if options are null
+        elif options.get(i[2],None) is not None:
+            # Check if options parameter is list
+            if len(i) == 4:
+                params[i[0]][i[1]] = [x[i[3]] for x in options[i[2]]]
+            elif type(options[i[2]]) is list:
+                params[i[0]][i[1]] = [options[i[2]]]
+            else:
+                params[i[0]][i[1]] = options[i[2]]
+            associations_dict.update({i[1]:i[2]})
+        #logger.debug(str(i[1]) +":" + str(params[i[0]][i[1]]))
+
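Each association row reads `[params section, internal parameter name, options.json key(, key inside each list item)]`; the loop above copies any option that is present into the nested params dictionary, flattening the single-key-dict lists that options.json uses. A condensed, self-contained sketch of that behavior (values are illustrative, not EMHASS defaults):

```python
# Minimal stand-alone sketch of the association-driven override loop above.
params = {"retrieve_hass_conf": {}, "optim_conf": {}, "plant_conf": {}}
options = {
    "optimization_time_step": 30,
    "list_sensor_replace_zero": [
        {"sensor_replace_zero": "sensor.power_photovoltaics"},
    ],
}
associations = [
    # [params section, internal key, options.json key, (optional) key inside list items]
    ["retrieve_hass_conf", "freq", "optimization_time_step"],
    ["retrieve_hass_conf", "var_replace_zero", "list_sensor_replace_zero", "sensor_replace_zero"],
]
for assoc in associations:
    section, internal_key, option_key = assoc[0], assoc[1], assoc[2]
    if options.get(option_key) is None:
        continue  # keep the pre-existing/default value
    if len(assoc) == 4:
        # options.json stores lists as lists of single-key dicts; flatten them
        params[section][internal_key] = [item[assoc[3]] for item in options[option_key]]
    else:
        params[section][internal_key] = options[option_key]

print(params["retrieve_hass_conf"])
# -> {'freq': 30, 'var_replace_zero': ['sensor.power_photovoltaics']}
```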
options.get("optional_solcast_rooftop_id", params_secrets.get("solcast_rooftop_id", "123456")) + elif params["optim_conf"]["weather_forecast_method"] == "solar.forecast": + params["params_secrets"]["solar_forecast_kwp"] = options.get("optional_solar_forecast_kwp", params_secrets.get("solar_forecast_kwp", 5)) + # Make and set list_hp_periods + if (options.get("list_peak_hours_periods_start_hours", None) != None and options.get("list_peak_hours_periods_end_hours", None) != None): + start_hours_list = [i["peak_hours_periods_start_hours"] for i in options["list_peak_hours_periods_start_hours"]] + end_hours_list = [i["peak_hours_periods_end_hours"] for i in options["list_peak_hours_periods_end_hours"]] + num_peak_hours = len(start_hours_list) + list_hp_periods_list = [{'period_hp_'+str(i+1):[{'start':start_hours_list[i]},{'end':end_hours_list[i]}]} for i in range(num_peak_hours)] + params['optim_conf']['list_hp_periods'] = list_hp_periods_list + + # Check parameter lists have the same amounts as deferrable loads + # If not, set defaults it fill in gaps + num_def_loads = params['optim_conf']['num_def_loads'] + params['optim_conf']['def_start_timestep'] = check_def_loads(num_def_loads,params['optim_conf']['def_start_timestep'],0,'def_start_timestep',logger) + params['optim_conf']['def_end_timestep'] = check_def_loads(num_def_loads,params['optim_conf']['def_end_timestep'],0,'def_end_timestep',logger) + params['optim_conf']['set_def_constant'] = check_def_loads(num_def_loads,params['optim_conf']['set_def_constant'],False,'set_def_constant',logger) + params['optim_conf']['treat_def_as_semi_cont'] = check_def_loads(num_def_loads,params['optim_conf']['treat_def_as_semi_cont'],True,'treat_def_as_semi_cont',logger) + params['optim_conf']['def_start_penalty'] = check_def_loads(num_def_loads,params['optim_conf']['def_start_penalty'],0.0,'def_start_penalty',logger) + params['optim_conf']['def_total_hours'] = check_def_loads(num_def_loads,params['optim_conf']['def_total_hours'],0,'def_total_hours',logger) + params['optim_conf']['P_deferrable_nom'] = check_def_loads(num_def_loads,params['optim_conf']['P_deferrable_nom'],0,'P_deferrable_nom',logger) + # days_to_retrieve should be no less then 2 + if params["retrieve_hass_conf"]["days_to_retrieve"] < 2: + params["retrieve_hass_conf"]["days_to_retrieve"] = 2 + logger.warning("days_to_retrieve should not be lower then 2, setting days_to_retrieve to 2. Make sure your sensors also have at least 2 days of history") + + # params['associations_dict'] = associations_dict + # The params dict params["passed_data"] = { "pv_power_forecast": None, @@ -928,6 +917,29 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int, } return params +def check_def_loads(num_def_loads: int, parameter: list[dict], default, parameter_name: str, logger): + """ + Check parameter lists with deferrable loads number, if they do not match, enlarge to fit. 
+def check_def_loads(num_def_loads: int, parameter: list[dict], default, parameter_name: str, logger):
+    """
+    Check a parameter list against the number of deferrable loads; if it is shorter, enlarge it to fit.
+
+    :param num_def_loads: Total number of deferrable loads
+    :type num_def_loads: int
+    :param parameter: parameter list to check the length of
+    :type parameter: list[dict]
+    :param default: default value used to pad missing entries
+    :type default: obj
+    :param parameter_name: name of the parameter
+    :type parameter_name: str
+    :param logger: The logger object
+    :type logger: logging.Logger
+    :return: parameter list
+    :rtype: list[dict]
+
+    """
+    if num_def_loads > len(parameter):
+        logger.warning(parameter_name + " does not match number in num_def_loads, adding default values ("+ str(default) + ") to parameter")
+        for x in range(len(parameter), num_def_loads):
+            parameter.append(default)
+
 def get_days_list(days_to_retrieve: int) -> pd.date_range:
     """
diff --git a/src/emhass/web_server.py b/src/emhass/web_server.py
index 82439795..5453884f 100644
--- a/src/emhass/web_server.py
+++ b/src/emhass/web_server.py
@@ -242,81 +242,58 @@ def action_call(action_name):
     parser = argparse.ArgumentParser()
     parser.add_argument('--url', type=str, help='The URL to your Home Assistant instance, ex the external_url in your hass configuration')
     parser.add_argument('--key', type=str, help='Your access key. If using EMHASS in standalone this should be a Long-Lived Access Token')
-    parser.add_argument('--addon', type=strtobool, default='False', help='Define if we are usinng EMHASS with the add-on or in standalone mode')
+    parser.add_argument('--addon', type=strtobool, default='False', help='Define if we are using EMHASS with the add-on or in standalone mode')
     parser.add_argument('--no_response', type=strtobool, default='False', help='This is set if json response errors occur')
     args = parser.parse_args()
-
-    #Obtain url and key from ENV or ARG (if any)
-    hass_url = os.getenv("EMHASS_URL", default=args.url)
-    key = os.getenv("SUPERVISOR_TOKEN", default=args.key)
-    if hass_url != "http://supervisor/core/api":
-        key = os.getenv("EMHASS_KEY", key)
-    #If url or key is None, Set as empty string to reduce NoneType errors bellow
-    if key is None: key = ""
-    if hass_url is None: hass_url = ""
-
-    #find env's, not not set defaults
+    # Find env vars; if not set, use defaults
     use_options = os.getenv('USE_OPTIONS', default=False)
     CONFIG_PATH = os.getenv("CONFIG_PATH", default="/app/config_emhass.yaml")
     OPTIONS_PATH = os.getenv('OPTIONS_PATH', default="/app/options.json")
     DATA_PATH = os.getenv("DATA_PATH", default="/app/data/")
     ROOT_PATH = os.getenv("ROOT_PATH", default=str(Path(__file__).parent))
-    #options None by default
+    # Options None by default
     options = None

     # Define the paths
-    if args.addon==1:
-        options_json = Path(OPTIONS_PATH)
-        # Read options info
-        if options_json.exists():
-            with options_json.open('r') as data:
-                options = json.load(data)
-        else:
-            app.logger.error("options.json does not exist")
-            raise Exception("options.json does not exist in path: "+str(options_json))
-    else:
-        if use_options:
-            options_json = Path(OPTIONS_PATH)
-            # Read options info
-            if options_json.exists():
-                with options_json.open('r') as data:
-                    options = json.load(data)
-            else:
-                app.logger.error("options.json does not exist")
-                raise Exception("options.json does not exist in path: "+str(options_json))
-        else:
-            options = None
-
-    #if data path specified by options.json
-    if options is not None:
-        if options.get('data_path', None) != None and options.get('data_path', None) != "default":
-            DATA_PATH = options.get('data_path', None);
-
-    #save paths to dictionary
+    options_json = Path(OPTIONS_PATH)
     config_path = Path(CONFIG_PATH)
     data_path = Path(DATA_PATH)
     root_path = Path(ROOT_PATH)
     emhass_conf = {}
+    emhass_conf['options_path'] = options_json
     emhass_conf['config_path'] = config_path
     emhass_conf['data_path'] = data_path
     emhass_conf['root_path'] = root_path

-    # Read the example default config file
+    # Read options info
+    if options_json.exists():
+        with options_json.open('r') as data:
+            options = json.load(data)
+    else:
+        app.logger.error("options.json does not exist")
+        raise Exception("options.json does not exist in path: "+str(options_json))
+
+    # If data path specified by options.json
+    if options is not None:
+        if options.get('data_path', None) != None and options.get('data_path', None) != "default":
+            DATA_PATH = options.get('data_path', None)
+
+    # Check to see if legacy config_emhass.yaml was provided
+    params = {}
     if config_path.exists():
         with open(config_path, 'r') as file:
             config = yaml.load(file, Loader=yaml.FullLoader)
         retrieve_hass_conf = config['retrieve_hass_conf']
         optim_conf = config['optim_conf']
         plant_conf = config['plant_conf']
+        params['retrieve_hass_conf'] = retrieve_hass_conf
+        params['optim_conf'] = optim_conf
+        params['plant_conf'] = plant_conf
     else:
-        app.logger.error("Unable to open the default configuration yaml file")
-        raise Exception("Failed to open config file, config_path: "+str(config_path))
+        params = {}
+
     web_ui_url = '0.0.0.0'

     # Initialize this global dict
@@ -325,111 +302,67 @@ def action_call(action_name):
         injection_dict = pickle.load(fid)
     else:
         injection_dict = None
+
+    ## secrets
     params_secrets = {}
-    if args.addon==1:
-        # The cost function
-        costfun = options.get('costfun', 'profit')
-        # Some data from options
-        logging_level = options.get('logging_level','INFO')
-        url_from_options = options.get('hass_url', 'empty')
-        if url_from_options == 'empty' or url_from_options == '' or url_from_options == "http://supervisor/core/api":
-            url = "http://supervisor/core/api/config"
-        else:
-            hass_url = url_from_options
-            url = hass_url+"api/config"
-        token_from_options = options.get('long_lived_token', 'empty')
-        if token_from_options == 'empty' or token_from_options == '':
-            long_lived_token = key
-        else:
-            long_lived_token = token_from_options
+    # secrets from ARG or ENV?
+    hass_url = os.getenv("EMHASS_URL", default=args.url)
+    key = os.getenv("SUPERVISOR_TOKEN", os.getenv("EMHASS_KEY", args.key))
+    params_secrets['time_zone'] = os.getenv("TIME_ZONE", default="Europe/Paris")
+    params_secrets['lat'] = float(os.getenv("LAT", default="45.83"))
+    params_secrets['lon'] = float(os.getenv("LON", default="6.86"))
+    params_secrets['alt'] = float(os.getenv("ALT", default="4807.8"))
+    costfun = os.getenv('LOCAL_COSTFUN', options.get('costfun', 'profit'))
+    logging_level = os.getenv('LOGGING_LEVEL', options.get('logging_level','INFO'))
+    # if url or key is None, set as empty string to reduce NoneType errors below
+    if key is None: key = ""
+    if hass_url is None: hass_url = ""
+
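The secret resolution above starts from environment variables, falling back to command-line arguments and hard-coded location defaults; the Home Assistant API and secrets-file branches that follow can then override these values. A minimal sketch of that first lookup stage (the env var names match the ones read above; everything else is illustrative):

```python
import os

def first_set(*candidates, default=None):
    """Return the first candidate that is not None or empty, else the default."""
    for value in candidates:
        if value not in (None, ""):
            return value
    return default

args_url = None   # stand-in for args.url
args_key = None   # stand-in for args.key
hass_url = first_set(os.getenv("EMHASS_URL"), args_url, default="")
key = first_set(os.getenv("SUPERVISOR_TOKEN"), os.getenv("EMHASS_KEY"), args_key, default="")
time_zone = os.getenv("TIME_ZONE", "Europe/Paris")  # hard-coded location fallback
```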
+    # secrets from Home Assistant?
+    url_from_options = options.get('hass_url', 'empty')
+    key_from_options = options.get('long_lived_token', 'empty')
+    # to use the Home Assistant local API
+    if (url_from_options == 'empty' or url_from_options == '' or url_from_options == "http://supervisor/core/api") and os.getenv("SUPERVISOR_TOKEN", None) is not None:
+        hass_url = "http://supervisor/core/api/config"
         headers = {
-            "Authorization": "Bearer " + long_lived_token,
-            "content-type": "application/json"
+            "Authorization": "Bearer " + key,
+            "content-type": "application/json"
         }
         if not args.no_response==1:
-            response = get(url, headers=headers)
-            config_hass = response.json()
-            params_secrets = {
-                'hass_url': hass_url,
-                'long_lived_token': long_lived_token,
-                'time_zone': config_hass['time_zone'],
-                'lat': config_hass['latitude'],
-                'lon': config_hass['longitude'],
-                'alt': config_hass['elevation']
-            }
-    else: #if no_response is set to true
-        costfun = os.getenv('LOCAL_COSTFUN', default='profit')
-        logging_level = os.getenv('LOGGING_LEVEL', default='INFO')
-        # check if secrets file exists
-        if Path(os.getenv('SECRETS_PATH', default='/app/secrets_emhass.yaml')).is_file():
+            app.logger.debug("obtaining secrets from Home Assistant API")
+            response = get(hass_url, headers=headers)
+            if response.status_code < 400:
+                config_hass = response.json()
+                params_secrets = {
+                    'hass_url': hass_url,
+                    'long_lived_token': key,
+                    'time_zone': config_hass['time_zone'],
+                    'lat': config_hass['latitude'],
+                    'lon': config_hass['longitude'],
+                    'alt': config_hass['elevation']
+                }
+    else:
+        hass_url = url_from_options
+        if key_from_options != 'empty' and key_from_options != '':
+            key = key_from_options
+    # secrets from secrets_emhass.yaml?
+    if Path(os.getenv('SECRETS_PATH', default='/app/secrets_emhass.yaml')).is_file():
+        app.logger.debug("obtaining secrets from secrets file")
         with open(os.getenv('SECRETS_PATH', default='/app/secrets_emhass.yaml'), 'r') as file:
             params_secrets = yaml.load(file, Loader=yaml.FullLoader)
-        app.logger.debug("Obtained secrets from secrets file")
-        #If cant find secrets_emhass file, use env
-        else:
-            app.logger.debug("Failed to find secrets file: "+str(os.getenv('SECRETS_PATH', default='/app/secrets_emhass.yaml')))
-            app.logger.debug("Setting location defaults")
-            params_secrets = {}
-            #If no secrets file try args, else set some defaults
-            params_secrets['time_zone'] = os.getenv("TIME_ZONE", default="Europe/Paris")
-            params_secrets['lat'] = float(os.getenv("LAT", default="45.83"))
-            params_secrets['lon'] = float(os.getenv("LON", default="6.86"))
-            params_secrets['alt'] = float(os.getenv("ALT", default="4807.8"))
-        #If ARG/ENV specify url and key, then override secrets file
-        if hass_url != "":
-            params_secrets['hass_url'] = hass_url
-            app.logger.debug("Using URL obtained from ARG/ENV")
-        else:
-            hass_url = params_secrets.get('hass_url',"http://localhost:8123/")
-        if long_lived_token != "":
-            params_secrets['long_lived_token'] = long_lived_token
-            app.logger.debug("Using Key obtained from ARG/ENV")
-    else: #If addon is false
-        costfun = os.getenv('LOCAL_COSTFUN', default='profit')
-        logging_level = os.getenv('LOGGING_LEVEL', default='INFO')
-        if Path(os.getenv('SECRETS_PATH', default='/app/secrets_emhass.yaml')).is_file():
-            with open(os.getenv('SECRETS_PATH', default='/app/secrets_emhass.yaml'), 'r') as file:
-                params_secrets = yaml.load(file, Loader=yaml.FullLoader)
-            #Check if URL and KEY are provided by file.
If not attempt using values from ARG/ENV - if params_secrets.get("hass_url", "empty") == "empty" or params_secrets['hass_url'] == "": - app.logger.info("No specified Home Assistant URL in secrets_emhass.yaml. Attempting to get from ARG/ENV") - if hass_url != "": - params_secrets['hass_url'] = hass_url - else: - app.logger.error("Can not find Home Assistant URL from secrets_emhass.yaml or ARG/ENV") - raise Exception("Can not find Home Assistant URL from secrets_emhass.yaml or ARG/ENV") - else: - hass_url = params_secrets['hass_url'] - if params_secrets.get("long_lived_token", "empty") == "empty" or params_secrets['long_lived_token'] == "": - app.logger.info("No specified Home Assistant KEY in secrets_emhass.yaml. Attempting to get from ARG/ENV") - if key != "": - params_secrets['long_lived_token'] = key - else: - app.logger.error("Can not find Home Assistant KEY from secrets_emhass.yaml or ARG/ENV") - raise Exception("Can not find Home Assistant KEY from secrets_emhass.yaml or ARG/ENV") - else: #If no secrets file try args, else set some defaults - app.logger.info("Failed to find secrets_emhass.yaml in directory:" + os.getenv('SECRETS_PATH', default='/app/secrets_emhass.yaml') ) - app.logger.info("Attempting to use secrets from arguments or environment variables") - params_secrets = {} - params_secrets['time_zone'] = os.getenv("TIME_ZONE", default="Europe/Paris") - params_secrets['lat'] = float(os.getenv("LAT", default="45.83")) - params_secrets['lon'] = float(os.getenv("LON", default="6.86")) - params_secrets['alt'] = float(os.getenv("ALT", default="4807.8")) - if hass_url != "": - params_secrets['hass_url'] = hass_url - else: #If cant find secrets_emhass and passed url ENV/ARG, then send error - app.logger.error("No specified Home Assistant URL") - raise Exception("Can not find Home Assistant URL from secrets_emhass.yaml or ARG/ENV") - if key != "": - params_secrets['long_lived_token'] = key - else: #If cant find secrets_emhass and passed key ENV/ARG, then send error - app.logger.error("No specified Home Assistant KEY") - raise Exception("Can not find Home Assistant KEY from secrets_emhass.yaml or ARG/ENV") + #Check if URL and KEY are provided by file. 
+ if params_secrets.get("hass_url", "empty") != "empty": + hass_url = params_secrets['hass_url'] + if params_secrets.get("long_lived_token", "empty") != "empty": + key = params_secrets['long_lived_token'] + + + params_secrets['hass_url'] = hass_url + params_secrets['long_lived_token'] = key + # Build params - if use_options: - params = build_params(params, params_secrets, options, 1, app.logger) - else: - params = build_params(params, params_secrets, options, args.addon, app.logger) + params = build_params(params, params_secrets, options, app.logger) if os.path.exists(str(emhass_conf['data_path'])): with open(str(emhass_conf['data_path'] / 'params.pkl'), "wb") as fid: pickle.dump((config_path, params), fid) @@ -437,11 +370,11 @@ def action_call(action_name): raise Exception("missing: " + str(emhass_conf['data_path'])) # Define logger - #stream logger + # Stream logger ch = logging.StreamHandler() formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') ch.setFormatter(formatter) - #Action File logger + # Action file logger fileLogger = logging.FileHandler(str(emhass_conf['data_path'] / 'actionLogs.txt')) formatter = logging.Formatter('%(levelname)s - %(name)s - %(message)s') fileLogger.setFormatter(formatter) # add format to Handler @@ -468,7 +401,8 @@ def action_call(action_name): app.logger.propagate = False app.logger.addHandler(ch) app.logger.addHandler(fileLogger) - clearFileLog() #Clear Action File logger file, ready for new instance + # Clear Action File logger file, ready for new instance + clearFileLog() # If entity_path exists, remove any entity/metadata files entity_path = emhass_conf['data_path'] / "entities" From c31c71d3ff881cdc6c3057db2cf56abc13d5bae4 Mon Sep 17 00:00:00 2001 From: GeoDerp <18461782+GeoDerp@users.noreply.github.com> Date: Mon, 5 Aug 2024 07:49:44 +0000 Subject: [PATCH 02/35] Fix check_def_loads --- src/emhass/utils.py | 40 +++++++++++++--------------------------- 1 file changed, 13 insertions(+), 27 deletions(-) diff --git a/src/emhass/utils.py b/src/emhass/utils.py index 3c545b0c..9983dd66 100644 --- a/src/emhass/utils.py +++ b/src/emhass/utils.py @@ -567,24 +567,11 @@ def get_yaml_parse(emhass_conf: dict, use_secrets: Optional[bool] = True, :rtype: tuple(dict) """ - if params is None: - with open(emhass_conf["config_path"], 'r') as file: - input_conf = yaml.load(file, Loader=yaml.FullLoader) - else: - input_conf = json.loads(params) - if use_secrets: - if params is None: - with open(emhass_conf["config_path"].parent / 'secrets_emhass.yaml', 'r') as file: # Assume secrets and config file paths are the same - input_secrets = yaml.load(file, Loader=yaml.FullLoader) - else: - input_secrets = input_conf.pop("params_secrets", None) + input_conf = json.loads(params) + + input_secrets = input_conf.pop("params_secrets", None) - if type(input_conf["retrieve_hass_conf"]) == list: # if using old config version - retrieve_hass_conf = dict( - {key: d[key] for d in input_conf["retrieve_hass_conf"] for key in d} - ) - else: - retrieve_hass_conf = input_conf.get("retrieve_hass_conf", {}) + retrieve_hass_conf = input_conf.get("retrieve_hass_conf", {}) if use_secrets: retrieve_hass_conf.update(input_secrets) @@ -598,20 +585,17 @@ def get_yaml_parse(emhass_conf: dict, use_secrets: Optional[bool] = True, retrieve_hass_conf["freq"] = pd.to_timedelta(retrieve_hass_conf["freq"], "minutes") retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"]) - if type(input_conf["optim_conf"]) == list: - optim_conf = dict({key: d[key] for d 
in input_conf["optim_conf"] for key in d}) - else: - optim_conf = input_conf.get("optim_conf", {}) + optim_conf = input_conf.get("optim_conf", {}) + # Format list_hp_periods optim_conf["list_hp_periods"] = dict( (key, d[key]) for d in optim_conf["list_hp_periods"] for key in d ) + + #Format delta_forecast optim_conf["delta_forecast"] = pd.Timedelta(days=optim_conf["delta_forecast"]) - if type(input_conf["plant_conf"]) == list: - plant_conf = dict({key: d[key] for d in input_conf["plant_conf"] for key in d}) - else: - plant_conf = input_conf.get("plant_conf", {}) + plant_conf = input_conf.get("plant_conf", {}) return retrieve_hass_conf, optim_conf, plant_conf @@ -851,7 +835,7 @@ def build_params(params: dict, params_secrets: dict, options: dict, logger.debug("Overriding config parameters with optional parameters with associations:") for i in associations: - #logger.debug(str(i[1]) +":" + str(params[i[0]][i[1]])) + #logger.info(str(i[1]) +":" + str(params[i[0]][i[1]])) #check if params parameter has multiple options if type(i[2]) is list: params[i[0]][i[1]] = [] @@ -867,7 +851,7 @@ def build_params(params: dict, params_secrets: dict, options: dict, else: params[i[0]][i[1]] = options[i[2]] associations_dict.update({i[1]:i[2]}) - #logger.debug(str(i[1]) +":" + str(params[i[0]][i[1]])) + #logger.info(str(i[1]) +":" + str(params[i[0]][i[1]])) # Update optional param secrets if params["optim_conf"]["weather_forecast_method"] == "solcast": @@ -915,6 +899,7 @@ def build_params(params: dict, params_secrets: dict, options: dict, "alpha": None, "beta": None, } + return params def check_def_loads(num_def_loads: int, parameter: list[dict], default, parameter_name: str, logger): @@ -939,6 +924,7 @@ def check_def_loads(num_def_loads: int, parameter: list[dict], default, paramete logger.warning(parameter_name + " does not match number in num_def_loads, adding default values ("+ str(default) + ") to parameter") for x in range(len(parameter), num_def_loads): parameter.append(default) + return parameter def get_days_list(days_to_retrieve: int) -> pd.date_range: From d1f728a1083f3903d6f7077c9baf12e7d5b32833 Mon Sep 17 00:00:00 2001 From: GeoDerp <18461782+GeoDerp@users.noreply.github.com> Date: Sun, 18 Aug 2024 06:44:23 +0000 Subject: [PATCH 03/35] init merge config_emhass.yaml and options.json to config.json --- .devcontainer/devcontainer.json | 12 +- .vscode/launch.json | 5 +- Dockerfile | 4 +- README.md | 18 +- config.json | 164 ++++++ data/associations.csv | 61 +++ data/config_defaults.json | 163 ++++++ docs/lpems.md | 2 +- docs/mlforecaster.md | 2 +- options.json | 172 +----- secrets_emhass(example).yaml | 6 +- src/emhass/command_line.py | 214 +++++--- src/emhass/forecast.py | 48 +- src/emhass/optimization.py | 124 ++--- src/emhass/utils.py | 603 ++++++++++++++-------- src/emhass/web_server.py | 149 ++---- tests/test_command_line_utils.py | 112 ++-- tests/test_forecast.py | 260 ++++++---- tests/test_machine_learning_forecaster.py | 47 +- tests/test_machine_learning_regressor.py | 47 +- tests/test_optimization.py | 137 +++-- tests/test_retrieve_hass.py | 133 ++--- tests/test_utils.py | 210 ++++---- 23 files changed, 1598 insertions(+), 1095 deletions(-) create mode 100644 config.json create mode 100644 data/associations.csv create mode 100644 data/config_defaults.json diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 73b448f5..953c43d9 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -8,6 +8,13 @@ "args": { "TARGETARCH": "amd64"} }, 
"features": { + "ghcr.io/devcontainers/features/common-utils:2": { + "installZsh": "true", + "configureZshAsDefaultShell": "true", + "installOhMyZsh": "false", + "installOhMyZshConfig": "false", + "upgradePackages": "true" + }, "ghcr.io/devcontainers/features/docker-outside-of-docker:1": {} }, //"appPort": ["5000:5000"] //used to access app from external device (User discretion advised) @@ -18,5 +25,8 @@ // Add the IDs of extensions you want installed when the container is created. "extensions": ["ms-python.debugpy", "ms-python.python"] } - } + }, + + "postCreateCommand": ["pip3", "install", "requests-mock", "--break-system-packages"] + } diff --git a/.vscode/launch.json b/.vscode/launch.json index e0313551..636c0056 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -19,8 +19,11 @@ ], "justMyCode": true, "env": { - "CONFIG_PATH": "/workspaces/emhass/config_emhass.yaml", + "LEGACY_CONFIG_PATH": "/workspaces/emhass/config_emhass.yaml", + "CONFIG_PATH": "/workspaces/emhass/config.json", "OPTIONS_PATH": "/workspaces/emhass/options.json", + "DEFAULTS_PATH": "/workspaces/emhass/data/config_defaults.json", + "ASSOCIATIONS_PATH": "/workspaces/emhass/data/associations.csv", "SECRETS_PATH": "/workspaces/emhass/secrets_emhass.yaml", "DATA_PATH": "/workspaces/emhass/data/", "EMHASS_URL": "http://HAIPHERE:8123/", //change diff --git a/Dockerfile b/Dockerfile index f609335a..0a75fff6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -69,8 +69,8 @@ RUN apt-get purge -y --auto-remove \ libnetcdf-dev \ && rm -rf /var/lib/apt/lists/* -#copy config file -COPY config_emhass.yaml /app/ +#copy default parameters +COPY config.json /app/ #make sure data directory exists RUN mkdir -p /app/data/ diff --git a/README.md b/README.md index d5a1a830..76e6f998 100644 --- a/README.md +++ b/README.md @@ -279,13 +279,13 @@ In `automations.yaml`: ``` in configuration page/`config_emhass.yaml` ```json -"method_ts_round": "first" -"continual_publish": true +'method_ts_round': "first" +'continual_publish': true ``` In this automation, the day-ahead optimization is performed once a day, every day at 5:30am. If the `freq` parameter is set to `30` *(default)* in the configuration, the results of the day-ahead optimization will generate 48 values *(for each entity)*, a value for every 30 minutes in a day *(i.e. 24 hrs x 2)*. -Setting the parameter `continual_publish` to `true` in the configuration page will allow EMHASS to store the optimization results as entities/sensors into separate json files. `continual_publish` will periodically (every `freq` amount of minutes) run a publish, and publish the optimization results of each generated entities/sensors to Home Assistant. The current state of the sensor/entity being updated every time publish runs, selecting one of the 48 stored values, by comparing the stored values' timestamps, the current timestamp and [`"method_ts_round": "first"`](#the-publish-data-specificities) to select the optimal stored value for the current state. +Setting the parameter `continual_publish` to `true` in the configuration page will allow EMHASS to store the optimization results as entities/sensors into separate json files. `continual_publish` will periodically (every `freq` amount of minutes) run a publish, and publish the optimization results of each generated entities/sensors to Home Assistant. 
 
 Option 1 and 2 are very similar; however, option 2 (`continual_publish`) requires a CPU thread to run constantly inside of EMHASS, lowering efficiency. The reason why you may pick one over the other is explained in more detail below in [continual_publish](#continual_publish-emhass-automation).
 
@@ -406,7 +406,7 @@ For users that wish to run multiple different optimizations, you can set the run
 # RUN dayahead, with freq=30 (default), prefix=dh_
 curl -i -H 'Content-Type:application/json' -X POST -d '{"publish_prefix":"dh_"}' http://localhost:5000/action/dayahead-optim
 # RUN MPC, with freq=5, prefix=mpc_
-curl -i -H 'Content-Type:application/json' -X POST -d '{"freq":5,"publish_prefix":"mpc_"}' http://localhost:5000/action/naive-mpc-optim
+curl -i -H 'Content-Type:application/json' -X POST -d '{"optimization_time_step":5,"publish_prefix":"mpc_"}' http://localhost:5000/action/naive-mpc-optim
 ```
 This will tell continual_publish to loop every 5 minutes based on the freq passed in MPC. All entities from the output of dayahead "dh_" and MPC "mpc_" will be published every 5 minutes.
 
@@ -416,13 +416,13 @@ This will tell continual_publish to loop every 5 minutes based on the freq passe
 
 #### Mixture of continual_publish and manual *(Home Assistant Automation for Publish)*
 
-You can choose to save one optimization for continual_publish and bypass another optimization by setting `"continual_publish":false` runtime parameter:
+You can choose to save one optimization for continual_publish and bypass another optimization by setting the `"continual_publish": false` runtime parameter:
 
 ```bash
 # RUN dayahead, with freq=30 (default), prefix=dh_, included into continual_publish
 curl -i -H 'Content-Type:application/json' -X POST -d '{"publish_prefix":"dh_"}' http://localhost:5000/action/dayahead-optim
 # RUN MPC, with freq=5, prefix=mpc_, Manually publish, excluded from continual_publish loop
-curl -i -H 'Content-Type:application/json' -X POST -d '{"continual_publish":false,"freq":5,"publish_prefix":"mpc_"}' http://localhost:5000/action/naive-mpc-optim
+curl -i -H 'Content-Type:application/json' -X POST -d '{"continual_publish":false,"optimization_time_step":5,"publish_prefix":"mpc_"}' http://localhost:5000/action/naive-mpc-optim
 # Publish MPC output
 curl -i -H 'Content-Type:application/json' -X POST -d {} http://localhost:5000/action/publish-data
 ```
@@ -434,14 +434,14 @@
 
 For users who wish to have full control of exactly when they would like to run a publish and optimization,
 in configuration page/`config_emhass.yaml` :
 ```json
 "continual_publish": false
 ```
 POST action :
 ```bash
 # RUN dayahead, with freq=30 (default), prefix=dh_, save entity
 curl -i -H 'Content-Type:application/json' -X POST -d '{"entity_save": true, "publish_prefix":"dh_"}' http://localhost:5000/action/dayahead-optim
 # RUN MPC, with freq=5, prefix=mpc_, save entity
-curl -i -H 'Content-Type:application/json' -X POST -d '{"entity_save": true", "freq":5,"publish_prefix":"mpc_"}' http://localhost:5000/action/naive-mpc-optim
+curl -i -H 'Content-Type:application/json' -X POST -d '{"entity_save": true, "optimization_time_step":5,"publish_prefix":"mpc_"}' http://localhost:5000/action/naive-mpc-optim
 ```
 You can then reference these .json 
saved entities via their `publish_prefix`. Include the same `publish_prefix` in the `publish_data` action:
 
 ```bash
@@ -554,7 +554,7 @@ curl -i -H 'Content-Type:application/json' -X POST -d '{"pv_power_forecast":[0, 
 ```
 *Example with: `def_total_hours`, `def_start_timestep`, `def_end_timestep`.*
 ```bash
-curl -i -H 'Content-Type:application/json' -X POST -d '{"pv_power_forecast":[0, 70, 141.22, 246.18, 513.5, 753.27, 1049.89, 1797.93, 1697.3, 3078.93], "prediction_horizon":10, "soc_init":0.5,"soc_final":0.6,"def_total_hours":[1,3],"def_start_timestep":[0,3],"def_end_timestep":[0,6]}' http://localhost:5000/action/naive-mpc-optim
+curl -i -H 'Content-Type:application/json' -X POST -d '{"pv_power_forecast":[0, 70, 141.22, 246.18, 513.5, 753.27, 1049.89, 1797.93, 1697.3, 3078.93], "prediction_horizon":10, "soc_init":0.5,"soc_final":0.6,"operating_hours_of_each_deferrable_load":[1,3],"start_timesteps_of_each_deferrable_load":[0,3],"end_timesteps_of_each_deferrable_load":[0,6]}' http://localhost:5000/action/naive-mpc-optim
 ```
 
 ## A machine learning forecaster
diff --git a/config.json b/config.json
new file mode 100644
index 00000000..cabdca8e
--- /dev/null
+++ b/config.json
@@ -0,0 +1,164 @@
+{
+    "logging_level": "INFO",
+    "costfun": "profit",
+    "optimization_time_step": 30,
+    "historic_days_to_retrieve": 2,
+    "method_ts_round": "nearest",
+    "continual_publish": false,
+    "data_path": "default",
+    "set_total_pv_sell": false,
+    "lp_solver": "COIN_CMD",
+    "lp_solver_path": "/usr/bin/cbc",
+    "set_nocharge_from_grid": false,
+    "set_nodischarge_to_grid": true,
+    "set_battery_dynamic": false,
+    "battery_dynamic_max": 0.9,
+    "battery_dynamic_min": -0.9,
+    "weight_battery_discharge": 1.0,
+    "weight_battery_charge": 1.0,
+    "sensor_power_photovoltaics": "sensor.power_photovoltaics",
+    "sensor_power_load_no_var_loads": "sensor.power_load_no_var_loads",
+    "list_sensor_replace_zero": [
+        {
+            "sensor_replace_zero": "sensor.power_photovoltaics"
+        },
+        {
+            "sensor_replace_zero": "sensor.power_load_no_var_loads"
+        }
+    ],
+    "list_sensor_linear_interp": [
+        {
+            "sensor_linear_interp": "sensor.power_photovoltaics"
+        },
+        {
+            "sensor_linear_interp": "sensor.power_load_no_var_loads"
+        }
+    ],
+    "load_negative": false,
+    "set_zero_min": true,
+    "number_of_deferrable_loads": 2,
+    "list_nominal_power_of_deferrable_loads": [
+        {
+            "nominal_power_of_deferrable_loads": 3000.0
+        },
+        {
+            "nominal_power_of_deferrable_loads": 750.0
+        }
+    ],
+    "list_operating_hours_of_each_deferrable_load": [
+        {
+            "operating_hours_of_each_deferrable_load": 4
+        },
+        {
+            "operating_hours_of_each_deferrable_load": 0
+        }
+    ],
+    "weather_forecast_method": "scrapper",
+    "load_forecast_method": "naive",
+    "delta_forecast_daily": 1,
+    "load_cost_forecast_method": "hp_hc_periods",
+    "list_start_timesteps_of_each_deferrable_load": [
+        {
+            "start_timesteps_of_each_deferrable_load": 0
+        },
+        {
+            "start_timesteps_of_each_deferrable_load": 0
+        }
+    ],
+    "list_end_timesteps_of_each_deferrable_load": [
+        {
+            "end_timesteps_of_each_deferrable_load": 0
+        },
+        {
+            "end_timesteps_of_each_deferrable_load": 0
+        }
+    ],
+    "list_peak_hours_periods_start_hours": [
+        {
+            "peak_hours_periods_start_hours": "02:54"
+        },
+        {
+            "peak_hours_periods_start_hours": "17:24"
+        }
+    ],
+    "list_peak_hours_periods_end_hours": [
+        {
+            "peak_hours_periods_end_hours": "15:24"
+        },
+        {
+            "peak_hours_periods_end_hours": "20:54"
+        }
+    ],
+    "list_treat_deferrable_load_as_semi_cont": [
+        {
+            "treat_deferrable_load_as_semi_cont": true
+        },
+        {
+            "treat_deferrable_load_as_semi_cont": true
+        }
], + "list_set_deferrable_load_single_constant": [ + { + "set_deferrable_load_single_constant": false + }, + { + "set_deferrable_load_single_constant": false + } + ], + "list_set_deferrable_startup_penalty": [ + { + "set_deferrable_startup_penalty": 0.0 + }, + { + "set_deferrable_startup_penalty": 0.0 + } + ], + "load_peak_hours_cost": 0.1907, + "load_offpeak_hours_cost": 0.1419, + "production_price_forecast_method": "constant", + "photovoltaic_production_sell_price": 0.1419, + "maximum_power_from_grid": 9000, + "maximum_power_to_grid": 9000, + "list_pv_module_model": [ + { + "pv_module_model": "CSUN_Eurasia_Energy_Systems_Industry_and_Trade_CSUN295_60M" + } + ], + "list_pv_inverter_model": [ + { + "pv_inverter_model": "Fronius_International_GmbH__Fronius_Primo_5_0_1_208_240__240V_" + } + ], + "list_surface_tilt": [ + { + "surface_tilt": 30 + } + ], + "list_surface_azimuth": [ + { + "surface_azimuth": 205 + } + ], + "list_modules_per_string": [ + { + "modules_per_string": 16 + } + ], + "list_strings_per_inverter": [ + { + "strings_per_inverter": 1 + } + ], + "inverter_is_hybrid": false, + "compute_curtailment": false, + "set_use_battery": false, + "battery_discharge_power_max": 1000, + "battery_charge_power_max": 1000, + "battery_discharge_efficiency": 0.95, + "battery_charge_efficiency": 0.95, + "battery_nominal_energy_capacity": 5000, + "battery_minimum_state_of_charge": 0.3, + "battery_maximum_state_of_charge": 0.9, + "battery_target_state_of_charge": 0.6 +} diff --git a/data/associations.csv b/data/associations.csv new file mode 100644 index 00000000..511e54e0 --- /dev/null +++ b/data/associations.csv @@ -0,0 +1,61 @@ +config,legacy_parameter_name,parameter,list_name +retrieve_hass_conf,freq,optimization_time_step +retrieve_hass_conf,days_to_retrieve,historic_days_to_retrieve +retrieve_hass_conf,var_PV,sensor_power_photovoltaics +retrieve_hass_conf,var_load,sensor_power_load_no_var_loads +retrieve_hass_conf,load_negative,load_negative +retrieve_hass_conf,set_zero_min,set_zero_min +retrieve_hass_conf,var_replace_zero,sensor_replace_zero,list_sensor_replace_zero +retrieve_hass_conf,var_interp,sensor_linear_interp,list_sensor_linear_interp +retrieve_hass_conf,method_ts_round,method_ts_round +retrieve_hass_conf,continual_publish,continual_publish +params_secrets,time_zone,time_zone +params_secrets,lat,Latitude +params_secrets,lon,Longitude +params_secrets,alt,Altitude +optim_conf,set_use_battery,set_use_battery +optim_conf,num_def_loads,number_of_deferrable_loads +optim_conf,P_deferrable_nom,nominal_power_of_deferrable_loads,list_nominal_power_of_deferrable_loads +optim_conf,def_total_hours,operating_hours_of_each_deferrable_load,list_operating_hours_of_each_deferrable_load +optim_conf,treat_def_as_semi_cont,treat_deferrable_load_as_semi_cont,list_treat_deferrable_load_as_semi_cont +optim_conf,set_def_constant,set_deferrable_load_single_constant,list_set_deferrable_load_single_constant +optim_conf,def_start_penalty,set_deferrable_startup_penalty,list_set_deferrable_startup_penalty +optim_conf,delta_forecast,delta_forecast_daily +optim_conf,load_forecast_method,load_forecast_method +optim_conf,load_cost_forecast_method,load_cost_forecast_method +optim_conf,load_cost_hp,load_peak_hours_cost +optim_conf,load_cost_hc,load_offpeak_hours_cost +optim_conf,prod_price_forecast_method,production_price_forecast_method +optim_conf,prod_sell_price,photovoltaic_production_sell_price +optim_conf,set_total_pv_sell,set_total_pv_sell +optim_conf,lp_solver,lp_solver +optim_conf,lp_solver_path,lp_solver_path 
+optim_conf,set_nocharge_from_grid,set_nocharge_from_grid +optim_conf,set_nodischarge_to_grid,set_nodischarge_to_grid +optim_conf,set_battery_dynamic,set_battery_dynamic +optim_conf,battery_dynamic_max,battery_dynamic_max +optim_conf,battery_dynamic_min,battery_dynamic_min +optim_conf,weight_battery_discharge,weight_battery_discharge +optim_conf,weight_battery_charge,weight_battery_charge +optim_conf,weather_forecast_method,weather_forecast_method +optim_conf,def_start_timestep,start_timesteps_of_each_deferrable_load,list_start_timesteps_of_each_deferrable_load +optim_conf,def_end_timestep,end_timesteps_of_each_deferrable_load,list_end_timesteps_of_each_deferrable_load +optim_conf,list_hp_periods,load_peak_hour_periods +plant_conf,P_from_grid_max,maximum_power_from_grid +plant_conf,P_to_grid_max,maximum_power_to_grid +plant_conf,module_model,pv_module_model,list_pv_module_model +plant_conf,inverter_model,pv_inverter_model,list_pv_inverter_model +plant_conf,surface_tilt,surface_tilt,list_surface_tilt +plant_conf,surface_azimuth,surface_azimuth,list_surface_azimuth +plant_conf,modules_per_string,modules_per_string,list_modules_per_string +plant_conf,strings_per_inverter,strings_per_inverter,list_strings_per_inverter +plant_conf,inverter_is_hybrid,inverter_is_hybrid +plant_conf,compute_curtailment,compute_curtailment +plant_conf,Pd_max,battery_discharge_power_max +plant_conf,Pc_max,battery_charge_power_max +plant_conf,eta_disch,battery_discharge_efficiency +plant_conf,eta_ch,battery_charge_efficiency +plant_conf,Enom,battery_nominal_energy_capacity +plant_conf,SOCmin,battery_minimum_state_of_charge +plant_conf,SOCmax,battery_maximum_state_of_charge +plant_conf,SOCtarget,battery_target_state_of_charge \ No newline at end of file diff --git a/data/config_defaults.json b/data/config_defaults.json new file mode 100644 index 00000000..5e26274d --- /dev/null +++ b/data/config_defaults.json @@ -0,0 +1,163 @@ +{ + "logging_level": "INFO", + "costfun": "profit", + "optimization_time_step": 30, + "historic_days_to_retrieve": 2, + "method_ts_round": "nearest", + "continual_publish": false, + "data_path": "default", + "set_total_pv_sell": false, + "lp_solver": "COIN_CMD", + "lp_solver_path": "/usr/bin/cbc", + "set_nocharge_from_grid": false, + "set_nodischarge_to_grid": true, + "set_battery_dynamic": false, + "battery_dynamic_max": 0.9, + "battery_dynamic_min": -0.9, + "weight_battery_discharge": 1.0, + "weight_battery_charge": 1.0, + "sensor_power_photovoltaics": "sensor.power_photovoltaics", + "sensor_power_load_no_var_loads": "sensor.power_load_no_var_loads", + "list_sensor_replace_zero": [ + { + "sensor_replace_zero": "sensor.power_photovoltaics" + }, + { + "sensor_replace_zero": "sensor.power_load_no_var_loads" + } + ], + "list_sensor_linear_interp": [ + { + "sensor_linear_interp": "sensor.power_photovoltaics" + }, + { + "sensor_linear_interp": "sensor.power_load_no_var_loads" + } + ], + "load_negative": false, + "set_zero_min": true, + "number_of_deferrable_loads": 2, + "list_nominal_power_of_deferrable_loads": [ + { + "nominal_power_of_deferrable_loads": 3000.0 + }, + { + "nominal_power_of_deferrable_loads": 750.0 + } + ], + "list_operating_hours_of_each_deferrable_load": [ + { + "operating_hours_of_each_deferrable_load": 4 + }, + { + "operating_hours_of_each_deferrable_load": 0 + } + ], + "weather_forecast_method": "scrapper", + "load_forecast_method": "naive", + "delta_forecast_daily": 1, + "load_cost_forecast_method": "hp_hc_periods", + "list_start_timesteps_of_each_deferrable_load": [ + { + 
"start_timesteps_of_each_deferrable_load": 0 + }, + { + "start_timesteps_of_each_deferrable_load": 0 + } + ], + "list_end_timesteps_of_each_deferrable_load": [ + { + "end_timesteps_of_each_deferrable_load": 0 + }, + { + "end_timesteps_of_each_deferrable_load": 0 + } + ], + "list_peak_hours_periods_start_hours": [ + { + "peak_hours_periods_start_hours": "02:54" + }, + { + "peak_hours_periods_start_hours": "17:24" + } + ], + "list_peak_hours_periods_end_hours": [ + { + "peak_hours_periods_end_hours": "15:24" + }, + { + "peak_hours_periods_end_hours": "20:54" + } + ], + "list_treat_deferrable_load_as_semi_cont": [ + { + "treat_deferrable_load_as_semi_cont": true + }, + { + "treat_deferrable_load_as_semi_cont": true + } + ], + "list_set_deferrable_load_single_constant": [ + { + "set_deferrable_load_single_constant": false + }, + { + "set_deferrable_load_single_constant": false + } + ], + "list_set_deferrable_startup_penalty": [ + { + "set_deferrable_startup_penalty": 0.0 + }, + { + "set_deferrable_startup_penalty": 0.0 + } + ], + "load_peak_hours_cost": 0.1907, + "load_offpeak_hours_cost": 0.1419, + "production_price_forecast_method": "constant", + "photovoltaic_production_sell_price": 0.1419, + "maximum_power_from_grid": 9000, + "maximum_power_to_grid": 9000, + "list_pv_module_model": [ + { + "pv_module_model": "CSUN_Eurasia_Energy_Systems_Industry_and_Trade_CSUN295_60M" + } + ], + "list_pv_inverter_model": [ + { + "pv_inverter_model": "Fronius_International_GmbH__Fronius_Primo_5_0_1_208_240__240V_" + } + ], + "list_surface_tilt": [ + { + "surface_tilt": 30 + } + ], + "list_surface_azimuth": [ + { + "surface_azimuth": 205 + } + ], + "list_modules_per_string": [ + { + "modules_per_string": 16 + } + ], + "list_strings_per_inverter": [ + { + "strings_per_inverter": 1 + } + ], + "inverter_is_hybrid": false, + "compute_curtailment": false, + "set_use_battery": false, + "battery_discharge_power_max": 1000, + "battery_charge_power_max": 1000, + "battery_discharge_efficiency": 0.95, + "battery_charge_efficiency": 0.95, + "battery_nominal_energy_capacity": 5000, + "battery_minimum_state_of_charge": 0.3, + "battery_maximum_state_of_charge": 0.9, + "battery_target_state_of_charge": 0.6 +} diff --git a/docs/lpems.md b/docs/lpems.md index 4c56eb29..59ad8481 100644 --- a/docs/lpems.md +++ b/docs/lpems.md @@ -238,7 +238,7 @@ Either in the Home Assistant add-on config screen: Either as runtime parameter: ``` -curl -i -H 'Content-Type:application/json' -X POST -d '{"prediction_horizon":30, "def_total_hours":[4,2],"def_start_timestep":[4,0],"def_end_timestep":[27,23]}' http://localhost:5000/action/naive-mpc-optim +curl -i -H 'Content-Type:application/json' -X POST -d '{"prediction_horizon":30, 'operating_hours_of_each_deferrable_load':[4,2],'start_timesteps_of_each_deferrable_load':[4,0],'end_timesteps_of_each_deferrable_load':[27,23]}' http://localhost:5000/action/naive-mpc-optim ``` Please note that the proposed deferrable load time windows will be submitted to a validation step & can be automatically corrected. diff --git a/docs/mlforecaster.md b/docs/mlforecaster.md index 5933bae8..4ca358ef 100644 --- a/docs/mlforecaster.md +++ b/docs/mlforecaster.md @@ -37,7 +37,7 @@ The minimum number of `days_to_retrieve` is hard coded to 9 by default. 
However, The default values for these parameters are: ```yaml runtimeparams = { - "days_to_retrieve": 9, + 'historic_days_to_retrieve': 9, "model_type": "load_forecast", "var_model": "sensor.power_load_no_var_loads", "sklearn_model": "KNeighborsRegressor", diff --git a/options.json b/options.json index 736e34b7..c9c86def 100644 --- a/options.json +++ b/options.json @@ -1,172 +1,12 @@ { "hass_url": "empty", "long_lived_token": "empty", - "logging_level": "DEBUG", - "costfun": "profit", - "optimization_time_step": 30, - "historic_days_to_retrieve": 2, - "method_ts_round": "nearest", - "continual_publish": false, "optional_solcast_api_key": "empty", "optional_solcast_rooftop_id": "empty", - "optional_solar_forecast_kwp": 5, - "data_path": "default", - "set_total_pv_sell": false, - "lp_solver": "COIN_CMD", - "lp_solver_path": "/usr/bin/cbc", - "set_nocharge_from_grid": false, - "set_nodischarge_to_grid": true, - "set_battery_dynamic": false, - "battery_dynamic_max": 0.9, - "battery_dynamic_min": -0.9, - "weight_battery_discharge": 1.0, - "weight_battery_charge": 1.0, - "sensor_power_photovoltaics": "sensor.power_photovoltaics", - "sensor_power_load_no_var_loads": "sensor.power_load_no_var_loads", - "list_sensor_replace_zero": [ - { - "sensor_replace_zero": "sensor.power_photovoltaics" - }, - { - "sensor_replace_zero": "sensor.power_load_no_var_loads" - } - ], - "list_sensor_linear_interp": [ - { - "sensor_linear_interp": "sensor.power_photovoltaics" - }, - { - "sensor_linear_interp": "sensor.power_load_no_var_loads" - } - ], - "load_negative": false, - "set_zero_min": true, - "number_of_deferrable_loads": 2, - "list_nominal_power_of_deferrable_loads": [ - { - "nominal_power_of_deferrable_loads": 3000 - }, - { - "nominal_power_of_deferrable_loads": 750 - } - ], - "list_operating_hours_of_each_deferrable_load": [ - { - "operating_hours_of_each_deferrable_load": 4 - }, - { - "operating_hours_of_each_deferrable_load": 0 - } - ], - "weather_forecast_method": "scrapper", + "optional_solar_forecast_kwp": 0, "time_zone": "Europe/Paris", - "Latitude": 45.83, - "Longitude": 6.86, - "Altitude": 4807.8, - "load_forecast_method": "naive", - "delta_forecast_daily": 1, - "load_cost_forecast_method": "hp_hc_periods", - "list_start_timesteps_of_each_deferrable_load": [ - { - "start_timesteps_of_each_deferrable_load": 0 - }, - { - "start_timesteps_of_each_deferrable_load": 0 - } - ], - "list_end_timesteps_of_each_deferrable_load": [ - { - "end_timesteps_of_each_deferrable_load": 0 - }, - { - "end_timesteps_of_each_deferrable_load": 0 - } - ], - "list_peak_hours_periods_start_hours": [ - { - "peak_hours_periods_start_hours": "02:54" - }, - { - "peak_hours_periods_start_hours": "17:24" - } - ], - "list_peak_hours_periods_end_hours": [ - { - "peak_hours_periods_end_hours": "15:24" - }, - { - "peak_hours_periods_end_hours": "20:54" - } - ], - "list_treat_deferrable_load_as_semi_cont": [ - { - "treat_deferrable_load_as_semi_cont": true - }, - { - "treat_deferrable_load_as_semi_cont": true - } - ], - "list_set_deferrable_load_single_constant": [ - { - "set_deferrable_load_single_constant": false - }, - { - "set_deferrable_load_single_constant": false - } - ], - "list_set_deferrable_startup_penalty": [ - { - "set_deferrable_startup_penalty": 0.0 - }, - { - "set_deferrable_startup_penalty": 0.0 - } - ], - "load_peak_hours_cost": 0.1907, - "load_offpeak_hours_cost": 0.1419, - "production_price_forecast_method": "constant", - "photovoltaic_production_sell_price": 0.1419, - "maximum_power_from_grid": 9000, - 
"maximum_power_to_grid": 9000, - "list_pv_module_model": [ - { - "pv_module_model": "CSUN_Eurasia_Energy_Systems_Industry_and_Trade_CSUN295_60M" - } - ], - "list_pv_inverter_model": [ - { - "pv_inverter_model": "Fronius_International_GmbH__Fronius_Primo_5_0_1_208_240__240V_" - } - ], - "list_surface_tilt": [ - { - "surface_tilt": 30 - } - ], - "list_surface_azimuth": [ - { - "surface_azimuth": 205 - } - ], - "list_modules_per_string": [ - { - "modules_per_string": 16 - } - ], - "list_strings_per_inverter": [ - { - "strings_per_inverter": 1 - } - ], - "inverter_is_hybrid": false, - "compute_curtailment": false, - "set_use_battery": false, - "battery_discharge_power_max": 1000, - "battery_charge_power_max": 1000, - "battery_discharge_efficiency": 0.95, - "battery_charge_efficiency": 0.95, - "battery_nominal_energy_capacity": 5000, - "battery_minimum_state_of_charge": 0.3, - "battery_maximum_state_of_charge": 0.9, - "battery_target_state_of_charge": 0.6 -} + "Latitude": 0, + "Longitude": 0, + "Altitude": 0, + "data_path": "default" +} \ No newline at end of file diff --git a/secrets_emhass(example).yaml b/secrets_emhass(example).yaml index 312ae7b3..40c203b8 100644 --- a/secrets_emhass(example).yaml +++ b/secrets_emhass(example).yaml @@ -4,9 +4,9 @@ hass_url: https://myhass.duckdns.org/ long_lived_token: thatverylongtokenhere time_zone: Europe/Paris -lat: 45.83 -lon: 6.86 -alt: 4807.8 +Latitude: 45.83 +Longitude: 6.86 +Altitude: 4807.8 solcast_api_key: yoursecretsolcastapikey solcast_rooftop_id: yourrooftopid solar_forecast_kwp: 5 \ No newline at end of file diff --git a/src/emhass/command_line.py b/src/emhass/command_line.py index 707510e4..000e800f 100644 --- a/src/emhass/command_line.py +++ b/src/emhass/command_line.py @@ -3,6 +3,7 @@ import argparse import os +import re import time import pathlib import logging @@ -51,14 +52,16 @@ def set_input_data_dict(emhass_conf: dict, costfun: str, """ logger.info("Setting up needed data") # Parsing yaml - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse( - emhass_conf, use_secrets=not(get_data_from_file), params=params) + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params,logger) + if type(retrieve_hass_conf) is bool: + return False + # Treat runtimeparams params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( runtimeparams, params, retrieve_hass_conf, optim_conf, plant_conf, set_type, logger) # Define main objects rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'], - retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'], + retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'], params, emhass_conf, logger, get_data_from_file=get_data_from_file) fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, params, emhass_conf, logger, get_data_from_file=get_data_from_file) @@ -71,24 +74,24 @@ def set_input_data_dict(emhass_conf: dict, costfun: str, if get_data_from_file: with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp: rh.df_final, days_list, var_list = pickle.load(inp) - retrieve_hass_conf['var_load'] = str(var_list[0]) - retrieve_hass_conf['var_PV'] = str(var_list[1]) - retrieve_hass_conf['var_interp'] = [ - retrieve_hass_conf['var_PV'], retrieve_hass_conf['var_load']] - retrieve_hass_conf['var_replace_zero'] = [ - retrieve_hass_conf['var_PV']] + retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(var_list[0]) + retrieve_hass_conf['sensor_power_photovoltaics'] = str(var_list[1]) + 
retrieve_hass_conf['sensor_linear_interp'] = [ + retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']] + retrieve_hass_conf['sensor_replace_zero'] = [ + retrieve_hass_conf['sensor_power_photovoltaics']] else: days_list = utils.get_days_list( - retrieve_hass_conf["days_to_retrieve"]) - var_list = [retrieve_hass_conf["var_load"], - retrieve_hass_conf["var_PV"]] + retrieve_hass_conf['historic_days_to_retrieve']) + var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'], + retrieve_hass_conf['sensor_power_photovoltaics']] if not rh.get_data(days_list, var_list, minimal_response=False, significant_changes_only=False): return False - if not rh.prepare_data(retrieve_hass_conf["var_load"], - load_negative=retrieve_hass_conf["load_negative"], - set_zero_min=retrieve_hass_conf["set_zero_min"], - var_replace_zero=retrieve_hass_conf["var_replace_zero"], - var_interp=retrieve_hass_conf["var_interp"]): + if not rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'], + load_negative=retrieve_hass_conf['load_negative'], + set_zero_min=retrieve_hass_conf['set_zero_min'], + var_replace_zero=retrieve_hass_conf['sensor_replace_zero'], + var_interp=retrieve_hass_conf['sensor_linear_interp']): return False df_input_data = rh.df_final.copy() # What we don't need for this type of action @@ -96,7 +99,7 @@ def set_input_data_dict(emhass_conf: dict, costfun: str, elif set_type == "dayahead-optim": # Get PV and load forecasts df_weather = fcst.get_weather_forecast( - method=optim_conf["weather_forecast_method"]) + method=optim_conf['weather_forecast_method']) if isinstance(df_weather, bool) and not df_weather: return False P_PV_forecast = fcst.get_power_from_weather(df_weather) @@ -122,23 +125,23 @@ def set_input_data_dict(emhass_conf: dict, costfun: str, if get_data_from_file: with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp: rh.df_final, days_list, var_list = pickle.load(inp) - retrieve_hass_conf['var_load'] = str(var_list[0]) - retrieve_hass_conf['var_PV'] = str(var_list[1]) - retrieve_hass_conf['var_interp'] = [ - retrieve_hass_conf['var_PV'], retrieve_hass_conf['var_load']] - retrieve_hass_conf['var_replace_zero'] = [ - retrieve_hass_conf['var_PV']] + retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(var_list[0]) + retrieve_hass_conf['sensor_power_photovoltaics'] = str(var_list[1]) + retrieve_hass_conf['sensor_linear_interp'] = [ + retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']] + retrieve_hass_conf['sensor_replace_zero'] = [ + retrieve_hass_conf['sensor_power_photovoltaics']] else: days_list = utils.get_days_list(1) - var_list = [retrieve_hass_conf["var_load"], - retrieve_hass_conf["var_PV"]] + var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'], + retrieve_hass_conf['sensor_power_photovoltaics']] if not rh.get_data(days_list, var_list, minimal_response=False, significant_changes_only=False): return False - if not rh.prepare_data(retrieve_hass_conf["var_load"], - load_negative=retrieve_hass_conf["load_negative"], - set_zero_min=retrieve_hass_conf["set_zero_min"], - var_replace_zero=retrieve_hass_conf["var_replace_zero"], - var_interp=retrieve_hass_conf["var_interp"]): + if not rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'], + load_negative=retrieve_hass_conf['load_negative'], + set_zero_min=retrieve_hass_conf['set_zero_min'], + var_replace_zero=retrieve_hass_conf['sensor_replace_zero'], + 
var_interp=retrieve_hass_conf['sensor_linear_interp']): return False df_input_data = rh.df_final.copy() # Get PV and load forecasts @@ -167,7 +170,7 @@ def set_input_data_dict(emhass_conf: dict, costfun: str, P_PV_forecast, P_load_forecast = None, None params = json.loads(params) # Retrieve data from hass - days_to_retrieve = params["passed_data"]["days_to_retrieve"] + days_to_retrieve = params["passed_data"]['historic_days_to_retrieve'] model_type = params["passed_data"]["model_type"] var_model = params["passed_data"]["var_model"] if get_data_from_file: @@ -266,8 +269,7 @@ def weather_forecast_cache(emhass_conf: dict, params: str, """ # Parsing yaml - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse( - emhass_conf, use_secrets=True, params=params) + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params, logger) # Treat runtimeparams params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( @@ -285,7 +287,7 @@ def weather_forecast_cache(emhass_conf: dict, params: str, fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, params, emhass_conf, logger) - result = fcst.get_weather_forecast(optim_conf["weather_forecast_method"]) + result = fcst.get_weather_forecast(optim_conf['weather_forecast_method']) if isinstance(result, bool) and not result: return False @@ -318,7 +320,7 @@ def perfect_forecast_optim(input_data_dict: dict, logger: logging.Logger, if isinstance(df_input_data, bool) and not df_input_data: return False df_input_data = input_data_dict['fcst'].get_prod_price_forecast( - df_input_data, method=input_data_dict['fcst'].optim_conf['prod_price_forecast_method'], + df_input_data, method=input_data_dict['fcst'].optim_conf['production_price_forecast_method'], list_and_perfect=True) if isinstance(df_input_data, bool) and not df_input_data: return False @@ -339,7 +341,7 @@ def perfect_forecast_optim(input_data_dict: dict, logger: logging.Logger, params = input_data_dict["params"] # if continual_publish, save perfect results to data_path/entities json - if input_data_dict["retrieve_hass_conf"].get("continual_publish",False) or params["passed_data"].get("entity_save",False): + if input_data_dict["retrieve_hass_conf"].get('continual_publish',False) or params["passed_data"].get("entity_save",False): #Trigger the publish function, save entity data and not post to HA publish_data(input_data_dict, logger, entity_save=True, dont_post=True) @@ -372,7 +374,7 @@ def dayahead_forecast_optim(input_data_dict: dict, logger: logging.Logger, return False df_input_data_dayahead = input_data_dict['fcst'].get_prod_price_forecast( df_input_data_dayahead, - method=input_data_dict['fcst'].optim_conf['prod_price_forecast_method']) + method=input_data_dict['fcst'].optim_conf['production_price_forecast_method']) if isinstance(df_input_data_dayahead, bool) and not df_input_data_dayahead: return False if "outdoor_temperature_forecast" in input_data_dict["params"]["passed_data"]: @@ -398,7 +400,7 @@ def dayahead_forecast_optim(input_data_dict: dict, logger: logging.Logger, params = input_data_dict["params"] # if continual_publish, save day_ahead results to data_path/entities json - if input_data_dict["retrieve_hass_conf"].get("continual_publish",False) or params["passed_data"].get("entity_save",False): + if input_data_dict["retrieve_hass_conf"].get('continual_publish',False) or params["passed_data"].get("entity_save",False): #Trigger the publish function, save entity data and not post to HA publish_data(input_data_dict, logger, entity_save=True, 
dont_post=True) @@ -430,7 +432,7 @@ def naive_mpc_optim(input_data_dict: dict, logger: logging.Logger, if isinstance(df_input_data_dayahead, bool) and not df_input_data_dayahead: return False df_input_data_dayahead = input_data_dict['fcst'].get_prod_price_forecast( - df_input_data_dayahead, method=input_data_dict['fcst'].optim_conf['prod_price_forecast_method']) + df_input_data_dayahead, method=input_data_dict['fcst'].optim_conf['production_price_forecast_method']) if isinstance(df_input_data_dayahead, bool) and not df_input_data_dayahead: return False if "outdoor_temperature_forecast" in input_data_dict["params"]["passed_data"]: @@ -440,9 +442,9 @@ def naive_mpc_optim(input_data_dict: dict, logger: logging.Logger, prediction_horizon = input_data_dict["params"]["passed_data"]["prediction_horizon"] soc_init = input_data_dict["params"]["passed_data"]["soc_init"] soc_final = input_data_dict["params"]["passed_data"]["soc_final"] - def_total_hours = input_data_dict["params"]["passed_data"]["def_total_hours"] - def_start_timestep = input_data_dict["params"]["passed_data"]["def_start_timestep"] - def_end_timestep = input_data_dict["params"]["passed_data"]["def_end_timestep"] + def_total_hours = input_data_dict["params"]["passed_data"]['operating_hours_of_each_deferrable_load'] + def_start_timestep = input_data_dict["params"]["passed_data"]['start_timesteps_of_each_deferrable_load'] + def_end_timestep = input_data_dict["params"]["passed_data"]['end_timesteps_of_each_deferrable_load'] opt_res_naive_mpc = input_data_dict["opt"].perform_naive_mpc_optim( df_input_data_dayahead, input_data_dict["P_PV_forecast"], input_data_dict["P_load_forecast"], prediction_horizon, soc_init, soc_final, def_total_hours, @@ -465,7 +467,7 @@ def naive_mpc_optim(input_data_dict: dict, logger: logging.Logger, params = input_data_dict["params"] # if continual_publish, save mpc results to data_path/entities json - if input_data_dict["retrieve_hass_conf"].get("continual_publish",False) or params["passed_data"].get("entity_save",False): + if input_data_dict["retrieve_hass_conf"].get('continual_publish',False) or params["passed_data"].get("entity_save",False): #Trigger the publish function, save entity data and not post to HA publish_data(input_data_dict, logger, entity_save=True, dont_post=True) @@ -568,11 +570,11 @@ def forecast_model_predict(input_data_dict: dict, logger: logging.Logger, now_precise = datetime.now( input_data_dict["retrieve_hass_conf"]["time_zone"] ).replace(second=0, microsecond=0) - if input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "nearest": + if input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "nearest": idx_closest = predictions.index.get_indexer([now_precise], method="nearest")[0] - elif input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "first": + elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "first": idx_closest = predictions.index.get_indexer([now_precise], method="ffill")[0] - elif input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "last": + elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "last": idx_closest = predictions.index.get_indexer([now_precise], method="bfill")[0] if idx_closest == -1: idx_closest = predictions.index.get_indexer([now_precise], method="nearest")[0] @@ -749,10 +751,13 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, """ logger.info("Publishing data to HASS instance") - if not isinstance(input_data_dict["params"],dict): - params = json.loads(input_data_dict["params"]) - else: - 
params = input_data_dict["params"] + if input_data_dict: + if not isinstance(input_data_dict.get("params",{}),dict): + params = json.loads(input_data_dict["params"]) + else: + params = input_data_dict.get("params",{}) + + # Check if a day ahead optimization has been performed (read CSV file) if save_data_to_file: today = datetime.now(timezone.utc).replace( @@ -799,17 +804,17 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, opt_res_latest = pd.read_csv( input_data_dict['emhass_conf']['data_path'] / filename, index_col='timestamp') opt_res_latest.index = pd.to_datetime(opt_res_latest.index) - opt_res_latest.index.freq = input_data_dict["retrieve_hass_conf"]["freq"] + opt_res_latest.index.freq = input_data_dict["retrieve_hass_conf"]['optimization_time_step'] # Estimate the current index now_precise = datetime.now( input_data_dict["retrieve_hass_conf"]["time_zone"] ).replace(second=0, microsecond=0) - if input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "nearest": + if input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "nearest": idx_closest = opt_res_latest.index.get_indexer([now_precise], method="nearest")[0] - elif input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "first": + elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "first": idx_closest = opt_res_latest.index.get_indexer( [now_precise], method="ffill")[0] - elif input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "last": + elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "last": idx_closest = opt_res_latest.index.get_indexer( [now_precise], method="bfill")[0] if idx_closest == -1: @@ -877,7 +882,7 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, custom_deferrable_forecast_id = params["passed_data"][ "custom_deferrable_forecast_id" ] - for k in range(input_data_dict["opt"].optim_conf["num_def_loads"]): + for k in range(input_data_dict["opt"].optim_conf['number_of_deferrable_loads']): if "P_deferrable{}".format(k) not in opt_res_latest.columns: logger.error( "P_deferrable{}".format(k) @@ -900,7 +905,7 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, custom_predicted_temperature_id = params["passed_data"][ "custom_predicted_temperature_id" ] - for k in range(input_data_dict["opt"].optim_conf["num_def_loads"]): + for k in range(input_data_dict["opt"].optim_conf['number_of_deferrable_loads']): if "def_load_config" in input_data_dict["opt"].optim_conf.keys(): if "thermal_config" in input_data_dict["opt"].optim_conf["def_load_config"][k]: input_data_dict["rh"].post_data( @@ -916,7 +921,7 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, ) cols_published = cols_published + ["predicted_temp_heater{}".format(k)] # Publish battery power - if input_data_dict["opt"].optim_conf["set_use_battery"]: + if input_data_dict["opt"].optim_conf['set_use_battery']: if "P_batt" not in opt_res_latest.columns: logger.error( "P_batt was not found in results DataFrame. 
Optimization task may need to be relaunched or it did not converge to a solution.", @@ -1045,7 +1050,7 @@ def continual_publish(input_data_dict: dict, entity_path: pathlib.Path, logger: """ logger.info("Continual publish thread service started") - freq = input_data_dict['retrieve_hass_conf'].get("freq", pd.to_timedelta(1, "minutes")) + freq = input_data_dict['retrieve_hass_conf'].get('optimization_time_step', pd.to_timedelta(1, "minutes")) entity_path_contents = [] while True: # Sleep for x seconds (using current time as a reference for time left) @@ -1056,7 +1061,7 @@ def continual_publish(input_data_dict: dict, entity_path: pathlib.Path, logger: for entity in entity_path_contents: if entity != "metadata.json": # Call publish_json with entity file, build entity, and publish - publish_json(entity, input_data_dict, entity_path, logger, "continual_publish") + publish_json(entity, input_data_dict, entity_path, logger, 'continual_publish') pass # This function should never return return False @@ -1097,18 +1102,18 @@ def publish_json(entity: dict, input_data_dict: dict, entity_path: pathlib.Path, entity_data.columns = [metadata[entity_id]["name"]] entity_data.index.name = "timestamp" entity_data.index = pd.to_datetime(entity_data.index).tz_convert(input_data_dict["retrieve_hass_conf"]["time_zone"]) - entity_data.index.freq = pd.to_timedelta(int(metadata[entity_id]["freq"]), "minutes") + entity_data.index.freq = pd.to_timedelta(int(metadata[entity_id]['optimization_time_step']), "minutes") # Calculate the current state value - if input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "nearest": + if input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "nearest": idx_closest = entity_data.index.get_indexer([now_precise], method="nearest")[0] - elif input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "first": + elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "first": idx_closest = entity_data.index.get_indexer([now_precise], method="ffill")[0] - elif input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "last": + elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "last": idx_closest = entity_data.index.get_indexer([now_precise], method="bfill")[0] if idx_closest == -1: idx_closest = entity_data.index.get_indexer([now_precise], method="nearest")[0] # Call post data - if reference == "continual_publish": + if reference == 'continual_publish': logger.debug("Auto Published sensor:") logger_levels = "DEBUG" else: @@ -1153,7 +1158,11 @@ def main(): parser.add_argument('--action', type=str, help='Set the desired action, options are: perfect-optim, dayahead-optim,\ naive-mpc-optim, publish-data, forecast-model-fit, forecast-model-predict, forecast-model-tune') parser.add_argument('--config', type=str, - help='Define path to the config.yaml file') + help='Define path to the config.json/defaults.json file') + parser.add_argument('--params', type=str, default=None, + help='String of configuration parameters passed') + parser.add_argument('--associations', type=str, + help='Define path to the associations.csv file') parser.add_argument('--data', type=str, help='Define path to the Data files (.csv & .pkl)') parser.add_argument('--root', type=str, help='Define path emhass root') @@ -1161,19 +1170,20 @@ def main(): help='Define the type of cost function, options are: profit, cost, self-consumption') parser.add_argument('--log2file', type=strtobool, default='False', help='Define if we should log to a file or not') - parser.add_argument('--params', type=str, 
default=None,
-                        help='Configuration parameters passed from data/options.json')
+    parser.add_argument('--secrets', type=str, default=None,
+                        help='Define secret parameter file (secrets_emhass.yaml) path')
     parser.add_argument('--runtimeparams', type=str, default=None,
                         help='Pass runtime optimization parameters as dictionary')
     parser.add_argument('--debug', type=strtobool, default='False',
                         help='Use True for testing purposes')
     args = parser.parse_args()
+
     # The path to the configuration files
     if args.config is not None:
         config_path = pathlib.Path(args.config)
     else:
         config_path = pathlib.Path(
-            str(utils.get_root(__file__, num_parent=2) / 'config_emhass.yaml'))
+            str(utils.get_root(__file__, num_parent=3) / 'config.json'))
     if args.data is not None:
         data_path = pathlib.Path(args.data)
     else:
@@ -1181,30 +1191,47 @@ def main():
     if args.root is not None:
         root_path = pathlib.Path(args.root)
     else:
-        root_path = config_path.parent
+        root_path = utils.get_root(__file__, num_parent=1)
+    if args.associations is not None:
+        associations_path = pathlib.Path(args.associations)
+    else:
+        associations_path = data_path / 'associations.csv'
+
+    defaults_path = data_path / 'config_defaults.json'
+
     emhass_conf = {}
     emhass_conf['config_path'] = config_path
     emhass_conf['data_path'] = data_path
     emhass_conf['root_path'] = root_path
+    emhass_conf['associations_path'] = associations_path
+    emhass_conf['defaults_path'] = defaults_path
     # create logger
     logger, ch = utils.get_logger(
         __name__, emhass_conf, save_to_file=bool(args.log2file))
+
+    # Check paths
     logger.debug("config path: " + str(config_path))
     logger.debug("data path: " + str(data_path))
     logger.debug("root path: " + str(root_path))
-    if not config_path.exists():
-        logger.error(
-            "Could not find config_emhass.yaml file in: " + str(config_path))
+    if not associations_path.exists():
+        logger.error(
+            "Could not find associations.csv file in: " + str(associations_path))
+        logger.error("Try setting associations file path with --associations")
+        return False
+    if not config_path.exists():
+        logger.error(
+            "Could not find config file in: " + str(config_path))
         logger.error("Try setting config file path with --config")
         return False
     if not os.path.isdir(data_path):
-        logger.error("Could not find data foulder in: " + str(data_path))
+        logger.error("Could not find data folder in: " + str(data_path))
         logger.error("Try setting data path with --data")
         return False
-    if not os.path.isdir(root_path / 'src'):
-        logger.error("Could not find emhass/src foulder in: " + str(root_path))
+    if not os.path.isdir(root_path):
+        logger.error("Could not find emhass root folder in: " + str(root_path))
        logger.error("Try setting emhass root path with --root")
         return False
+
     # Additional argument
     try:
         parser.add_argument(
@@ -1217,10 +1244,47 @@ def main():
         logger.info(
             "Version not found for emhass package. Or importlib exited with PackageNotFoundError.",
         )
+    # Setup parameters
+    config = {}
+    # Check if the config file is json or yaml, build the config accordingly
+    config_file_ending = re.findall(r"(?<=\.).*$", str(config_path))
+    if len(config_file_ending) > 0:
+        match config_file_ending[0]:
+            case "json":
+                config = utils.build_config(emhass_conf, logger, defaults_path, config_path)
+            case "yaml" | "yml":
+                config = utils.build_config(emhass_conf, logger, defaults_path, config_path=config_path)
+
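
For orientation, the `associations.csv` file consumed alongside this config build maps each legacy parameter to its config section, its new name, and an optional list wrapper. A minimal sketch of reading it into a rename map, assuming only the four-column layout shown earlier in this patch (the real merge logic lives in `utils.build_config`/`utils.build_params`, which this series does not show in full):

```python
import csv
from pathlib import Path

def load_associations(path: Path) -> dict:
    """Map legacy parameter names to (config section, new name, optional list name)."""
    renames = {}
    with open(path, newline="") as f:
        for row in csv.reader(f):
            if not row or row[0] == "config":  # skip the header row
                continue
            list_name = row[3] if len(row) > 3 else None
            renames[row[1]] = (row[0], row[2], list_name)
    return renames

# e.g. renames["freq"] -> ("retrieve_hass_conf", "optimization_time_step", None)
renames = load_associations(Path("data/associations.csv"))
```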
+    # Obtain secrets from secrets_emhass.yaml
+    params_secrets = {}
+    secrets_path = ""
+    if args.secrets is not None:
+        secrets_path = pathlib.Path(args.secrets)
+    else:
+        secrets_path = pathlib.Path(config_path.parent / 'secrets_emhass.yaml')
+    # Combine secrets from ENV, ARG and the secrets file
+    emhass_conf, built_secrets = utils.build_secrets(emhass_conf, logger, secrets_path=secrets_path)
+    params_secrets.update(built_secrets)
+
+    # Build params
+    params = utils.build_params(emhass_conf, params_secrets, config, logger)
+    if type(params) is bool:
+        raise Exception("An error has occurred while building parameters")
+
+    # Add any argument params
+    if args.params:
+        params.update(json.loads(args.params))
+
     input_data_dict = set_input_data_dict(emhass_conf,
-        args.costfun, args.params, args.runtimeparams, args.action,
+        args.costfun, json.dumps(params), args.runtimeparams, args.action,
         logger, args.debug)
+    if type(input_data_dict) is bool:
+        raise Exception("An error has occurred while creating action objects")
+
     # Perform selected action
     if args.action == "perfect-optim":
         opt_res = perfect_forecast_optim(
diff --git a/src/emhass/forecast.py b/src/emhass/forecast.py
index 7058c220..f974c07d 100644
--- a/src/emhass/forecast.py
+++ b/src/emhass/forecast.py
@@ -132,16 +132,16 @@ def __init__(self, retrieve_hass_conf: dict, optim_conf: dict, plant_conf: dict,
         self.retrieve_hass_conf = retrieve_hass_conf
         self.optim_conf = optim_conf
         self.plant_conf = plant_conf
-        self.freq = self.retrieve_hass_conf['freq']
+        self.freq = self.retrieve_hass_conf['optimization_time_step']
         self.time_zone = self.retrieve_hass_conf['time_zone']
         self.method_ts_round = self.retrieve_hass_conf['method_ts_round']
         self.timeStep = self.freq.seconds/3600 # in hours
         self.time_delta = pd.to_timedelta(opt_time_delta, "hours")
-        self.var_PV = self.retrieve_hass_conf['var_PV']
-        self.var_load = self.retrieve_hass_conf['var_load']
+        self.var_PV = self.retrieve_hass_conf['sensor_power_photovoltaics']
+        self.var_load = self.retrieve_hass_conf['sensor_power_load_no_var_loads']
         self.var_load_new = self.var_load+'_positive'
-        self.lat = self.retrieve_hass_conf['lat']
-        self.lon = self.retrieve_hass_conf['lon']
+        self.lat = self.retrieve_hass_conf['Latitude']
+        self.lon = self.retrieve_hass_conf['Longitude']
         self.emhass_conf = emhass_conf
         self.logger = logger
         self.get_data_from_file = get_data_from_file
@@ -159,7 +159,7 @@ def __init__(self, retrieve_hass_conf: dict, optim_conf: dict, plant_conf: dict,
         self.start_forecast = pd.Timestamp(datetime.now(), tz=self.time_zone).replace(microsecond=0).ceil(freq=self.freq)
         else:
             self.logger.error("Wrong method_ts_round passed parameter")
-        self.end_forecast = (self.start_forecast + self.optim_conf['delta_forecast']).replace(microsecond=0)
+        self.end_forecast = (self.start_forecast + self.optim_conf['delta_forecast_daily']).replace(microsecond=0)
         self.forecast_dates = pd.date_range(start=self.start_forecast,
                                             end=self.end_forecast-self.freq,
                                             freq=self.freq).round(self.freq, ambiguous='infer', nonexistent='shift_forward')
@@ -323,7 +323,7 @@ def get_weather_forecast(self, method: Optional[str] = 'scrapper',
             if self.retrieve_hass_conf['solar_forecast_kwp'] == 0:
                 self.logger.warning("The solar_forecast_kwp parameter is set to zero, setting to default 5")
                 self.retrieve_hass_conf['solar_forecast_kwp'] = 5
-            if self.optim_conf['delta_forecast'].days > 1:
+            if self.optim_conf['delta_forecast_daily'].days > 1:
                 self.logger.warning("The free public tier for solar.forecast only provides one day forecasts")
                 self.logger.warning("Continuing with just the first day 
of data, the other days are filled with 0.0.") self.logger.warning("Use the other available methods for delta_forecast > 1") @@ -331,9 +331,9 @@ def get_weather_forecast(self, method: Optional[str] = 'scrapper', "Accept": "application/json" } data = pd.DataFrame() - for i in range(len(self.plant_conf['module_model'])): + for i in range(len(self.plant_conf['pv_module_model'])): url = "https://api.forecast.solar/estimate/"+str(round(self.lat, 2))+"/"+str(round(self.lon, 2))+\ - "/"+str(self.plant_conf["surface_tilt"][i])+"/"+str(self.plant_conf["surface_azimuth"][i]-180)+\ + "/"+str(self.plant_conf['surface_tilt'][i])+"/"+str(self.plant_conf['surface_azimuth'][i]-180)+\ "/"+str(self.retrieve_hass_conf["solar_forecast_kwp"]) response = get(url, headers=headers) '''import bz2 # Uncomment to save a serialized data for tests @@ -485,12 +485,12 @@ def get_power_from_weather(self, df_weather: pd.DataFrame, cec_modules = cPickle.load(cec_modules) cec_inverters = bz2.BZ2File(self.emhass_conf['root_path'] / 'data' / 'cec_inverters.pbz2', "rb") cec_inverters = cPickle.load(cec_inverters) - if type(self.plant_conf['module_model']) == list: + if type(self.plant_conf['pv_module_model']) == list: P_PV_forecast = pd.Series(0, index=df_weather.index) - for i in range(len(self.plant_conf['module_model'])): + for i in range(len(self.plant_conf['pv_module_model'])): # Selecting correct module and inverter - module = cec_modules[self.plant_conf['module_model'][i]] - inverter = cec_inverters[self.plant_conf['inverter_model'][i]] + module = cec_modules[self.plant_conf['pv_module_model'][i]] + inverter = cec_inverters[self.plant_conf['pv_inverter_model'][i]] # Building the PV system in PVLib system = PVSystem(surface_tilt=self.plant_conf['surface_tilt'][i], surface_azimuth=self.plant_conf['surface_azimuth'][i], @@ -506,8 +506,8 @@ def get_power_from_weather(self, df_weather: pd.DataFrame, P_PV_forecast = P_PV_forecast + mc.results.ac else: # Selecting correct module and inverter - module = cec_modules[self.plant_conf['module_model']] - inverter = cec_inverters[self.plant_conf['inverter_model']] + module = cec_modules[self.plant_conf['pv_module_model']] + inverter = cec_inverters[self.plant_conf['pv_inverter_model']] # Building the PV system in PVLib system = PVSystem(surface_tilt=self.plant_conf['surface_tilt'], surface_azimuth=self.plant_conf['surface_azimuth'], @@ -544,7 +544,7 @@ def get_forecast_days_csv(self, timedelta_days: Optional[int] = 1) -> pd.date_ra start_forecast_csv = pd.Timestamp(datetime.now(), tz=self.time_zone).replace(microsecond=0).ceil(freq=self.freq) else: self.logger.error("Wrong method_ts_round passed parameter") - end_forecast_csv = (start_forecast_csv + self.optim_conf['delta_forecast']).replace(microsecond=0) + end_forecast_csv = (start_forecast_csv + self.optim_conf['delta_forecast_daily']).replace(microsecond=0) forecast_dates_csv = pd.date_range(start=start_forecast_csv, end=end_forecast_csv+timedelta(days=timedelta_days)-self.freq, freq=self.freq).round(self.freq, ambiguous='infer', nonexistent='shift_forward') @@ -561,7 +561,7 @@ def get_forecast_out_from_csv_or_list(self, df_final: pd.DataFrame, forecast_dat Get the forecast data as a DataFrame from a CSV file. The data contained in the CSV file should be a 24h forecast with the same frequency as - the main 'freq' parameter in the configuration file. The timestamp will not be used and + the main 'optimization_time_step' parameter in the configuration file. 
The timestamp will not be used and a new DateTimeIndex is generated to fit the timestamp index of the input data in 'df_final'. :param df_final: The DataFrame containing the input data. @@ -695,7 +695,7 @@ def get_load_forecast(self, days_min_load_forecast: Optional[int] = 3, method: O with open(filename_path, 'rb') as inp: rh.df_final, days_list, var_list = pickle.load(inp) self.var_load = var_list[0] - self.retrieve_hass_conf['var_load'] = self.var_load + self.retrieve_hass_conf['sensor_power_load_no_var_loads'] = self.var_load var_interp = [var_list[0]] self.var_list = [var_list[0]] self.var_load_new = self.var_load+'_positive' @@ -704,13 +704,13 @@ def get_load_forecast(self, days_min_load_forecast: Optional[int] = 3, method: O if not rh.get_data(days_list, var_list): return False if not rh.prepare_data( - self.retrieve_hass_conf['var_load'], load_negative = self.retrieve_hass_conf['load_negative'], + self.retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = self.retrieve_hass_conf['load_negative'], set_zero_min = self.retrieve_hass_conf['set_zero_min'], var_replace_zero = var_replace_zero, var_interp = var_interp): return False df = rh.df_final.copy()[[self.var_load_new]] if method == 'naive': # using a naive approach - mask_forecast_out = (df.index > days_list[-1] - self.optim_conf['delta_forecast']) + mask_forecast_out = (df.index > days_list[-1] - self.optim_conf['delta_forecast_daily']) forecast_out = df.copy().loc[mask_forecast_out] forecast_out = forecast_out.rename(columns={self.var_load_new: 'yhat'}) # Force forecast_out length to avoid mismatches @@ -812,13 +812,13 @@ def get_load_cost_forecast(self, df_final: pd.DataFrame, method: Optional[str] = """ csv_path = self.emhass_conf['data_path'] / csv_path if method == 'hp_hc_periods': - df_final[self.var_load_cost] = self.optim_conf['load_cost_hc'] + df_final[self.var_load_cost] = self.optim_conf['load_offpeak_hours_cost'] list_df_hp = [] - for key, period_hp in self.optim_conf['list_hp_periods'].items(): + for key, period_hp in self.optim_conf['load_peak_hour_periods'].items(): list_df_hp.append(df_final[self.var_load_cost].between_time( period_hp[0]['start'], period_hp[1]['end'])) for df_hp in list_df_hp: - df_final.loc[df_hp.index, self.var_load_cost] = self.optim_conf['load_cost_hp'] + df_final.loc[df_hp.index, self.var_load_cost] = self.optim_conf['load_peak_hours_cost'] elif method == 'csv': forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0) forecast_out = self.get_forecast_out_from_csv_or_list( @@ -871,7 +871,7 @@ def get_prod_price_forecast(self, df_final: pd.DataFrame, method: Optional[str] """ csv_path = self.emhass_conf['data_path'] / csv_path if method == 'constant': - df_final[self.var_prod_price] = self.optim_conf['prod_sell_price'] + df_final[self.var_prod_price] = self.optim_conf['photovoltaic_production_sell_price'] elif method == 'csv': forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0) forecast_out = self.get_forecast_out_from_csv_or_list( diff --git a/src/emhass/optimization.py b/src/emhass/optimization.py index 3a646fb7..1af1cf00 100644 --- a/src/emhass/optimization.py +++ b/src/emhass/optimization.py @@ -66,12 +66,12 @@ def __init__(self, retrieve_hass_conf: dict, optim_conf: dict, plant_conf: dict, self.retrieve_hass_conf = retrieve_hass_conf self.optim_conf = optim_conf self.plant_conf = plant_conf - self.freq = self.retrieve_hass_conf['freq'] + self.freq = self.retrieve_hass_conf['optimization_time_step'] self.time_zone = self.retrieve_hass_conf['time_zone'] 
self.timeStep = self.freq.seconds/3600 # in hours self.time_delta = pd.to_timedelta(opt_time_delta, "hours") # The period of optimization - self.var_PV = self.retrieve_hass_conf['var_PV'] - self.var_load = self.retrieve_hass_conf['var_load'] + self.var_PV = self.retrieve_hass_conf['sensor_power_photovoltaics'] + self.var_load = self.retrieve_hass_conf['sensor_power_load_no_var_loads'] self.var_load_new = self.var_load+'_positive' self.costfun = costfun # self.emhass_conf = emhass_conf @@ -143,18 +143,18 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n if soc_final is not None: soc_init = soc_final else: - soc_init = self.plant_conf['SOCtarget'] + soc_init = self.plant_conf['battery_target_state_of_charge'] if soc_final is None: if soc_init is not None: soc_final = soc_init else: - soc_final = self.plant_conf['SOCtarget'] + soc_final = self.plant_conf['battery_target_state_of_charge'] if def_total_hours is None: - def_total_hours = self.optim_conf['def_total_hours'] + def_total_hours = self.optim_conf['operating_hours_of_each_deferrable_load'] if def_start_timestep is None: - def_start_timestep = self.optim_conf['def_start_timestep'] + def_start_timestep = self.optim_conf['start_timesteps_of_each_deferrable_load'] if def_end_timestep is None: - def_end_timestep = self.optim_conf['def_end_timestep'] + def_end_timestep = self.optim_conf['end_timesteps_of_each_deferrable_load'] type_self_conso = 'bigm' # maxmin #### The LP problem using Pulp #### @@ -166,19 +166,19 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n ## Add decision variables P_grid_neg = {(i):plp.LpVariable(cat='Continuous', - lowBound=-self.plant_conf['P_to_grid_max'], upBound=0, + lowBound=-self.plant_conf['maximum_power_to_grid'], upBound=0, name="P_grid_neg{}".format(i)) for i in set_I} P_grid_pos = {(i):plp.LpVariable(cat='Continuous', - lowBound=0, upBound=self.plant_conf['P_from_grid_max'], + lowBound=0, upBound=self.plant_conf['maximum_power_from_grid'], name="P_grid_pos{}".format(i)) for i in set_I} P_deferrable = [] P_def_bin1 = [] - for k in range(self.optim_conf['num_def_loads']): - if type(self.optim_conf['P_deferrable_nom'][k]) == list: - upBound = np.max(self.optim_conf['P_deferrable_nom'][k]) + for k in range(self.optim_conf['number_of_deferrable_loads']): + if type(self.optim_conf['nominal_power_of_deferrable_loads'][k]) == list: + upBound = np.max(self.optim_conf['nominal_power_of_deferrable_loads'][k]) else: - upBound = self.optim_conf['P_deferrable_nom'][k] - if self.optim_conf['treat_def_as_semi_cont'][k]: + upBound = self.optim_conf['nominal_power_of_deferrable_loads'][k] + if self.optim_conf['treat_deferrable_load_as_semi_cont'][k]: P_deferrable.append({(i):plp.LpVariable(cat='Continuous', name="P_deferrable{}_{}".format(k, i)) for i in set_I}) else: @@ -189,7 +189,7 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n name="P_def{}_bin1_{}".format(k, i)) for i in set_I}) P_def_start = [] P_def_bin2 = [] - for k in range(self.optim_conf['num_def_loads']): + for k in range(self.optim_conf['number_of_deferrable_loads']): P_def_start.append({(i):plp.LpVariable(cat='Binary', name="P_def{}_start_{}".format(k, i)) for i in set_I}) P_def_bin2.append({(i):plp.LpVariable(cat='Binary', @@ -200,10 +200,10 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n name="E_{}".format(i)) for i in set_I} if self.optim_conf['set_use_battery']: P_sto_pos = {(i):plp.LpVariable(cat='Continuous', - 
lowBound=0, upBound=self.plant_conf['Pd_max'], + lowBound=0, upBound=self.plant_conf['battery_discharge_power_max'], name="P_sto_pos_{0}".format(i)) for i in set_I} P_sto_neg = {(i):plp.LpVariable(cat='Continuous', - lowBound=-self.plant_conf['Pc_max'], upBound=0, + lowBound=-self.plant_conf['battery_charge_power_max'], upBound=0, name="P_sto_neg_{0}".format(i)) for i in set_I} else: P_sto_pos = {(i):i*0 for i in set_I} @@ -221,7 +221,7 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n ## Define objective P_def_sum= [] for i in set_I: - P_def_sum.append(plp.lpSum(P_deferrable[k][i] for k in range(self.optim_conf['num_def_loads']))) + P_def_sum.append(plp.lpSum(P_deferrable[k][i] for k in range(self.optim_conf['number_of_deferrable_loads']))) if self.costfun == 'profit': if self.optim_conf['set_total_pv_sell']: objective = plp.lpSum(-0.001*self.timeStep*(unit_load_cost[i]*(P_load[i] + P_def_sum[i]) + \ @@ -252,12 +252,12 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n self.optim_conf['weight_battery_charge']*P_sto_neg[i]) for i in set_I) # Add term penalizing each startup where configured - if ("def_start_penalty" in self.optim_conf and self.optim_conf["def_start_penalty"]): - for k in range(self.optim_conf["num_def_loads"]): - if (len(self.optim_conf["def_start_penalty"]) > k and self.optim_conf["def_start_penalty"][k]): + if ('set_deferrable_startup_penalty' in self.optim_conf and self.optim_conf['set_deferrable_startup_penalty']): + for k in range(self.optim_conf['number_of_deferrable_loads']): + if (len(self.optim_conf['set_deferrable_startup_penalty']) > k and self.optim_conf['set_deferrable_startup_penalty'][k]): objective = objective + plp.lpSum( - -0.001 * self.timeStep * self.optim_conf["def_start_penalty"][k] * P_def_start[k][i] *\ - unit_load_cost[i] * self.optim_conf['P_deferrable_nom'][k] + -0.001 * self.timeStep * self.optim_conf['set_deferrable_startup_penalty'][k] * P_def_start[k][i] *\ + unit_load_cost[i] * self.optim_conf['nominal_power_of_deferrable_loads'][k] for i in set_I) opt_model.setObjective(objective) @@ -288,24 +288,24 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n for i in set_I} # Constraint for hybrid inverter and curtailment cases - if type(self.plant_conf['module_model']) == list: + if type(self.plant_conf['pv_module_model']) == list: P_nom_inverter = 0.0 - for i in range(len(self.plant_conf['inverter_model'])): - if type(self.plant_conf['inverter_model'][i]) == str: + for i in range(len(self.plant_conf['pv_inverter_model'])): + if type(self.plant_conf['pv_inverter_model'][i]) == str: cec_inverters = bz2.BZ2File(pathlib.Path(__file__).parent / 'data/cec_inverters.pbz2', "rb") cec_inverters = cPickle.load(cec_inverters) - inverter = cec_inverters[self.plant_conf['inverter_model'][i]] + inverter = cec_inverters[self.plant_conf['pv_inverter_model'][i]] P_nom_inverter += inverter.Paco else: - P_nom_inverter += self.plant_conf['inverter_model'][i] + P_nom_inverter += self.plant_conf['pv_inverter_model'][i] else: - if type(self.plant_conf['inverter_model'][i]) == str: + if type(self.plant_conf['pv_inverter_model'][i]) == str: cec_inverters = bz2.BZ2File(pathlib.Path(__file__).parent / 'data/cec_inverters.pbz2', "rb") cec_inverters = cPickle.load(cec_inverters) - inverter = cec_inverters[self.plant_conf['inverter_model']] + inverter = cec_inverters[self.plant_conf['pv_inverter_model']] P_nom_inverter = inverter.Paco else: - P_nom_inverter = 
self.plant_conf['inverter_model'] + P_nom_inverter = self.plant_conf['pv_inverter_model'] if self.plant_conf['inverter_is_hybrid']: constraints.update({"constraint_hybrid_inverter1_{}".format(i) : plp.LpConstraint( @@ -347,26 +347,26 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n # Avoid injecting and consuming from grid at the same time constraints.update({"constraint_pgridpos_{}".format(i) : plp.LpConstraint( - e = P_grid_pos[i] - self.plant_conf['P_from_grid_max']*D[i], + e = P_grid_pos[i] - self.plant_conf['maximum_power_from_grid']*D[i], sense = plp.LpConstraintLE, rhs = 0) for i in set_I}) constraints.update({"constraint_pgridneg_{}".format(i) : plp.LpConstraint( - e = -P_grid_neg[i] - self.plant_conf['P_to_grid_max']*(1-D[i]), + e = -P_grid_neg[i] - self.plant_conf['maximum_power_to_grid']*(1-D[i]), sense = plp.LpConstraintLE, rhs = 0) for i in set_I}) # Treat deferrable loads constraints predicted_temps = {} - for k in range(self.optim_conf['num_def_loads']): + for k in range(self.optim_conf['number_of_deferrable_loads']): - if type(self.optim_conf['P_deferrable_nom'][k]) == list: + if type(self.optim_conf['nominal_power_of_deferrable_loads'][k]) == list: # Constraint for sequence of deferrable # WARNING: This is experimental, formulation seems correct but feasibility problems. # Probably uncomptabile with other constraints - power_sequence = self.optim_conf['P_deferrable_nom'][k] + power_sequence = self.optim_conf['nominal_power_of_deferrable_loads'][k] sequence_length = len(power_sequence) def create_matrix(input_list, n): matrix = [] @@ -421,7 +421,7 @@ def create_matrix(input_list, n): continue predicted_temp.append( predicted_temp[I-1] - + (P_deferrable[k][I-1] * (heating_rate * self.timeStep / self.optim_conf['P_deferrable_nom'][k])) + + (P_deferrable[k][I-1] * (heating_rate * self.timeStep / self.optim_conf['nominal_power_of_deferrable_loads'][k])) - (cooling_constant * (predicted_temp[I-1] - outdoor_temperature_forecast[I-1]))) if len(desired_temperatures) > I and desired_temperatures[I]: constraints.update({"constraint_defload{}_temperature_{}".format(k, I): @@ -448,7 +448,7 @@ def create_matrix(input_list, n): plp.LpConstraint( e = plp.lpSum(P_deferrable[k][i]*self.timeStep for i in set_I), sense = plp.LpConstraintEQ, - rhs = def_total_hours[k]*self.optim_conf['P_deferrable_nom'][k]) + rhs = def_total_hours[k]*self.optim_conf['nominal_power_of_deferrable_loads'][k]) }) # Ensure deferrable loads consume energy between def_start_timestep & def_end_timestep @@ -516,7 +516,7 @@ def create_matrix(input_list, n): for i in set_I}) # Treat deferrable as a fixed value variable with just one startup - if self.optim_conf['set_def_constant'][k]: + if self.optim_conf['set_deferrable_load_single_constant'][k]: # P_def_start[i] must be 1 for exactly 1 value of i constraints.update({"constraint_pdef{}_start4".format(k) : plp.LpConstraint( @@ -533,23 +533,23 @@ def create_matrix(input_list, n): }) # Treat deferrable load as a semi-continuous variable - if self.optim_conf['treat_def_as_semi_cont'][k]: + if self.optim_conf['treat_deferrable_load_as_semi_cont'][k]: constraints.update({"constraint_pdef{}_semicont1_{}".format(k, i) : plp.LpConstraint( - e=P_deferrable[k][i] - self.optim_conf['P_deferrable_nom'][k]*P_def_bin1[k][i], + e=P_deferrable[k][i] - self.optim_conf['nominal_power_of_deferrable_loads'][k]*P_def_bin1[k][i], sense=plp.LpConstraintGE, rhs=0) for i in set_I}) constraints.update({"constraint_pdef{}_semicont2_{}".format(k, i) : 
plp.LpConstraint( - e=P_deferrable[k][i] - self.optim_conf['P_deferrable_nom'][k]*P_def_bin1[k][i], + e=P_deferrable[k][i] - self.optim_conf['nominal_power_of_deferrable_loads'][k]*P_def_bin1[k][i], sense=plp.LpConstraintLE, rhs=0) for i in set_I}) # Treat the number of starts for a deferrable load (old method, kept here just in case) - # if self.optim_conf['set_def_constant'][k]: + # if self.optim_conf['set_deferrable_load_single_constant'][k]: # constraints.update({"constraint_pdef{}_start1_{}".format(k, i) : # plp.LpConstraint( # e=P_deferrable[k][i] - P_def_bin2[k][i]*M, @@ -592,53 +592,53 @@ def create_matrix(input_list, n): constraints.update({"constraint_pos_batt_dynamic_max_{}".format(i) : plp.LpConstraint(e = P_sto_pos[i+1] - P_sto_pos[i], sense = plp.LpConstraintLE, - rhs = self.timeStep*self.optim_conf['battery_dynamic_max']*self.plant_conf['Pd_max']) + rhs = self.timeStep*self.optim_conf['battery_dynamic_max']*self.plant_conf['battery_discharge_power_max']) for i in range(n-1)}) constraints.update({"constraint_pos_batt_dynamic_min_{}".format(i) : plp.LpConstraint(e = P_sto_pos[i+1] - P_sto_pos[i], sense = plp.LpConstraintGE, - rhs = self.timeStep*self.optim_conf['battery_dynamic_min']*self.plant_conf['Pd_max']) + rhs = self.timeStep*self.optim_conf['battery_dynamic_min']*self.plant_conf['battery_discharge_power_max']) for i in range(n-1)}) constraints.update({"constraint_neg_batt_dynamic_max_{}".format(i) : plp.LpConstraint(e = P_sto_neg[i+1] - P_sto_neg[i], sense = plp.LpConstraintLE, - rhs = self.timeStep*self.optim_conf['battery_dynamic_max']*self.plant_conf['Pc_max']) + rhs = self.timeStep*self.optim_conf['battery_dynamic_max']*self.plant_conf['battery_charge_power_max']) for i in range(n-1)}) constraints.update({"constraint_neg_batt_dynamic_min_{}".format(i) : plp.LpConstraint(e = P_sto_neg[i+1] - P_sto_neg[i], sense = plp.LpConstraintGE, - rhs = self.timeStep*self.optim_conf['battery_dynamic_min']*self.plant_conf['Pc_max']) + rhs = self.timeStep*self.optim_conf['battery_dynamic_min']*self.plant_conf['battery_charge_power_max']) for i in range(n-1)}) # Then the classic battery constraints constraints.update({"constraint_pstopos_{}".format(i) : plp.LpConstraint( - e=P_sto_pos[i] - self.plant_conf['eta_disch']*self.plant_conf['Pd_max']*E[i], + e=P_sto_pos[i] - self.plant_conf['battery_discharge_efficiency']*self.plant_conf['battery_discharge_power_max']*E[i], sense=plp.LpConstraintLE, rhs=0) for i in set_I}) constraints.update({"constraint_pstoneg_{}".format(i) : plp.LpConstraint( - e=-P_sto_neg[i] - (1/self.plant_conf['eta_ch'])*self.plant_conf['Pc_max']*(1-E[i]), + e=-P_sto_neg[i] - (1/self.plant_conf['battery_charge_efficiency'])*self.plant_conf['battery_charge_power_max']*(1-E[i]), sense=plp.LpConstraintLE, rhs=0) for i in set_I}) constraints.update({"constraint_socmax_{}".format(i) : plp.LpConstraint( - e=-plp.lpSum(P_sto_pos[j]*(1/self.plant_conf['eta_disch']) + self.plant_conf['eta_ch']*P_sto_neg[j] for j in range(i)), + e=-plp.lpSum(P_sto_pos[j]*(1/self.plant_conf['battery_discharge_efficiency']) + self.plant_conf['battery_charge_efficiency']*P_sto_neg[j] for j in range(i)), sense=plp.LpConstraintLE, - rhs=(self.plant_conf['Enom']/self.timeStep)*(self.plant_conf['SOCmax'] - soc_init)) + rhs=(self.plant_conf['battery_nominal_energy_capacity']/self.timeStep)*(self.plant_conf['battery_maximum_state_of_charge'] - soc_init)) for i in set_I}) constraints.update({"constraint_socmin_{}".format(i) : plp.LpConstraint( - e=plp.lpSum(P_sto_pos[j]*(1/self.plant_conf['eta_disch']) 
+ self.plant_conf['eta_ch']*P_sto_neg[j] for j in range(i)), + e=plp.lpSum(P_sto_pos[j]*(1/self.plant_conf['battery_discharge_efficiency']) + self.plant_conf['battery_charge_efficiency']*P_sto_neg[j] for j in range(i)), sense=plp.LpConstraintLE, - rhs=(self.plant_conf['Enom']/self.timeStep)*(soc_init - self.plant_conf['SOCmin'])) + rhs=(self.plant_conf['battery_nominal_energy_capacity']/self.timeStep)*(soc_init - self.plant_conf['battery_minimum_state_of_charge'])) for i in set_I}) constraints.update({"constraint_socfinal_{}".format(0) : plp.LpConstraint( - e=plp.lpSum(P_sto_pos[i]*(1/self.plant_conf['eta_disch']) + self.plant_conf['eta_ch']*P_sto_neg[i] for i in set_I), + e=plp.lpSum(P_sto_pos[i]*(1/self.plant_conf['battery_discharge_efficiency']) + self.plant_conf['battery_charge_efficiency']*P_sto_neg[i] for i in set_I), sense=plp.LpConstraintEQ, - rhs=(soc_init - soc_final)*self.plant_conf['Enom']/self.timeStep) + rhs=(soc_init - soc_final)*self.plant_conf['battery_nominal_energy_capacity']/self.timeStep) }) opt_model.constraints = constraints @@ -667,16 +667,16 @@ def create_matrix(input_list, n): opt_tp = pd.DataFrame() opt_tp["P_PV"] = [P_PV[i] for i in set_I] opt_tp["P_Load"] = [P_load[i] for i in set_I] - for k in range(self.optim_conf['num_def_loads']): + for k in range(self.optim_conf['number_of_deferrable_loads']): opt_tp["P_deferrable{}".format(k)] = [P_deferrable[k][i].varValue for i in set_I] opt_tp["P_grid_pos"] = [P_grid_pos[i].varValue for i in set_I] opt_tp["P_grid_neg"] = [P_grid_neg[i].varValue for i in set_I] opt_tp["P_grid"] = [P_grid_pos[i].varValue + P_grid_neg[i].varValue for i in set_I] if self.optim_conf['set_use_battery']: opt_tp["P_batt"] = [P_sto_pos[i].varValue + P_sto_neg[i].varValue for i in set_I] - SOC_opt_delta = [(P_sto_pos[i].varValue*(1/self.plant_conf['eta_disch']) + \ - self.plant_conf['eta_ch']*P_sto_neg[i].varValue)*( - self.timeStep/(self.plant_conf['Enom'])) for i in set_I] + SOC_opt_delta = [(P_sto_pos[i].varValue*(1/self.plant_conf['battery_discharge_efficiency']) + \ + self.plant_conf['battery_charge_efficiency']*P_sto_neg[i].varValue)*( + self.timeStep/(self.plant_conf['battery_nominal_energy_capacity'])) for i in set_I] SOCinit = copy.copy(soc_init) SOC_opt = [] for i in set_I: @@ -692,7 +692,7 @@ def create_matrix(input_list, n): # Lets compute the optimal cost function P_def_sum_tp = [] for i in set_I: - P_def_sum_tp.append(sum(P_deferrable[k][i].varValue for k in range(self.optim_conf['num_def_loads']))) + P_def_sum_tp.append(sum(P_deferrable[k][i].varValue for k in range(self.optim_conf['number_of_deferrable_loads']))) opt_tp["unit_load_cost"] = [unit_load_cost[i] for i in set_I] opt_tp["unit_prod_price"] = [unit_prod_price[i] for i in set_I] if self.optim_conf['set_total_pv_sell']: @@ -728,7 +728,7 @@ def create_matrix(input_list, n): # Debug variables if debug: - for k in range(self.optim_conf["num_def_loads"]): + for k in range(self.optim_conf['number_of_deferrable_loads']): opt_tp[f"P_def_start_{k}"] = [P_def_start[k][i].varValue for i in set_I] opt_tp[f"P_def_bin2_{k}"] = [P_def_bin2[k][i].varValue for i in set_I] for i, predicted_temp in predicted_temps.items(): diff --git a/src/emhass/utils.py b/src/emhass/utils.py index 9983dd66..a03996e4 100644 --- a/src/emhass/utils.py +++ b/src/emhass/utils.py @@ -1,6 +1,8 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- +import csv +import os from typing import Tuple, Optional from datetime import datetime, timedelta, timezone import logging @@ -9,6 +11,7 @@ import copy import numpy as np 
import pandas as pd +from requests import get import yaml import pytz import ast @@ -19,7 +22,6 @@ from emhass.machine_learning_forecaster import MLForecaster - def get_root(file: str, num_parent: Optional[int] = 3) -> str: """ Get the root absolute path of the working directory. @@ -138,13 +140,14 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic """ if (params != None) and (params != "null"): - params = json.loads(params) + if type(params) is str: + params = json.loads(params) else: params = {} # Some default data needed custom_deferrable_forecast_id = [] custom_predicted_temperature_id = [] - for k in range(optim_conf["num_def_loads"]): + for k in range(optim_conf['number_of_deferrable_loads']): custom_deferrable_forecast_id.append( { "entity_id": "sensor.p_deferrable{}".format(k), @@ -225,9 +228,10 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic else: params["passed_data"] = default_passed_dict if runtimeparams is not None: - runtimeparams = json.loads(runtimeparams) - freq = int(retrieve_hass_conf["freq"].seconds / 60.0) - delta_forecast = int(optim_conf["delta_forecast"].days) + if type(runtimeparams) is str: + runtimeparams = json.loads(runtimeparams) + freq = int(retrieve_hass_conf['optimization_time_step'].seconds / 60.0) + delta_forecast = int(optim_conf['delta_forecast_daily'].days) forecast_dates = get_forecast_dates(freq, delta_forecast) if set_type == "regressor-model-fit": if "csv_file" in runtimeparams: @@ -270,41 +274,41 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic prediction_horizon = runtimeparams["prediction_horizon"] params["passed_data"]["prediction_horizon"] = prediction_horizon if "soc_init" not in runtimeparams.keys(): - soc_init = plant_conf["SOCtarget"] + soc_init = plant_conf['battery_target_state_of_charge'] else: soc_init = runtimeparams["soc_init"] params["passed_data"]["soc_init"] = soc_init if "soc_final" not in runtimeparams.keys(): - soc_final = plant_conf["SOCtarget"] + soc_final = plant_conf['battery_target_state_of_charge'] else: soc_final = runtimeparams["soc_final"] params["passed_data"]["soc_final"] = soc_final - if "def_total_hours" not in runtimeparams.keys(): - def_total_hours = optim_conf["def_total_hours"] + if 'operating_hours_of_each_deferrable_load' not in runtimeparams.keys(): + def_total_hours = optim_conf['operating_hours_of_each_deferrable_load'] else: - def_total_hours = runtimeparams["def_total_hours"] - params["passed_data"]["def_total_hours"] = def_total_hours - if "def_start_timestep" not in runtimeparams.keys(): - def_start_timestep = optim_conf["def_start_timestep"] + def_total_hours = runtimeparams['operating_hours_of_each_deferrable_load'] + params["passed_data"]['operating_hours_of_each_deferrable_load'] = def_total_hours + if 'start_timesteps_of_each_deferrable_load' not in runtimeparams.keys(): + def_start_timestep = optim_conf['start_timesteps_of_each_deferrable_load'] else: - def_start_timestep = runtimeparams["def_start_timestep"] - params["passed_data"]["def_start_timestep"] = def_start_timestep - if "def_end_timestep" not in runtimeparams.keys(): - def_end_timestep = optim_conf["def_end_timestep"] + def_start_timestep = runtimeparams['start_timesteps_of_each_deferrable_load'] + params["passed_data"]['start_timesteps_of_each_deferrable_load'] = def_start_timestep + if 'end_timesteps_of_each_deferrable_load' not in runtimeparams.keys(): + def_end_timestep = optim_conf['end_timesteps_of_each_deferrable_load'] else: - 
def_end_timestep = runtimeparams["def_end_timestep"] - params["passed_data"]["def_end_timestep"] = def_end_timestep + def_end_timestep = runtimeparams['end_timesteps_of_each_deferrable_load'] + params["passed_data"]['end_timesteps_of_each_deferrable_load'] = def_end_timestep forecast_dates = copy.deepcopy(forecast_dates)[0:prediction_horizon] else: params["passed_data"]["prediction_horizon"] = None params["passed_data"]["soc_init"] = None params["passed_data"]["soc_final"] = None - params["passed_data"]["def_total_hours"] = None - params["passed_data"]["def_start_timestep"] = None - params["passed_data"]["def_end_timestep"] = None + params["passed_data"]['operating_hours_of_each_deferrable_load'] = None + params["passed_data"]['start_timesteps_of_each_deferrable_load'] = None + params["passed_data"]['end_timesteps_of_each_deferrable_load'] = None # Treat passed forecast data lists list_forecast_key = ['pv_power_forecast', 'load_power_forecast', 'load_cost_forecast', 'prod_price_forecast', 'outdoor_temperature_forecast'] - forecast_methods = ['weather_forecast_method', 'load_forecast_method', 'load_cost_forecast_method', 'prod_price_forecast_method', 'outdoor_temperature_forecast_method'] + forecast_methods = ['weather_forecast_method', 'load_forecast_method', 'load_cost_forecast_method', 'production_price_forecast_method', 'outdoor_temperature_forecast_method'] # Param to save forecast cache (i.e. Solcast) if "weather_forecast_cache" not in runtimeparams.keys(): weather_forecast_cache = False @@ -333,11 +337,11 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic else: params['passed_data'][forecast_key] = None # Treat passed data for forecast model fit/predict/tune at runtime - if "days_to_retrieve" not in runtimeparams.keys(): + if 'historic_days_to_retrieve' not in runtimeparams.keys(): days_to_retrieve = 9 else: - days_to_retrieve = runtimeparams["days_to_retrieve"] - params["passed_data"]["days_to_retrieve"] = days_to_retrieve + days_to_retrieve = runtimeparams['historic_days_to_retrieve'] + params["passed_data"]['historic_days_to_retrieve'] = days_to_retrieve if "model_type" not in runtimeparams.keys(): model_type = "load_forecast" else: @@ -420,67 +424,67 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic beta = runtimeparams["beta"] params["passed_data"]["beta"] = beta # Treat optimization configuration parameters passed at runtime - if "num_def_loads" in runtimeparams.keys(): - optim_conf["num_def_loads"] = runtimeparams["num_def_loads"] - if "P_deferrable_nom" in runtimeparams.keys(): - optim_conf["P_deferrable_nom"] = runtimeparams["P_deferrable_nom"] - if "def_total_hours" in runtimeparams.keys(): - optim_conf["def_total_hours"] = runtimeparams["def_total_hours"] - if "def_start_timestep" in runtimeparams.keys(): - optim_conf["def_start_timestep"] = runtimeparams["def_start_timestep"] - if "def_end_timestep" in runtimeparams.keys(): - optim_conf["def_end_timestep"] = runtimeparams["def_end_timestep"] + if 'number_of_deferrable_loads' in runtimeparams.keys(): + optim_conf['number_of_deferrable_loads'] = runtimeparams['number_of_deferrable_loads'] + if 'nominal_power_of_deferrable_loads' in runtimeparams.keys(): + optim_conf['nominal_power_of_deferrable_loads'] = runtimeparams['nominal_power_of_deferrable_loads'] + if 'operating_hours_of_each_deferrable_load' in runtimeparams.keys(): + optim_conf['operating_hours_of_each_deferrable_load'] = runtimeparams['operating_hours_of_each_deferrable_load'] + if 
'start_timesteps_of_each_deferrable_load' in runtimeparams.keys(): + optim_conf['start_timesteps_of_each_deferrable_load'] = runtimeparams['start_timesteps_of_each_deferrable_load'] + if 'end_timesteps_of_each_deferrable_load' in runtimeparams.keys(): + optim_conf['end_timesteps_of_each_deferrable_load'] = runtimeparams['end_timesteps_of_each_deferrable_load'] if "def_current_state" in runtimeparams.keys(): optim_conf["def_current_state"] = [bool(s) for s in runtimeparams["def_current_state"]] - if "treat_def_as_semi_cont" in runtimeparams.keys(): - optim_conf["treat_def_as_semi_cont"] = [ + if 'treat_deferrable_load_as_semi_cont' in runtimeparams.keys(): + optim_conf['treat_deferrable_load_as_semi_cont'] = [ ast.literal_eval(str(k).capitalize()) - for k in runtimeparams["treat_def_as_semi_cont"] + for k in runtimeparams['treat_deferrable_load_as_semi_cont'] ] - if "set_def_constant" in runtimeparams.keys(): - optim_conf["set_def_constant"] = [ - ast.literal_eval(str(k).capitalize()) for k in runtimeparams["set_def_constant"] + if 'set_deferrable_load_single_constant' in runtimeparams.keys(): + optim_conf['set_deferrable_load_single_constant'] = [ + ast.literal_eval(str(k).capitalize()) for k in runtimeparams['set_deferrable_load_single_constant'] ] - if "def_start_penalty" in runtimeparams.keys(): - optim_conf["def_start_penalty"] = [ - ast.literal_eval(str(k).capitalize()) for k in runtimeparams["def_start_penalty"] + if 'set_deferrable_startup_penalty' in runtimeparams.keys(): + optim_conf['set_deferrable_startup_penalty'] = [ + ast.literal_eval(str(k).capitalize()) for k in runtimeparams['set_deferrable_startup_penalty'] ] if 'def_load_config' in runtimeparams: optim_conf["def_load_config"] = runtimeparams['def_load_config'] if "solcast_api_key" in runtimeparams.keys(): retrieve_hass_conf["solcast_api_key"] = runtimeparams["solcast_api_key"] - optim_conf["weather_forecast_method"] = "solcast" + optim_conf['weather_forecast_method'] = "solcast" if "solcast_rooftop_id" in runtimeparams.keys(): retrieve_hass_conf["solcast_rooftop_id"] = runtimeparams[ "solcast_rooftop_id" ] - optim_conf["weather_forecast_method"] = "solcast" + optim_conf['weather_forecast_method'] = "solcast" if "solar_forecast_kwp" in runtimeparams.keys(): retrieve_hass_conf["solar_forecast_kwp"] = runtimeparams[ "solar_forecast_kwp" ] - optim_conf["weather_forecast_method"] = "solar.forecast" - if "weight_battery_discharge" in runtimeparams.keys(): - optim_conf["weight_battery_discharge"] = runtimeparams[ - "weight_battery_discharge" + optim_conf['weather_forecast_method'] = "solar.forecast" + if 'weight_battery_discharge' in runtimeparams.keys(): + optim_conf['weight_battery_discharge'] = runtimeparams[ + 'weight_battery_discharge' ] - if "weight_battery_charge" in runtimeparams.keys(): - optim_conf["weight_battery_charge"] = runtimeparams["weight_battery_charge"] - if 'freq' in runtimeparams.keys(): - retrieve_hass_conf['freq'] = pd.to_timedelta(runtimeparams['freq'], "minutes") + if 'weight_battery_charge' in runtimeparams.keys(): + optim_conf['weight_battery_charge'] = runtimeparams['weight_battery_charge'] + if 'optimization_time_step' in runtimeparams.keys(): + retrieve_hass_conf['optimization_time_step'] = pd.to_timedelta(runtimeparams['optimization_time_step'], "minutes") if 'continual_publish' in runtimeparams.keys(): retrieve_hass_conf['continual_publish'] = bool(runtimeparams['continual_publish']) # Treat plant configuration parameters passed at runtime - if "SOCmin" in runtimeparams.keys(): - 
plant_conf["SOCmin"] = runtimeparams["SOCmin"] - if "SOCmax" in runtimeparams.keys(): - plant_conf["SOCmax"] = runtimeparams["SOCmax"] - if "SOCtarget" in runtimeparams.keys(): - plant_conf["SOCtarget"] = runtimeparams["SOCtarget"] - if "Pd_max" in runtimeparams.keys(): - plant_conf["Pd_max"] = runtimeparams["Pd_max"] - if "Pc_max" in runtimeparams.keys(): - plant_conf["Pc_max"] = runtimeparams["Pc_max"] + if 'battery_minimum_state_of_charge' in runtimeparams.keys(): + plant_conf['battery_minimum_state_of_charge'] = runtimeparams['battery_minimum_state_of_charge'] + if 'battery_maximum_state_of_charge' in runtimeparams.keys(): + plant_conf['battery_maximum_state_of_charge'] = runtimeparams['battery_maximum_state_of_charge'] + if 'battery_target_state_of_charge' in runtimeparams.keys(): + plant_conf['battery_target_state_of_charge'] = runtimeparams['battery_target_state_of_charge'] + if 'battery_discharge_power_max' in runtimeparams.keys(): + plant_conf['battery_discharge_power_max'] = runtimeparams['battery_discharge_power_max'] + if 'battery_charge_power_max' in runtimeparams.keys(): + plant_conf['battery_charge_power_max'] = runtimeparams['battery_charge_power_max'] # Treat custom entities id's and friendly names for variables if "custom_pv_forecast_id" in runtimeparams.keys(): params["passed_data"]["custom_pv_forecast_id"] = runtimeparams[ @@ -547,56 +551,46 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic entity_save = runtimeparams["entity_save"] params["passed_data"]["entity_save"] = entity_save # Serialize the final params - params = json.dumps(params) + params = json.dumps(params, default=str) return params, retrieve_hass_conf, optim_conf, plant_conf -def get_yaml_parse(emhass_conf: dict, use_secrets: Optional[bool] = True, - params: Optional[str] = None) -> Tuple[dict, dict, dict]: +def get_yaml_parse(params: str, logger: logging.Logger) -> Tuple[dict, dict, dict]: """ Perform parsing of the config.yaml file. - :param emhass_conf: Dictionary containing the needed emhass paths - :type emhass_conf: dict - :param use_secrets: Indicate if we should use a secrets file or not. - Set to False for unit tests. 
-    :type use_secrets: bool, optional
-    :param params: Configuration parameters passed from data/options.json
+    :param params: Configuration parameters passed from config
     :type params: str
+    :param logger: The logger object
+    :type logger: logging.Logger
     :return: A tuple with the dictionaries containing the parsed data
     :rtype: tuple(dict)
 
     """
-    input_conf = json.loads(params)
-
-    input_secrets = input_conf.pop("params_secrets", None)
-
-    retrieve_hass_conf = input_conf.get("retrieve_hass_conf", {})
-
-    if use_secrets:
-        retrieve_hass_conf.update(input_secrets)
+    if params:
+        if type(params) is str:
+            input_conf = json.loads(params)
+        else:
+            input_conf = params
     else:
-        retrieve_hass_conf["hass_url"] = "http://supervisor/core/api"
-        retrieve_hass_conf["long_lived_token"] = "${SUPERVISOR_TOKEN}"
-        retrieve_hass_conf["time_zone"] = "Europe/Paris"
-        retrieve_hass_conf["lat"] = 45.83
-        retrieve_hass_conf["lon"] = 6.86
-        retrieve_hass_conf["alt"] = 4807.8
-    retrieve_hass_conf["freq"] = pd.to_timedelta(retrieve_hass_conf["freq"], "minutes")
-    retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"])
+        input_conf = {}
+        logger.error("No params have been detected for get_yaml_parse")
+        return False, False, False
 
     optim_conf = input_conf.get("optim_conf", {})
-    # Format list_hp_periods
-    optim_conf["list_hp_periods"] = dict(
-        (key, d[key]) for d in optim_conf["list_hp_periods"] for key in d
-    )
-
-    #Format delta_forecast
-    optim_conf["delta_forecast"] = pd.Timedelta(days=optim_conf["delta_forecast"])
+    retrieve_hass_conf = input_conf.get("retrieve_hass_conf", {})
     plant_conf = input_conf.get("plant_conf", {})
 
+    # Format time parameters
+    if optim_conf.get('delta_forecast_daily',None) is not None:
+        optim_conf['delta_forecast_daily'] = pd.Timedelta(days=optim_conf['delta_forecast_daily'])
+    if retrieve_hass_conf.get('optimization_time_step',None) is not None:
+        retrieve_hass_conf['optimization_time_step'] = pd.to_timedelta(retrieve_hass_conf['optimization_time_step'], "minutes")
+    if retrieve_hass_conf.get('time_zone',None) is not None:
+        retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"])
+
     return retrieve_hass_conf, optim_conf, plant_conf
 
@@ -746,143 +740,304 @@ def get_injection_dict_forecast_model_tune(df_pred_optim: pd.DataFrame, mlf: MLF
     injection_dict["figure_0"] = image_path_0
     return injection_dict
 
+def build_config(emhass_conf: dict, logger: logging.Logger, defaults_path: Optional[str] = None, config_path: Optional[str] = None,
+    legacy_config_path: Optional[str] = None) -> dict:
+    """
+    Retrieve parameters from configuration files.
+    Priority order (highest first): legacy_config_path, then config_path, then defaults_path.
+
+    :param emhass_conf: Dictionary containing the needed emhass paths
+    :type emhass_conf: dict
+    :param logger: The logger object
+    :type logger: logging.Logger
+    :param defaults_path: path to config file storing parameter defaults (config_defaults.json)
+    :type defaults_path: str
+    :param config_path: path to the main configuration file (config.json)
+    :type config_path: str
+    :param legacy_config_path: path to the legacy config file, if it is provided (config_emhass.yaml)
+    :type legacy_config_path: str
+    :return: The built config dictionary
+    :rtype: dict
+    """
+
+    # Read default parameters
+    if defaults_path and pathlib.Path(defaults_path).is_file():
+        with pathlib.Path(defaults_path).open('r') as data:
+            config = json.load(data)
+    else:
+        logger.error("config_defaults.json does not exist")
+        raise Exception("config_defaults.json does not exist in path: "+str(defaults_path))
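+    # Layering example (file names assumed from this patch): config starts out
+    # as the contents of config_defaults.json, config.json then overrides any
+    # matching keys below, and config_emhass.yaml (legacy), if present,
+    # overrides both.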
 
+    # Read user config parameters
+    if config_path and pathlib.Path(config_path).is_file():
+        with pathlib.Path(config_path).open('r') as data:
+            # Override the default parameters (config_defaults) with the user given parameters (config.json)
+            config.update(json.load(data))
+    else:
+        logger.error("config.json does not exist")
+        raise Exception("config.json does not exist in path: "+str(config_path))
+
+    # Check to see if a legacy config_emhass.yaml was provided
+    if legacy_config_path and pathlib.Path(legacy_config_path).is_file():
+        with open(legacy_config_path, 'r') as file:
+            legacy_config = yaml.load(file, Loader=yaml.FullLoader)
+        legacy_config_parameters = build_legacy_config_params(emhass_conf,legacy_config,logger)
+        if type(legacy_config_parameters) is not bool:
+            config.update(legacy_config_parameters)
+
+    return config
+
 
-def build_params(params: dict, params_secrets: dict, options: dict,
+def build_legacy_config_params(emhass_conf: dict, legacy_config: dict,
                  logger: logging.Logger) -> dict:
     """
-    Build the main params dictionary from the loaded options.json.
+    Build the config dictionary from a legacy config_emhass.yaml file.
 
-    :param params: The main params dictionary
-    :type params: dict
+    :param emhass_conf: Dictionary containing the needed emhass paths
+    :type emhass_conf: dict
+    :param legacy_config: The legacy config parameters dictionary
+    :type legacy_config: dict
+    :param logger: The logger object
+    :type logger: logging.Logger
+    :return: The built config dictionary
+    :rtype: dict
+    """
+    legacy_config['retrieve_hass_conf'] = legacy_config.get('retrieve_hass_conf',{})
+    legacy_config['optim_conf'] = legacy_config.get('optim_conf',{})
+    legacy_config['plant_conf'] = legacy_config.get('plant_conf',{})
+    config = {}
+
+    # Use the associations list to map legacy parameter names to new parameter names
+    if emhass_conf['associations_path'].exists():
+        with emhass_conf['associations_path'].open('r') as data:
+            associations = list(csv.reader(data, delimiter=","))
+    else:
+        logger.error("Can't find associations file (associations.csv) in: " + str(emhass_conf['associations_path']))
+        return False
+
+    logger.debug("Overriding parameters from config_emhass.yaml:")
+
+    # Append config with legacy config parameters (converting the legacy parameter naming convention via the associations list)
+    for association in associations:
+        if legacy_config.get(association[0],None) is not None and legacy_config[association[0]].get(association[1],None) is not None:
+            config[association[2]] = legacy_config[association[0]][association[1]]
+
+    return config
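+# Illustrative sketch of the mapping above (row values assumed, matching the
+# legacy names removed further below): an associations.csv row is
+#   [config category, legacy name, new name], e.g.
+#   ['optim_conf', 'num_def_loads', 'number_of_deferrable_loads']
+# so a legacy file containing {'optim_conf': {'num_def_loads': 2}} produces
+#   config == {'number_of_deferrable_loads': 2}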
 
+def build_secrets(emhass_conf: dict, logger: logging.Logger, argument: Optional[dict] = {}, options_path: Optional[str] = None,
+    secrets_path: Optional[str] = None, no_response: Optional[bool] = False) -> Tuple[dict, dict]:
+    """
+    Retrieve parameters from secret locations (ENV, ARG, secrets file (secrets_emhass.yaml) and/or Home Assistant via API).
+    Priority order (lowest to highest) = defaults (written below), ENV, options file / Home Assistant API, secrets file, arguments.
+
+    :param emhass_conf: Dictionary containing the needed emhass paths
+    :type emhass_conf: dict
+    :param logger: The logger object
+    :type logger: logging.Logger
+    :param argument: dictionary of secrets arguments passed (url, key)
+    :type argument: dict
+    :param options_path: path to the options file (options.json) (provided by EMHASS-Add-on)
+    :type options_path: str
+    :param secrets_path: path to the secrets file (secrets_emhass.yaml)
+    :type secrets_path: str
+    :param no_response: bypass the get request to Home Assistant (use if json response errors occur)
+    :type no_response: bool
+    :return: Tuple of emhass_conf and the built secrets dictionary
+    :rtype: Tuple[dict, dict]
+    """
+
+    # Set defaults to be overwritten
+    params_secrets = {
+        "hass_url": "https://myhass.duckdns.org/",
+        "long_lived_token": "thatverylongtokenhere",
+        "time_zone": "Europe/Paris",
+        "Latitude": 45.83,
+        "Longitude": 6.86,
+        "Altitude": 4807.8,
+        "solcast_api_key": "yoursecretsolcastapikey",
+        "solcast_rooftop_id": "yourrooftopid",
+        "solar_forecast_kwp": 5
+    }
+
+    # Obtain secrets from ARG or ENV?
+    params_secrets['hass_url'] = os.getenv("EMHASS_URL",params_secrets['hass_url'])
+    params_secrets['long_lived_token'] = os.getenv("SUPERVISOR_TOKEN", params_secrets['long_lived_token'])
+    params_secrets['time_zone'] = os.getenv("TIME_ZONE", default="Europe/Paris")
+    params_secrets['Latitude'] = float(os.getenv("LAT", default="45.83"))
+    params_secrets['Longitude'] = float(os.getenv("LON", default="6.86"))
+    params_secrets['Altitude'] = float(os.getenv("ALT", default="4807.8"))
+
+    # Obtain secrets from options.json (from EMHASS-Add-on)
+    options = {}
+    if options_path and pathlib.Path(options_path).is_file():
+        with pathlib.Path(options_path).open('r') as data:
+            options = json.load(data)
+
+    # Obtain secrets from Home Assistant?
+    url_from_options = options.get('hass_url', 'empty')
+    key_from_options = options.get('long_lived_token', 'empty')
+
+    # If a data path is specified by options
+    if options is not None:
+        if options.get('data_path', None) != None and pathlib.Path(options['data_path']).exists():
+            emhass_conf['data_path'] = pathlib.Path(options['data_path'])
+
+    # Check to use the Home Assistant local API
+    if (url_from_options == 'empty' or url_from_options == '' or url_from_options == "http://supervisor/core/api") and os.getenv("SUPERVISOR_TOKEN", None) is not None:
+        params_secrets['hass_url'] = "http://supervisor/core/api"
+        headers = {
+            "Authorization": "Bearer " + params_secrets['long_lived_token'],
+            "content-type": "application/json"
+        }
+        if not no_response:
+            # Obtain secrets from Home Assistant via API
+            logger.debug("Obtaining secrets from Home Assistant API")
+            response = get(params_secrets['hass_url'] + "/config", headers=headers)
+            if response.status_code < 400:
+                config_hass = response.json()
+                params_secrets = {
+                    'hass_url': params_secrets['hass_url'],
+                    'long_lived_token': params_secrets['long_lived_token'],
+                    'time_zone': config_hass['time_zone'],
+                    'Latitude': config_hass['latitude'],
+                    'Longitude': config_hass['longitude'],
+                    'Altitude': config_hass['elevation']
+                }
+    else:
+        params_secrets['hass_url'] = url_from_options
+        # Obtain secrets, if any, from options.json
+        logger.debug("Obtaining secrets from options.json")
+        if options.get('time_zone',None) is not None and options['time_zone'] != "empty":
+            params_secrets['time_zone'] = options['time_zone']
+        if options.get('Latitude',None) is not None:
+            params_secrets['Latitude'] = options['Latitude']
+        if options.get('Longitude',None) is not None:
+            params_secrets['Longitude'] = options['Longitude']
+        if options.get('Altitude',None) is not None:
+            params_secrets['Altitude'] = options['Altitude']
+        if key_from_options != 'empty' and key_from_options != '':
+            params_secrets['long_lived_token'] = key_from_options
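+    # Example secrets_emhass.yaml layout (placeholder values mirroring the
+    # defaults above; adjust for your own installation):
+    #   hass_url: https://myhass.duckdns.org/
+    #   long_lived_token: thatverylongtokenhere
+    #   time_zone: Europe/Paris
+    #   Latitude: 45.83
+    #   Longitude: 6.86
+    #   Altitude: 4807.8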
 
+    # Obtain secrets from secrets_emhass.yaml?
+    if secrets_path and pathlib.Path(secrets_path).is_file():
+        logger.debug("Obtaining secrets from secrets file")
+        with open(pathlib.Path(secrets_path), 'r') as file:
+            params_secrets.update(yaml.load(file, Loader=yaml.FullLoader))
+    elif pathlib.Path(os.getenv('SECRETS_PATH', default='/app/secrets_emhass.yaml')).is_file():
+        logger.debug("Obtaining secrets from secrets file")
+        with open(os.getenv('SECRETS_PATH', default='/app/secrets_emhass.yaml'), 'r') as file:
+            params_secrets.update(yaml.load(file, Loader=yaml.FullLoader))
+
+    # Receive the key and url from arguments
+    if argument.get('url',None) is not None:
+        params_secrets['hass_url'] = argument['url']
+        logger.debug("Obtaining url from passed argument")
+    if argument.get('key',None) is not None:
+        params_secrets['long_lived_token'] = argument['key']
+        logger.debug("Obtaining long_lived_token from passed argument")
+
+    return emhass_conf, params_secrets
+
+
 
-def build_params(params: dict, params_secrets: dict, options: dict,
+def build_params(emhass_conf: dict, params_secrets: dict, config: dict,
                  logger: logging.Logger) -> dict:
     """
-    Build the main params dictionary from the loaded options.json.
+    Build the main params dictionary from the loaded config.
 
-    :param params: The main params dictionary
-    :type params: dict
+    :param emhass_conf: Dictionary containing the needed emhass paths
+    :type emhass_conf: dict
     :param params_secrets: The dictionary containing the secret protected variables
     :type params_secrets: dict
-    :param options: The load dictionary from options.json
-    :type options: dict
+    :param config: The dictionary of pre-built parameters (config)
+    :type config: dict
     :param logger: The logger object
     :type logger: logging.Logger
     :return: The built params dictionary
     :rtype: dict
     """
-    params['params_secrets'] = params_secrets
-    params['retrieve_hass_conf'] = params.get('retrieve_hass_conf',{})
-    params['optim_conf'] = params.get('optim_conf',{})
-    params['plant_conf'] = params.get('plant_conf',{})
-
-    associations = []
-    associations_dict = {}
-    associations.append(['retrieve_hass_conf', 'freq', 'optimization_time_step'])
-    associations.append(['retrieve_hass_conf', 'days_to_retrieve', 'historic_days_to_retrieve'])
-    associations.append(['retrieve_hass_conf', 'var_PV', 'sensor_power_photovoltaics'])
-    associations.append(['retrieve_hass_conf', 'var_load', 'sensor_power_load_no_var_loads'])
-    associations.append(['retrieve_hass_conf', 'load_negative', 'load_negative'])
-    associations.append(['retrieve_hass_conf', 'set_zero_min', 'set_zero_min'])
-    associations.append(['retrieve_hass_conf', 'var_replace_zero', 'list_sensor_replace_zero','sensor_replace_zero'])
-    associations.append(['retrieve_hass_conf', 'var_interp', 'list_sensor_linear_interp','sensor_linear_interp'])
-    associations.append(['retrieve_hass_conf', 'method_ts_round', 'method_ts_round'])
-    associations.append(['retrieve_hass_conf', 'continual_publish', 'continual_publish'])
-    associations.append(['params_secrets', 'time_zone', 'time_zone'])
-    associations.append(['params_secrets', 'lat', 'Latitude'])
-    associations.append(['params_secrets', 'lon', 'Longitude'])
-    associations.append(['params_secrets', 'alt', 'Altitude'])
-    associations.append(['optim_conf', 'set_use_battery', 'set_use_battery'])
-    associations.append(['optim_conf', 'num_def_loads', 'number_of_deferrable_loads'])
-    associations.append(['optim_conf', 'num_def_loads', 'number_of_deferrable_loads'])
-    associations.append(['optim_conf', 
'P_deferrable_nom', 'list_nominal_power_of_deferrable_loads','nominal_power_of_deferrable_loads']) - associations.append(['optim_conf', 'def_total_hours', 'list_operating_hours_of_each_deferrable_load','operating_hours_of_each_deferrable_load']) - associations.append(['optim_conf', 'treat_def_as_semi_cont', 'list_treat_deferrable_load_as_semi_cont','treat_deferrable_load_as_semi_cont']) - associations.append(['optim_conf', 'set_def_constant', 'list_set_deferrable_load_single_constant','set_deferrable_load_single_constant']) - associations.append(['optim_conf', 'def_start_penalty', 'list_set_deferrable_startup_penalty','set_deferrable_startup_penalty']) - associations.append(['optim_conf', 'def_start_penalty', 'list_set_deferrable_startup_penalty','set_deferrable_startup_penalty']) - associations.append(['optim_conf', 'delta_forecast', 'delta_forecast_daily']) - associations.append(['optim_conf', 'load_forecast_method', 'load_forecast_method']) - associations.append(['optim_conf', 'load_cost_forecast_method', 'load_cost_forecast_method']) - associations.append(['optim_conf', 'load_cost_hp', 'load_peak_hours_cost']) - associations.append(['optim_conf', 'load_cost_hc', 'load_offpeak_hours_cost']) - associations.append(['optim_conf', 'prod_price_forecast_method', 'production_price_forecast_method']) - associations.append(['optim_conf', 'prod_sell_price', 'photovoltaic_production_sell_price']) - associations.append(['optim_conf', 'set_total_pv_sell', 'set_total_pv_sell']) - associations.append(['optim_conf', 'lp_solver', 'lp_solver']) - associations.append(['optim_conf', 'lp_solver_path', 'lp_solver_path']) - associations.append(['optim_conf', 'set_nocharge_from_grid', 'set_nocharge_from_grid']) - associations.append(['optim_conf', 'set_nodischarge_to_grid', 'set_nodischarge_to_grid']) - associations.append(['optim_conf', 'set_battery_dynamic', 'set_battery_dynamic']) - associations.append(['optim_conf', 'battery_dynamic_max', 'battery_dynamic_max']) - associations.append(['optim_conf', 'battery_dynamic_min', 'battery_dynamic_min']) - associations.append(['optim_conf', 'weight_battery_discharge', 'weight_battery_discharge']) - associations.append(['optim_conf', 'weight_battery_charge', 'weight_battery_charge']) - associations.append(['optim_conf', 'weather_forecast_method', 'weather_forecast_method']) - associations.append(['optim_conf', 'def_start_timestep', 'list_start_timesteps_of_each_deferrable_load','start_timesteps_of_each_deferrable_load']) - associations.append(['optim_conf', 'def_end_timestep', 'list_end_timesteps_of_each_deferrable_load','end_timesteps_of_each_deferrable_load']) - associations.append(['plant_conf', 'P_from_grid_max', 'maximum_power_from_grid']) - associations.append(['plant_conf', 'P_to_grid_max', 'maximum_power_to_grid']) - associations.append(['plant_conf', 'module_model', 'list_pv_module_model','pv_module_model']) - associations.append(['plant_conf', 'inverter_model', 'list_pv_inverter_model','pv_inverter_model']) - associations.append(['plant_conf', 'surface_tilt', 'list_surface_tilt','surface_tilt']) - associations.append(['plant_conf', 'surface_azimuth', 'list_surface_azimuth','surface_azimuth']) - associations.append(['plant_conf', 'modules_per_string', 'list_modules_per_string','modules_per_string']) - associations.append(['plant_conf', 'strings_per_inverter', 'list_strings_per_inverter','strings_per_inverter']) - associations.append(['plant_conf', 'inverter_is_hybrid', 'inverter_is_hybrid']) - associations.append(['plant_conf', 'compute_curtailment', 
'compute_curtailment'])
-    associations.append(['plant_conf', 'Pd_max', 'battery_discharge_power_max'])
-    associations.append(['plant_conf', 'Pc_max', 'battery_charge_power_max'])
-    associations.append(['plant_conf', 'eta_disch', 'battery_discharge_efficiency'])
-    associations.append(['plant_conf', 'eta_ch', 'battery_charge_efficiency'])
-    associations.append(['plant_conf', 'Enom', 'battery_nominal_energy_capacity'])
-    associations.append(['plant_conf', 'SOCmin', 'battery_minimum_state_of_charge'])
-    associations.append(['plant_conf', 'SOCmax', 'battery_maximum_state_of_charge'])
-    associations.append(['plant_conf', 'SOCtarget', 'battery_target_state_of_charge'])
-
-    logger.debug("Overriding config parameters with optional parameters with associations:")
-    for i in associations:
-        #logger.info(str(i[1]) +":" + str(params[i[0]][i[1]]))
-        #check if params parameter has multiple options
-        if type(i[2]) is list:
-            params[i[0]][i[1]] = []
-            for j in i[2]:
-                params[i[0]][i[1]].append(options[j])
-        # Check if options are null
-        elif options.get(i[2],None) is not None:
-            # Check if options parameter is list
-            if len(i) == 4:
-                params[i[0]][i[1]] = [x[i[3]] for x in options[i[2]]]
-            elif type(options[i[2]]) is list:
-                params[i[0]][i[1]] = [options[i[2]]]
-            else:
-                params[i[0]][i[1]] = options[i[2]]
-            associations_dict.update({i[1]:i[2]})
-        #logger.info(str(i[1]) +":" + str(params[i[0]][i[1]]))
+    if type(params_secrets) is not dict:
+        params_secrets = {}
+
+    params = {}
+    params['retrieve_hass_conf'] = {}
+    params['params_secrets'] = {}
+    params['optim_conf'] = {}
+    params['plant_conf'] = {}
 
-    # Update optional param secrets
-    if params["optim_conf"]["weather_forecast_method"] == "solcast":
-        params["params_secrets"]["solcast_api_key"] = options.get("optional_solcast_api_key", params_secrets.get("solcast_api_key", "123456"))
-        params["params_secrets"]["solcast_rooftop_id"] = options.get("optional_solcast_rooftop_id", params_secrets.get("solcast_rooftop_id", "123456"))
-    elif params["optim_conf"]["weather_forecast_method"] == "solar.forecast":
-        params["params_secrets"]["solar_forecast_kwp"] = options.get("optional_solar_forecast_kwp", params_secrets.get("solar_forecast_kwp", 5))
-    # Make and set list_hp_periods
-    if (options.get("list_peak_hours_periods_start_hours", None) != None and options.get("list_peak_hours_periods_end_hours", None) != None):
-        start_hours_list = [i["peak_hours_periods_start_hours"] for i in options["list_peak_hours_periods_start_hours"]]
-        end_hours_list = [i["peak_hours_periods_end_hours"] for i in options["list_peak_hours_periods_end_hours"]]
-        num_peak_hours = len(start_hours_list)
-        list_hp_periods_list = [{'period_hp_'+str(i+1):[{'start':start_hours_list[i]},{'end':end_hours_list[i]}]} for i in range(num_peak_hours)]
-        params['optim_conf']['list_hp_periods'] = list_hp_periods_list
+    associations_path = emhass_conf.get('associations_path', get_root(__file__, num_parent=2) / 'data/associations.csv')
+    if associations_path.exists():
+        with associations_path.open('r') as data:
+            associations = list(csv.reader(data, delimiter=","))
+    else:
+        logger.error("Can't find associations file (associations.csv) in: " + str(associations_path))
+        return False
+
+    # Use the associations list to append parameters from config into params (under the corresponding config categories)
+    for association in associations:
+        if config.get(association[2],None) is not None:
+            params[association[0]][association[2]] = config[association[2]]
+        # If the parameter has an alternative list name
+        elif len(association) == 4 and config.get(association[3],None) is not None:
+            # Extract lists of dictionaries
+            if config[association[3]] and type(config[association[3]][0]) is dict:
+                params[association[0]][association[2]] = [i[association[2]] for i in config[association[3]]]
+            else:
+                params[association[0]][association[2]] = config[association[3]]
+
+    # Check if we need to create `load_peak_hour_periods` from config.json
+    if params.get('optim_conf',None) is not None and params['optim_conf'].get('load_peak_hour_periods',None) is None:
+        if config.get("list_peak_hours_periods_start_hours", None) is not None and config.get("list_peak_hours_periods_end_hours", None) is not None:
+            start_hours_list = [i["peak_hours_periods_start_hours"] for i in config["list_peak_hours_periods_start_hours"]]
+            end_hours_list = [i["peak_hours_periods_end_hours"] for i in config["list_peak_hours_periods_end_hours"]]
+            num_peak_hours = len(start_hours_list)
+            list_hp_periods_list = [{'period_hp_'+str(i+1):[{'start':start_hours_list[i]},{'end':end_hours_list[i]}]} for i in range(num_peak_hours)]
+            params['optim_conf']['load_peak_hour_periods'] = list_hp_periods_list
+        else:
+            logger.warning("Unable to detect or create the load_peak_hour_periods parameter")
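+    # Illustrative example (hour values assumed): given
+    #   list_peak_hours_periods_start_hours = [{"peak_hours_periods_start_hours": "02:54"}]
+    #   list_peak_hours_periods_end_hours = [{"peak_hours_periods_end_hours": "15:24"}]
+    # the block above builds
+    #   load_peak_hour_periods == [{'period_hp_1': [{'start': '02:54'}, {'end': '15:24'}]}]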
     # Check parameter lists have the same amounts as deferrable loads
     # If not, pad with defaults to fill in the gaps
-    num_def_loads = params['optim_conf']['num_def_loads']
-    params['optim_conf']['def_start_timestep'] = check_def_loads(num_def_loads,params['optim_conf']['def_start_timestep'],0,'def_start_timestep',logger)
-    params['optim_conf']['def_end_timestep'] = check_def_loads(num_def_loads,params['optim_conf']['def_end_timestep'],0,'def_end_timestep',logger)
-    params['optim_conf']['set_def_constant'] = check_def_loads(num_def_loads,params['optim_conf']['set_def_constant'],False,'set_def_constant',logger)
-    params['optim_conf']['treat_def_as_semi_cont'] = check_def_loads(num_def_loads,params['optim_conf']['treat_def_as_semi_cont'],True,'treat_def_as_semi_cont',logger)
-    params['optim_conf']['def_start_penalty'] = check_def_loads(num_def_loads,params['optim_conf']['def_start_penalty'],0.0,'def_start_penalty',logger)
-    params['optim_conf']['def_total_hours'] = check_def_loads(num_def_loads,params['optim_conf']['def_total_hours'],0,'def_total_hours',logger)
-    params['optim_conf']['P_deferrable_nom'] = check_def_loads(num_def_loads,params['optim_conf']['P_deferrable_nom'],0,'P_deferrable_nom',logger)
+    if params['optim_conf'].get('number_of_deferrable_loads',None) is not None:
+        num_def_loads = params['optim_conf']['number_of_deferrable_loads']
+        params['optim_conf']['start_timesteps_of_each_deferrable_load'] = check_def_loads(num_def_loads,params['optim_conf'],0,'start_timesteps_of_each_deferrable_load',logger)
+        params['optim_conf']['end_timesteps_of_each_deferrable_load'] = check_def_loads(num_def_loads,params['optim_conf'],0,'end_timesteps_of_each_deferrable_load',logger)
+        params['optim_conf']['set_deferrable_load_single_constant'] = check_def_loads(num_def_loads,params['optim_conf'],False,'set_deferrable_load_single_constant',logger)
+        params['optim_conf']['treat_deferrable_load_as_semi_cont'] = check_def_loads(num_def_loads,params['optim_conf'],True,'treat_deferrable_load_as_semi_cont',logger)
+        params['optim_conf']['set_deferrable_startup_penalty'] = check_def_loads(num_def_loads,params['optim_conf'],0.0,'set_deferrable_startup_penalty',logger)
+        params['optim_conf']['operating_hours_of_each_deferrable_load'] = check_def_loads(num_def_loads,params['optim_conf'],0,'operating_hours_of_each_deferrable_load',logger)
+        params['optim_conf']['nominal_power_of_deferrable_loads'] = check_def_loads(num_def_loads,params['optim_conf'],0,'nominal_power_of_deferrable_loads',logger)
+    else:
+        logger.warning("Unable to obtain parameter: number_of_deferrable_loads")
     # days_to_retrieve should be no less than 2
-    if params["retrieve_hass_conf"]["days_to_retrieve"] < 2:
-        params["retrieve_hass_conf"]["days_to_retrieve"] = 2
+    if params["retrieve_hass_conf"].get('historic_days_to_retrieve',None) is not None and params["retrieve_hass_conf"]['historic_days_to_retrieve'] < 2:
+        params["retrieve_hass_conf"]['historic_days_to_retrieve'] = 2
        logger.warning("days_to_retrieve should not be lower than 2, setting days_to_retrieve to 2. Make sure your sensors also have at least 2 days of history")
+
+    # Format load_peak_hour_periods list into a dict
+    if params['optim_conf'].get('load_peak_hour_periods',None) is not None:
+        params['optim_conf']['load_peak_hour_periods'] = dict(
+            (key, d[key]) for d in params['optim_conf']['load_peak_hour_periods'] for key in d
+        )
+
+    # Configure secrets
+    params['retrieve_hass_conf']["hass_url"] = params_secrets.get("hass_url",None)
+    params['retrieve_hass_conf']["long_lived_token"] = params_secrets.get("long_lived_token",None)
+    params['retrieve_hass_conf']["time_zone"] = params_secrets.get("time_zone",None)
+    params['retrieve_hass_conf']['Latitude'] = params_secrets.get('Latitude',None)
+    params['retrieve_hass_conf']['Longitude'] = params_secrets.get('Longitude',None)
+    params['retrieve_hass_conf']['Altitude'] = params_secrets.get('Altitude',None)
+    # Update optional param secrets
+    if params["optim_conf"].get('weather_forecast_method',None):
+        if params["optim_conf"]['weather_forecast_method'] == "solcast":
+            params["params_secrets"]["solcast_api_key"] = params_secrets.get("solcast_api_key", "123456")
+            params["params_secrets"]["solcast_rooftop_id"] = params_secrets.get("solcast_rooftop_id", "123456")
+        elif params["optim_conf"]['weather_forecast_method'] == "solar.forecast":
+            params["params_secrets"]["solar_forecast_kwp"] = params_secrets.get("solar_forecast_kwp", 5)
+    else:
+        logger.warning("Unable to detect weather_forecast_method parameter")
 
     # The params dict
     params["passed_data"] = {
@@ -893,9 +1048,9 @@ def build_params(params: dict, params_secrets: dict, options: dict,
         "prediction_horizon": None,
         "soc_init": None,
         "soc_final": None,
-        "def_total_hours": None,
-        "def_start_timestep": None,
-        "def_end_timestep": None,
+        'operating_hours_of_each_deferrable_load': None,
+        'start_timesteps_of_each_deferrable_load': None,
+        'end_timesteps_of_each_deferrable_load': None,
         "alpha": None,
         "beta": None,
    }
@@ -908,7 +1063,7 @@ def check_def_loads(num_def_loads: int, parameter: list[dict], default, paramete
 
     :param num_def_loads: Total number of deferrable loads
     :type num_def_loads: int
-    :param parameter: parameter list to check length
+    :param parameter: the config dictionary containing the parameter list to check
     :type parameter: dict
     :param default: default value for parameter to pad missing
     :type: obj
@@ -920,11 +1075,11 @@ def check_def_loads(num_def_loads: int, parameter: list[dict], default, paramete
     :rtype: list[dict]
 
     """
-    if num_def_loads > len(parameter):
+    if parameter.get(parameter_name,None) is not None and type(parameter[parameter_name]) is list and num_def_loads > len(parameter[parameter_name]):
        logger.warning(parameter_name + " does not match number_of_deferrable_loads, adding default values ("+ str(default) + ") to parameter")
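+        # Illustrative behaviour (assumed values): with num_def_loads == 3 and
+        # parameter['operating_hours_of_each_deferrable_load'] == [5], the loop
+        # below pads the list with the default (here 0) to give [5, 0, 0].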
diff --git a/src/emhass/web_server.py b/src/emhass/web_server.py
index 5453884f..a6de0669 100644
--- a/src/emhass/web_server.py
+++ b/src/emhass/web_server.py
@@ -16,7 +16,7 @@
 from emhass.command_line import regressor_model_fit, regressor_model_predict
 from emhass.command_line import publish_data, continual_publish
 from emhass.utils import get_injection_dict, get_injection_dict_forecast_model_fit, \
-    get_injection_dict_forecast_model_tune, build_params
+    get_injection_dict_forecast_model_tune, build_config, build_secrets, build_params
 
 # Define the Flask instance
 app = Flask(__name__)
@@ -125,9 +125,9 @@ def action_call(action_name):
         return make_response(grabLog(ActionStr), 400)
 
     # If continual_publish is True, start thread with loop function
-    if len(continual_publish_thread) == 0 and input_data_dict['retrieve_hass_conf'].get("continual_publish",False):
+    if len(continual_publish_thread) == 0 and input_data_dict['retrieve_hass_conf'].get('continual_publish',False):
         # Start Thread
-        continualLoop = threading.Thread(name="continual_publish",target=continual_publish,args=[input_data_dict,entity_path,app.logger])
+        continualLoop = threading.Thread(name='continual_publish',target=continual_publish,args=[input_data_dict,entity_path,app.logger])
         continualLoop.start()
         continual_publish_thread.append(continualLoop)
 
@@ -245,56 +245,58 @@ def action_call(action_name):
     parser.add_argument('--addon', type=strtobool, default='False', help='Define if we are using EMHASS with the add-on or in standalone mode')
     parser.add_argument('--no_response', type=strtobool, default='False', help='This is set if json response errors occur')
     args = parser.parse_args()
+
+    # Pre-built parameters (raw, from the config.json, legacy config_emhass.yaml, options.json and secrets_emhass.yaml parameter files)
+    config = {}
+    # Secrets
+    params_secrets = {}
+    # Built parameters (config and secrets combined)
+    params = None
 
     # Find env's, if not set defaults
-    use_options = os.getenv('USE_OPTIONS', default=False)
-    CONFIG_PATH = os.getenv("CONFIG_PATH", default="/app/config_emhass.yaml")
-    OPTIONS_PATH = os.getenv('OPTIONS_PATH', default="/app/options.json")
+    CONFIG_PATH = os.getenv('CONFIG_PATH', default="/app/config.json")
+    OPTIONS_PATH = os.getenv('OPTIONS_PATH', default="/app/options.json")
+    DEFAULTS_PATH = os.getenv('DEFAULTS_PATH', default="/app/data/config_defaults.json")
+    ASSOCIATIONS_PATH = os.getenv('ASSOCIATIONS_PATH', default="/app/data/associations.csv")
+    LEGACY_CONFIG_PATH = os.getenv("LEGACY_CONFIG_PATH", default="/app/config_emhass.yaml")
     DATA_PATH = os.getenv("DATA_PATH", default="/app/data/")
     ROOT_PATH = os.getenv("ROOT_PATH", default=str(Path(__file__).parent))
-    # Options None by default
-    options = None
-    # Define the paths
-    options_json = Path(OPTIONS_PATH)
     config_path = Path(CONFIG_PATH)
+    options_path = Path(OPTIONS_PATH)
+    defaults_path = Path(DEFAULTS_PATH)
+    associations_path = Path(ASSOCIATIONS_PATH)
+    legacy_config_path = Path(LEGACY_CONFIG_PATH)
     data_path = Path(DATA_PATH)
     root_path = Path(ROOT_PATH)
     emhass_conf = {}
-    emhass_conf['config_path'] = options_json
     emhass_conf['config_path'] = config_path
+    emhass_conf['options_path'] = options_path
+    emhass_conf['defaults_path'] = defaults_path
+    emhass_conf['associations_path'] = associations_path
+    emhass_conf['legacy_config_path'] = legacy_config_path
     emhass_conf['data_path'] = data_path
     emhass_conf['root_path'] = root_path
-
-    # Read options info
-    if options_json.exists():
-        with options_json.open('r') as data:
-            options = json.load(data)
-    else:
-        app.logger.error("options.json does not exist")
-        raise Exception("options.json does not exist in path: "+str(options_json))
-
-    # If data path specified by options.json
-    if options is not None:
-        if options.get('data_path', None) != None and options.get('data_path', None) != "default":
-            DATA_PATH = options.get('data_path', None);
-
-    # Check to see if legacy config_emhass.yaml was provided
-    params = {}
-    if config_path.exists():
-        with open(config_path, 'r') as file:
-            config = yaml.load(file, Loader=yaml.FullLoader)
-        retrieve_hass_conf = config['retrieve_hass_conf']
-        optim_conf = config['optim_conf']
-        plant_conf = config['plant_conf']
-        params['retrieve_hass_conf'] = retrieve_hass_conf
-        params['optim_conf'] = optim_conf
-        params['plant_conf'] = plant_conf
-    else:
-        params = {}
-
+    web_ui_url = '0.0.0.0'
+
+    # Combine parameters from all configuration files (if they exist)
+    config.update(build_config(emhass_conf,app.logger,defaults_path,config_path,legacy_config_path))
+    costfun = os.getenv('LOCAL_COSTFUN', config.get('costfun', 'profit'))
+    logging_level = os.getenv('LOGGING_LEVEL', config.get('logging_level','INFO'))
+
+    ## Secrets
+    argument = {}
+    if args.url:
+        argument['url'] = args.url
+    if args.key:
+        argument['key'] = args.key
+
+    # Combine secrets from ENV, ARG, secrets file and/or Home Assistant
+    emhass_conf, built_secrets = build_secrets(emhass_conf,app.logger,argument,options_path,os.getenv('SECRETS_PATH', default='/app/secrets_emhass.yaml'), bool(args.no_response))
+    params_secrets.update(built_secrets)
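For orientation, the new startup sequence reduces to three calls; this is a hedged sketch reusing the call signatures visible in the hunk above, with the argument dict and paths being illustrative assumptions rather than part of the patch:

    # Merge config_defaults.json, config.json and the legacy config_emhass.yaml
    config = build_config(emhass_conf, app.logger, defaults_path, config_path, legacy_config_path)
    # Layer secrets from CLI arguments, options.json, the secrets file and/or Home Assistant
    argument = {'url': 'http://homeassistant.local:8123', 'key': '<long-lived-token>'}  # hypothetical values
    emhass_conf, params_secrets = build_secrets(emhass_conf, app.logger, argument,
                                                options_path, '/app/secrets_emhass.yaml', False)
    # Combine everything into the final params dict
    params = build_params(emhass_conf, params_secrets, config, app.logger)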
 
     # Initialize this global dict
     if (emhass_conf['data_path'] / 'injection_dict.pkl').exists():
@@ -302,75 +304,18 @@ def action_call(action_name):
             injection_dict = pickle.load(fid)
     else:
         injection_dict = None
-
-    ## secrets
-    params_secrets = {}
-
-    # secrets from ARG or ENV?
-    hass_url = os.getenv("EMHASS_URL", default=args.url)
-    key = os.getenv("SUPERVISOR_TOKEN", os.getenv("EMHASS_KEY", args.key))
-    params_secrets['time_zone'] = os.getenv("TIME_ZONE", default="Europe/Paris")
-    params_secrets['lat'] = float(os.getenv("LAT", default="45.83"))
-    params_secrets['lon'] = float(os.getenv("LON", default="6.86"))
-    params_secrets['alt'] = float(os.getenv("ALT", default="4807.8"))
-    costfun = os.getenv('LOCAL_COSTFUN', options.get('costfun', 'profit'))
-    logging_level = os.getenv('LOGGING_LEVEL', options.get('logging_level','INFO'))
-    # if url or key is None, Set as empty string to reduce NoneType errors bellow
-    if key is None: key = ""
-    if hass_url is None: hass_url = ""
-
-    # secrets from Home Assistant?
-    url_from_options = options.get('hass_url', 'empty')
-    key_from_options = options.get('long_lived_token', 'empty')
-    # to use Home Assistant local API
-    if (url_from_options == 'empty' or url_from_options == '' or url_from_options == "http://supervisor/core/api") and os.getenv("SUPERVISOR_TOKEN", None) is not None:
-        hass_url = "http://supervisor/core/api/config"
-        headers = {
-            "Authorization": "Bearer " + key,
-            "content-type": "application/json"
-        }
-        if not args.no_response==1:
-            app.logger.debug("obtaining secrets from Home Assistant API")
-            response = get(hass_url, headers=headers)
-            if response.status_code < 400:
-                config_hass = response.json()
-                params_secrets = {
-                    'hass_url': hass_url,
-                    'long_lived_token': key,
-                    'time_zone': config_hass['time_zone'],
-                    'lat': config_hass['latitude'],
-                    'lon': config_hass['longitude'],
-                    'alt': config_hass['elevation']
-                }
-    else:
-        hass_url = url_from_options
-        if key_from_options == 'empty' or key_from_options == '':
-            key = key_from_options
-    # secrets from secrets_emhass.yaml?
-    if Path(os.getenv('SECRETS_PATH', default='/app/secrets_emhass.yaml')).is_file():
-        app.logger.debug("obtaining secrets from secrets file")
-        with open(os.getenv('SECRETS_PATH', default='/app/secrets_emhass.yaml'), 'r') as file:
-            params_secrets = yaml.load(file, Loader=yaml.FullLoader)
-    #Check if URL and KEY are provided by file.
-    if params_secrets.get("hass_url", "empty") != "empty":
-        hass_url = params_secrets['hass_url']
-    if params_secrets.get("long_lived_token", "empty") != "empty":
-        key = params_secrets['long_lived_token']
-
-    params_secrets['hass_url'] = hass_url
-    params_secrets['long_lived_token'] = key
-
-    # Build params
-    params = build_params(params, params_secrets, options, app.logger)
+    # Build params from config and params_secrets
+    params = build_params(emhass_conf, params_secrets, config, app.logger)
+    if type(params) is bool:
+        raise Exception("An error has occurred while building parameters")
 
     if os.path.exists(str(emhass_conf['data_path'])):
         with open(str(emhass_conf['data_path'] / 'params.pkl'), "wb") as fid:
             pickle.dump((config_path, params), fid)
     else:
         raise Exception("missing: " + str(emhass_conf['data_path']))
 
-    # Define logger
-    # Stream logger
+    # Define loggers
     ch = logging.StreamHandler()
     formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
     ch.setFormatter(formatter)
@@ -418,7 +363,7 @@ def action_call(action_name):
     # Launch server
     port = int(os.environ.get('PORT', 5000))
     app.logger.info("Launching the emhass webserver at: http://"+web_ui_url+":"+str(port))
-    app.logger.info("Home Assistant data fetch will be performed using url: "+hass_url)
+    app.logger.info("Home Assistant data fetch will be performed using url: "+params_secrets['hass_url'])
     app.logger.info("The data path is: "+str(emhass_conf['data_path']))
     try:
         app.logger.info("Using core emhass version: "+version('emhass'))
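For reference, the params object pickled to params.pkl above is a plain dict of configuration sections; a condensed sketch of its shape as implied by this patch, with keys abbreviated and values purely illustrative:

    params = {
        'retrieve_hass_conf': {'hass_url': '...', 'long_lived_token': '...',
                               'optimization_time_step': 30, 'historic_days_to_retrieve': 2},
        'optim_conf': {'number_of_deferrable_loads': 2, 'weather_forecast_method': 'scrapper'},
        'plant_conf': {'pv_module_model': ['...'], 'pv_inverter_model': ['...']},
        'passed_data': {'prediction_horizon': None, 'soc_init': None, 'soc_final': None},
    }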
diff --git a/tests/test_command_line_utils.py b/tests/test_command_line_utils.py
index b779ed49..ba31fd0e 100644
--- a/tests/test_command_line_utils.py
+++ b/tests/test_command_line_utils.py
@@ -4,7 +4,7 @@
 import unittest
 from unittest.mock import patch
 import pandas as pd
-import pathlib, json, yaml, copy
+import pathlib, json, copy
 import numpy as np
 
 from emhass.command_line import set_input_data_dict
@@ -25,40 +25,37 @@ from emhass import utils
 
 # create paths
-
-root = str(utils.get_root(__file__, num_parent=2))
+root = pathlib.Path(utils.get_root(__file__, num_parent=2))
 emhass_conf = {}
-emhass_conf['config_path'] = pathlib.Path(root) / 'config_emhass.yaml'
-emhass_conf['data_path'] = pathlib.Path(root) / 'data/'
-emhass_conf['root_path'] = pathlib.Path(root) / 'src/emhass/'
+emhass_conf['data_path'] = root / 'data/'
+emhass_conf['root_path'] = root / 'src/emhass/'
+emhass_conf['config_path'] = root / 'config.json'
+emhass_conf['defaults_path'] = emhass_conf['data_path'] / 'config_defaults.json'
+emhass_conf['associations_path'] = emhass_conf['data_path'] / 'associations.csv'
 
 # create logger
 logger, ch = utils.get_logger(__name__, emhass_conf, save_to_file=False)
 
 class TestCommandLineUtils(unittest.TestCase):
 
     @staticmethod
     def get_test_params():
-        with open(emhass_conf['config_path'], 'r') as file:
-            params = yaml.load(file, Loader=yaml.FullLoader)
-        params.update({
-            'params_secrets': {
-                'hass_url': 'http://supervisor/core/api',
-                'long_lived_token': '${SUPERVISOR_TOKEN}',
-                'time_zone': 'Europe/Paris',
-                'lat': 45.83,
-                'lon': 6.86,
-                'alt': 8000.0
-            }})
-        #Force config params for testing
-        params["retrieve_hass_conf"]['var_PV'] = 'sensor.power_photovoltaics'
-        params["retrieve_hass_conf"]['var_load'] = 'sensor.power_load_no_var_loads'
-        params["retrieve_hass_conf"]['var_replace_zero'] = ['sensor.power_photovoltaics']
-        params["retrieve_hass_conf"]['var_interp'] = ['sensor.power_photovoltaics','sensor.power_load_no_var_loads']
+        # Build default params
+        params = {}
+        if emhass_conf['defaults_path'].exists():
+            with emhass_conf['defaults_path'].open('r') as data:
+                defaults = json.load(data)
+                updated_emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger)
+                emhass_conf.update(updated_emhass_conf)
+                params.update(utils.build_params(emhass_conf, built_secrets, defaults, logger))
+        else:
+            raise Exception("config_defaults.json does not exist in path: "+str(emhass_conf['defaults_path']))
         return params
 
     def setUp(self):
         params = TestCommandLineUtils.get_test_params()
+        # Add runtime parameters for forecast lists
         runtimeparams = {
             'pv_power_forecast':[i+1 for i in range(48)],
             'load_power_forecast':[i+1 for i in range(48)],
@@ -69,8 +66,10 @@ def setUp(self):
         params['passed_data'] = runtimeparams
         self.params_json = json.dumps(params)
 
+    # Test input data for actions (using data from file)
     def test_set_input_data_dict(self):
         costfun = 'profit'
+        # Test dayahead
         action = 'dayahead-optim'
         input_data_dict = set_input_data_dict(emhass_conf, costfun, self.params_json, self.runtimeparams_json,
                                               action, logger, get_data_from_file=True)
@@ -82,7 +81,8 @@ def test_set_input_data_dict(self):
         self.assertTrue(input_data_dict['fcst'].optim_conf['weather_forecast_method']=='list')
         self.assertTrue(input_data_dict['fcst'].optim_conf['load_forecast_method']=='list')
         self.assertTrue(input_data_dict['fcst'].optim_conf['load_cost_forecast_method']=='list')
-        self.assertTrue(input_data_dict['fcst'].optim_conf['prod_price_forecast_method']=='list')
+        self.assertTrue(input_data_dict['fcst'].optim_conf['production_price_forecast_method']=='list')
+        # Test publish data
         action = 'publish-data'
         input_data_dict = set_input_data_dict(emhass_conf, costfun, self.params_json, self.runtimeparams_json,
                                               action, logger, get_data_from_file=True)
@@ -90,6 +90,7 @@ def test_set_input_data_dict(self):
         self.assertTrue(input_data_dict['df_input_data_dayahead'] == None)
         self.assertTrue(input_data_dict['P_PV_forecast'] == None)
         self.assertTrue(input_data_dict['P_load_forecast'] == None)
+        # Test naive mpc
         action = 'naive-mpc-optim'
         input_data_dict = set_input_data_dict(emhass_conf, costfun, self.params_json, self.runtimeparams_json,
                                               action, logger, get_data_from_file=True)
@@ -98,6 +99,7 @@ def test_set_input_data_dict(self):
         self.assertTrue(input_data_dict['df_input_data_dayahead'].index.freq is not None)
         self.assertTrue(input_data_dict['df_input_data_dayahead'].isnull().sum().sum()==0)
         self.assertTrue(len(input_data_dict['df_input_data_dayahead'])==10) # The default value for prediction_horizon
+        # Test naive mpc with a shorter forecast
         runtimeparams = {
             'pv_power_forecast':[1,2,3,4,5,6,7,8,9,10],
             'load_power_forecast':[1,2,3,4,5,6,7,8,9,10],
@@ -115,6 +117,7 @@ def test_set_input_data_dict(self):
         self.assertTrue(input_data_dict['df_input_data_dayahead'].index.freq is not None)
         self.assertTrue(input_data_dict['df_input_data_dayahead'].isnull().sum().sum()==0)
         self.assertTrue(len(input_data_dict['df_input_data_dayahead'])==10) # The default value for prediction_horizon
+        # Test naive mpc with a shorter forecast and prediction horizon = 10
         action = 'naive-mpc-optim'
         runtimeparams['prediction_horizon'] = 10
         runtimeparams_json = json.dumps(runtimeparams)
@@ -141,10 +144,10 @@ def test_set_input_data_dict(self):
         input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, runtimeparams_json,
                                               action, logger, get_data_from_file=True)
         self.assertTrue(input_data_dict['fcst'].optim_conf['load_cost_forecast_method']=='list')
-        self.assertTrue(input_data_dict['fcst'].optim_conf['prod_price_forecast_method']=='list')
+        self.assertTrue(input_data_dict['fcst'].optim_conf['production_price_forecast_method']=='list')
 
+    # Test get_injection_dict after a day-ahead optimization
     def test_webserver_get_injection_dict(self):
-        # First perform a day-ahead optimization
         costfun = 'profit'
         action = 'dayahead-optim'
         input_data_dict = set_input_data_dict(emhass_conf, costfun, self.params_json, self.runtimeparams_json,
@@ -155,7 +158,9 @@ def test_webserver_get_injection_dict(self):
         self.assertIsInstance(injection_dict['table1'], str)
         self.assertIsInstance(injection_dict['table2'], str)
 
+    # Test data formatting of dayahead optimization with load cost and prod price as lists
     def test_dayahead_forecast_optim(self):
+        # Test dataframe output of profit dayahead optimization
         costfun = 'profit'
         action = 'dayahead-optim'
         params = copy.deepcopy(json.loads(self.params_json))
@@ -165,7 +170,7 @@ def test_dayahead_forecast_optim(self):
         self.assertIsInstance(opt_res, pd.DataFrame)
         self.assertTrue(opt_res.isnull().sum().sum()==0)
         self.assertTrue(len(opt_res)==len(params['passed_data']['pv_power_forecast']))
-        # Test passing just load cost and prod price as lists
+        # Test dayahead output, passing just load cost and prod price as runtime lists (costfun=profit)
         action = 'dayahead-optim'
         params = TestCommandLineUtils.get_test_params()
         runtimeparams = {
@@ -181,10 +186,11 @@ def test_dayahead_forecast_optim(self):
         self.assertIsInstance(opt_res, pd.DataFrame)
         self.assertTrue(opt_res.isnull().sum().sum()==0)
         self.assertTrue(input_data_dict['fcst'].optim_conf['load_cost_forecast_method']=='list')
-        self.assertTrue(input_data_dict['fcst'].optim_conf['prod_price_forecast_method']=='list')
+        self.assertTrue(input_data_dict['fcst'].optim_conf['production_price_forecast_method']=='list')
         self.assertEqual(opt_res['unit_load_cost'].values.tolist(), runtimeparams['load_cost_forecast'])
         self.assertEqual(opt_res['unit_prod_price'].values.tolist(), runtimeparams['prod_price_forecast'])
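The list-based forecast methods exercised by these tests are driven entirely by runtime parameters; a minimal sketch of the payload a caller would pass, with illustrative values for one day of data at a 30 minute step:

    import json
    runtimeparams = {
        'pv_power_forecast': [i + 1 for i in range(48)],
        'load_cost_forecast': [0.2] * 48,
        'prod_price_forecast': [0.1] * 48,
    }
    payload = json.dumps(runtimeparams)  # passed as --runtimeparams or in the POST body of an action call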
+    # Test dataframe output of perfect forecast optimization
     def test_perfect_forecast_optim(self):
         costfun = 'profit'
         action = 'perfect-optim'
@@ -197,7 +203,9 @@ def test_perfect_forecast_optim(self):
         self.assertIsInstance(opt_res.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)
         self.assertTrue('cost_fun_'+input_data_dict["costfun"] in opt_res.columns)
 
+    # Test naive mpc optimization
     def test_naive_mpc_optim(self):
+        # Test mpc optimization
         costfun = 'profit'
         action = 'naive-mpc-optim'
         params = copy.deepcopy(json.loads(self.params_json))
@@ -207,16 +215,16 @@ def test_naive_mpc_optim(self):
         self.assertIsInstance(opt_res, pd.DataFrame)
         self.assertTrue(opt_res.isnull().sum().sum()==0)
         self.assertTrue(len(opt_res)==10)
-        # A test similar to the docs
+        # Test mpc optimization with runtime parameters similar to the documentation
         runtimeparams = {"pv_power_forecast": [1,2,3,4,5,6,7,8,9,10],
-                         "prediction_horizon":10, "soc_init":0.5,"soc_final":0.6,"def_total_hours":[1,3],"def_start_timestep":[-3,0],"def_end_timestep":[8,0]}
+                         "prediction_horizon":10, "soc_init":0.5,"soc_final":0.6,'operating_hours_of_each_deferrable_load':[1,3],'start_timesteps_of_each_deferrable_load':[-3,0],'end_timesteps_of_each_deferrable_load':[8,0]}
         runtimeparams_json = json.dumps(runtimeparams)
         params['passed_data'] = runtimeparams
         params['optim_conf']['weather_forecast_method'] = 'list'
         params['optim_conf']['load_forecast_method'] = 'naive'
         params['optim_conf']['load_cost_forecast_method'] = 'hp_hc_periods'
-        params['optim_conf']['prod_price_forecast_method'] = 'constant'
+        params['optim_conf']['production_price_forecast_method'] = 'constant'
         params_json = json.dumps(params)
         input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, runtimeparams_json,
                                               action, logger, get_data_from_file=True)
@@ -225,6 +233,7 @@ def test_naive_mpc_optim(self):
         self.assertTrue(opt_res.isnull().sum().sum()==0)
         self.assertTrue(len(opt_res)==10)
         # Test publish after passing the forecast as list
+        # with method_ts_round=first
         costfun = 'profit'
         action = 'naive-mpc-optim'
         params = copy.deepcopy(json.loads(self.params_json))
@@ -238,6 +247,7 @@ def test_naive_mpc_optim(self):
                                               action, logger, get_data_from_file=True)
         opt_res_first = publish_data(input_data_dict, logger, opt_res_latest=opt_res)
         self.assertTrue(len(opt_res_first)==1)
+        # Test mpc and publish with method_ts_round=last and set_use_battery=true
         action = 'naive-mpc-optim'
         params = copy.deepcopy(json.loads(self.params_json))
         params['retrieve_hass_conf']['method_ts_round'] = 'last'
@@ -251,12 +261,12 @@ def test_naive_mpc_optim(self):
                                               action, logger, get_data_from_file=True)
         opt_res_last = publish_data(input_data_dict, logger, opt_res_latest=opt_res)
         self.assertTrue(len(opt_res_last)==1)
-        # Reproduce when trying to publish data but params=None and runtimeparams=None
-        action = 'publish-data'
-        input_data_dict = set_input_data_dict(emhass_conf, costfun, None, None,
-                                              action, logger, get_data_from_file=True)
-        opt_res_last = publish_data(input_data_dict, logger, opt_res_latest=opt_res)
-        self.assertTrue(len(opt_res_last)==1)
+        # Reproduce when trying to publish data but params=None and runtimeparams=None
+        # action = 'publish-data'
+        # input_data_dict = set_input_data_dict(emhass_conf, costfun, None, None,
+        #                                       action, logger, get_data_from_file=True)
+        # opt_res_last = publish_data(input_data_dict, logger, opt_res_latest=opt_res)
+        # self.assertTrue(len(opt_res_last)==1)
         # Check if status is published
         from datetime import datetime
         now_precise = datetime.now(input_data_dict['retrieve_hass_conf']['time_zone']).replace(second=0, microsecond=0)
@@ -272,12 +282,13 @@ def test_naive_mpc_optim(self):
         self.assertTrue(hasattr(response, '__class__'))
         self.assertTrue(data['attributes']['friendly_name'] == 'EMHASS optimization status')
 
+    # Test outputs of fit, predict and tune
     def test_forecast_model_fit_predict_tune(self):
         costfun = 'profit'
         action = 'forecast-model-fit'  # fit, predict and tune methods
         params = TestCommandLineUtils.get_test_params()
         runtimeparams = {
-            "days_to_retrieve": 20,
+            'historic_days_to_retrieve': 20,
             "model_type": "load_forecast",
             "var_model": "sensor.power_load_no_var_loads",
             "sklearn_model": "KNeighborsRegressor",
@@ -290,7 +301,6 @@ def test_forecast_model_fit_predict_tune(self):
             "model_predict_friendly_name": "Load Power Forecast KNN regressor"
         }
         runtimeparams_json = json.dumps(runtimeparams)
-        # params['passed_data'] = runtimeparams
         params['optim_conf']['load_forecast_method'] = 'skforecast'
         params_json = json.dumps(params)
         input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, runtimeparams_json,
@@ -333,6 +343,7 @@ def test_forecast_model_fit_predict_tune(self):
         self.assertIsInstance(injection_dict, dict)
         self.assertIsInstance(injection_dict["figure_0"], str)
 
+    # Test data formatting of regressor model fit and predict
     def test_regressor_model_fit_predict(self):
         costfun = "profit"
         action = "regressor-model-fit"  # fit and predict methods
@@ -417,12 +428,14 @@
             regressor_model_predict(input_data_dict, logger, debug=True, mlr=mlr)
 
+    # CLI test action that does not exist
     @patch('sys.argv', ['main', '--action', 'test', '--config', str(emhass_conf['config_path']),
                         '--debug', 'True'])
     def test_main_wrong_action(self):
         opt_res = main()
         self.assertEqual(opt_res, None)
 
+    # CLI test perfect-optim action
     @patch('sys.argv', ['main', '--action', 'perfect-optim', '--config', str(emhass_conf['config_path']),
                         '--debug', 'True', '--params', json.dumps(get_test_params())])
     def test_main_perfect_forecast_optim(self):
@@ -435,6 +448,7 @@ def test_main_perfect_forecast_optim(self):
             pd.core.dtypes.dtypes.DatetimeTZDtype,
         )
 
+    # CLI test dayahead forecast optimization action
     def test_main_dayahead_forecast_optim(self):
         with patch('sys.argv', ['main', '--action', 'dayahead-optim', '--config', str(emhass_conf['config_path']),
                                 '--params', self.params_json, '--runtimeparams', self.runtimeparams_json,
@@ -443,6 +457,7 @@ def test_main_dayahead_forecast_optim(self):
             opt_res = main()
         self.assertIsInstance(opt_res, pd.DataFrame)
         self.assertTrue(opt_res.isnull().sum().sum() == 0)
 
+    # CLI test naive mpc optimization action
     def test_main_naive_mpc_optim(self):
         with patch('sys.argv', ['main', '--action', 'naive-mpc-optim', '--config', str(emhass_conf['config_path']),
                                 '--params', self.params_json, '--runtimeparams', self.runtimeparams_json,
@@ -452,10 +467,11 @@ def test_main_naive_mpc_optim(self):
             opt_res = main()
         self.assertTrue(opt_res.isnull().sum().sum() == 0)
         self.assertTrue(len(opt_res) == 10)
 
+    # CLI test forecast model fit action
     def test_main_forecast_model_fit(self):
         params = copy.deepcopy(json.loads(self.params_json))
         runtimeparams = {
-            "days_to_retrieve": 20,
+            'historic_days_to_retrieve': 20,
             "model_type": "load_forecast",
             "var_model": "sensor.power_load_no_var_loads",
             "sklearn_model": "KNeighborsRegressor",
@@ -474,10 +490,11 @@ def test_main_forecast_model_fit(self):
         self.assertIsInstance(df_fit_pred, pd.DataFrame)
         self.assertTrue(df_fit_pred_backtest == None)
 
+    # CLI test forecast model predict action
     def test_main_forecast_model_predict(self):
         params = copy.deepcopy(json.loads(self.params_json))
         runtimeparams = {
-            "days_to_retrieve": 20,
+            'historic_days_to_retrieve': 20,
             "model_type": "load_forecast",
             "var_model": "sensor.power_load_no_var_loads",
             "sklearn_model": "KNeighborsRegressor",
             "num_lags": 48,
         }
         runtimeparams_json = json.dumps(runtimeparams)
         params["passed_data"] = runtimeparams
-        params["optim_conf"]["load_forecast_method"] = "skforecast"
+        params["optim_conf"]['load_forecast_method'] = "skforecast"
         params_json = json.dumps(params)
         with patch('sys.argv', ['main', '--action', 'forecast-model-predict', '--config', str(emhass_conf['config_path']),
                                 '--params', params_json, '--runtimeparams', runtimeparams_json,
@@ -496,10 +513,11 @@ def test_main_forecast_model_predict(self):
             df_pred = main()
         self.assertIsInstance(df_pred, pd.Series)
         self.assertTrue(df_pred.isnull().sum().sum() == 0)
 
+    # CLI test forecast model tune action
     def test_main_forecast_model_tune(self):
         params = copy.deepcopy(json.loads(self.params_json))
         runtimeparams = {
-            "days_to_retrieve": 20,
+            'historic_days_to_retrieve': 20,
             "model_type": "load_forecast",
             "var_model": "sensor.power_load_no_var_loads",
             "sklearn_model": "KNeighborsRegressor",
@@ -509,7 +527,7 @@ def test_main_forecast_model_tune(self):
         runtimeparams_json = json.dumps(runtimeparams)
         params["passed_data"] = runtimeparams
-        params["optim_conf"]["load_forecast_method"] = "skforecast"
params["optim_conf"]['load_forecast_method'] = "skforecast" params_json = json.dumps(params) with patch('sys.argv', ['main', '--action', 'forecast-model-tune', '--config', str(emhass_conf['config_path']), '--params', params_json, '--runtimeparams', runtimeparams_json, @@ -518,6 +536,7 @@ def test_main_forecast_model_tune(self): self.assertIsInstance(df_pred_optim, pd.DataFrame) self.assertTrue(mlf.is_tuned == True) + # CLI test regressor model fit action def test_main_regressor_model_fit(self): params = copy.deepcopy(json.loads(self.params_json)) runtimeparams = { @@ -550,6 +569,7 @@ def test_main_regressor_model_fit(self): ): mlr = main() + # CLI test regressor model predict action def test_main_regressor_model_predict(self): params = copy.deepcopy(json.loads(self.params_json)) runtimeparams = { @@ -564,7 +584,7 @@ def test_main_regressor_model_predict(self): } runtimeparams_json = json.dumps(runtimeparams) params["passed_data"] = runtimeparams - params["optim_conf"]["load_forecast_method"] = "skforecast" + params["optim_conf"]['load_forecast_method'] = "skforecast" params_json = json.dumps(params) with patch( "sys.argv", @@ -584,8 +604,8 @@ def test_main_regressor_model_predict(self): ): prediction = main() self.assertIsInstance(prediction, np.ndarray) - + # CLI test publish data action @patch('sys.argv', ['main', '--action', 'publish-data', '--config', str(emhass_conf['config_path']), '--debug', 'True']) def test_main_publish_data(self): diff --git a/tests/test_forecast.py b/tests/test_forecast.py index 0b656fc3..49fb00f3 100644 --- a/tests/test_forecast.py +++ b/tests/test_forecast.py @@ -5,7 +5,7 @@ import os import requests_mock import pandas as pd -import pathlib, pickle, json, copy, yaml +import pathlib, pickle, json, copy import bz2 import _pickle as cPickle @@ -17,11 +17,13 @@ from emhass import utils # the root folder -root = str(utils.get_root(__file__, num_parent=2)) +root = pathlib.Path(utils.get_root(__file__, num_parent=2)) emhass_conf = {} -emhass_conf['config_path'] = pathlib.Path(root) / 'config_emhass.yaml' -emhass_conf['data_path'] = pathlib.Path(root) / 'data/' -emhass_conf['root_path'] = pathlib.Path(root) / 'src/emhass/' +emhass_conf['data_path'] = root / 'data/' +emhass_conf['root_path'] = root / 'src/emhass/' +emhass_conf['defaults_path'] = emhass_conf['data_path'] / 'config_defaults.json' +emhass_conf['associations_path'] = emhass_conf['data_path'] / 'associations.csv' + # create logger logger, ch = utils.get_logger(__name__, emhass_conf, save_to_file=False) @@ -30,47 +32,48 @@ class TestForecast(unittest.TestCase): @staticmethod def get_test_params(): - with open(emhass_conf['config_path'], 'r') as file: - params = yaml.load(file, Loader=yaml.FullLoader) - params.update({ - 'params_secrets': { - 'hass_url': 'http://supervisor/core/api', - 'long_lived_token': '${SUPERVISOR_TOKEN}', - 'time_zone': 'Europe/Paris', - 'lat': 45.83, - 'lon': 6.86, - 'alt': 8000.0 - } - }) + params = {} + if emhass_conf['defaults_path'].exists(): + with emhass_conf['defaults_path'].open('r') as data: + defaults = json.load(data) + updated_emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger) + emhass_conf.update(updated_emhass_conf) + params.update(utils.build_params(emhass_conf, built_secrets, defaults, logger)) + else: + raise Exception("config_defaults. 
diff --git a/tests/test_forecast.py b/tests/test_forecast.py
index 0b656fc3..49fb00f3 100644
--- a/tests/test_forecast.py
+++ b/tests/test_forecast.py
@@ -5,7 +5,7 @@
 import os
 import requests_mock
 import pandas as pd
-import pathlib, pickle, json, copy, yaml
+import pathlib, pickle, json, copy
 import bz2
 import _pickle as cPickle
@@ -17,11 +17,13 @@ from emhass import utils
 
 # the root folder
-root = str(utils.get_root(__file__, num_parent=2))
+root = pathlib.Path(utils.get_root(__file__, num_parent=2))
 emhass_conf = {}
-emhass_conf['config_path'] = pathlib.Path(root) / 'config_emhass.yaml'
-emhass_conf['data_path'] = pathlib.Path(root) / 'data/'
-emhass_conf['root_path'] = pathlib.Path(root) / 'src/emhass/'
+emhass_conf['data_path'] = root / 'data/'
+emhass_conf['root_path'] = root / 'src/emhass/'
+emhass_conf['defaults_path'] = emhass_conf['data_path'] / 'config_defaults.json'
+emhass_conf['associations_path'] = emhass_conf['data_path'] / 'associations.csv'
 
 # create logger
 logger, ch = utils.get_logger(__name__, emhass_conf, save_to_file=False)
@@ -30,47 +32,48 @@ class TestForecast(unittest.TestCase):
 
     @staticmethod
     def get_test_params():
-        with open(emhass_conf['config_path'], 'r') as file:
-            params = yaml.load(file, Loader=yaml.FullLoader)
-        params.update({
-            'params_secrets': {
-                'hass_url': 'http://supervisor/core/api',
-                'long_lived_token': '${SUPERVISOR_TOKEN}',
-                'time_zone': 'Europe/Paris',
-                'lat': 45.83,
-                'lon': 6.86,
-                'alt': 8000.0
-            }
-        })
+        params = {}
+        if emhass_conf['defaults_path'].exists():
+            with emhass_conf['defaults_path'].open('r') as data:
+                defaults = json.load(data)
+                updated_emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger)
+                emhass_conf.update(updated_emhass_conf)
+                params.update(utils.build_params(emhass_conf, built_secrets, defaults, logger))
+        else:
+            raise Exception("config_defaults.json does not exist in path: "+str(emhass_conf['defaults_path']))
         return params
 
     def setUp(self):
         self.get_data_from_file = True
-        params = None
-        retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(emhass_conf, use_secrets=False)
+        params = json.dumps(TestForecast.get_test_params())
+        retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params,logger)
         self.retrieve_hass_conf, self.optim_conf, self.plant_conf = \
             retrieve_hass_conf, optim_conf, plant_conf
+        # Create RetrieveHass object
         self.rh = RetrieveHass(self.retrieve_hass_conf['hass_url'], self.retrieve_hass_conf['long_lived_token'],
-                               self.retrieve_hass_conf['freq'], self.retrieve_hass_conf['time_zone'],
+                               self.retrieve_hass_conf['optimization_time_step'], self.retrieve_hass_conf['time_zone'],
                                params, emhass_conf, logger)
+        # Obtain sensor values from saved file
         if self.get_data_from_file:
             with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp:
                 self.rh.df_final, self.days_list, self.var_list = pickle.load(inp)
-                self.retrieve_hass_conf['var_load'] = str(self.var_list[0])
-                self.retrieve_hass_conf['var_PV'] = str(self.var_list[1])
-                self.retrieve_hass_conf['var_interp'] = [retrieve_hass_conf['var_PV'], retrieve_hass_conf['var_load']]
-                self.retrieve_hass_conf['var_replace_zero'] = [retrieve_hass_conf['var_PV']]
+                self.retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(self.var_list[0])
+                self.retrieve_hass_conf['sensor_power_photovoltaics'] = str(self.var_list[1])
+                self.retrieve_hass_conf['sensor_linear_interp'] = [retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']]
+                self.retrieve_hass_conf['sensor_replace_zero'] = [retrieve_hass_conf['sensor_power_photovoltaics']]
+        # Else obtain sensor values from HA
         else:
-            self.days_list = utils.get_days_list(self.retrieve_hass_conf['days_to_retrieve'])
-            self.var_list = [self.retrieve_hass_conf['var_load'], self.retrieve_hass_conf['var_PV']]
+            self.days_list = utils.get_days_list(self.retrieve_hass_conf['historic_days_to_retrieve'])
+            self.var_list = [self.retrieve_hass_conf['sensor_power_load_no_var_loads'], self.retrieve_hass_conf['sensor_power_photovoltaics']]
             self.rh.get_data(self.days_list, self.var_list,
                              minimal_response=False, significant_changes_only=False)
-        self.rh.prepare_data(self.retrieve_hass_conf['var_load'], load_negative = self.retrieve_hass_conf['load_negative'],
+        # Prepare data for optimization
+        self.rh.prepare_data(self.retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = self.retrieve_hass_conf['load_negative'],
                              set_zero_min = self.retrieve_hass_conf['set_zero_min'],
-                             var_replace_zero = self.retrieve_hass_conf['var_replace_zero'],
-                             var_interp = self.retrieve_hass_conf['var_interp'])
+                             var_replace_zero = self.retrieve_hass_conf['sensor_replace_zero'],
+                             var_interp = self.retrieve_hass_conf['sensor_linear_interp'])
         self.df_input_data = self.rh.df_final.copy()
-
+        # Create forecast Object
         self.fcst = Forecast(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
                              params, emhass_conf, logger, get_data_from_file=self.get_data_from_file)
         # The default for test is csv read
@@ -82,6 +85,7 @@ def setUp(self):
         self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
                                 self.fcst.var_load_cost, self.fcst.var_prod_price,
                                 'profit', emhass_conf, logger)
+        # Manually create input data (from formatted parameter) dictionary
         self.input_data_dict = {
             'emhass_conf': emhass_conf,
             'retrieve_hass_conf': self.retrieve_hass_conf,
@@ -95,7 +99,10 @@ def setUp(self):
             'params': params
         }
 
+    # Test weather forecast dataframe output based on saved csv file
     def test_get_weather_forecast_csv(self):
+        # Test dataframe from get weather forecast
         self.df_weather_csv = self.fcst.get_weather_forecast(method='csv')
         self.assertEqual(self.fcst.weather_forecast_method, 'csv')
         self.assertIsInstance(self.df_weather_csv, type(pd.DataFrame()))
@@ -104,7 +111,8 @@ def test_get_weather_forecast_csv(self):
         self.assertEqual(self.df_weather_csv.index.tz, self.fcst.time_zone)
         self.assertTrue(self.fcst.start_forecast < ts for ts in self.df_weather_csv.index)
         self.assertEqual(len(self.df_weather_csv),
-                         int(self.optim_conf['delta_forecast'].total_seconds()/3600/self.fcst.timeStep))
+                         int(self.optim_conf['delta_forecast_daily'].total_seconds()/3600/self.fcst.timeStep))
+        # Test dataframe from get power from weather
         P_PV_forecast = self.fcst.get_power_from_weather(self.df_weather_csv)
         self.assertIsInstance(P_PV_forecast, pd.core.series.Series)
         self.assertIsInstance(P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex)
@@ -114,12 +122,14 @@ def test_get_weather_forecast_csv(self):
         df_weather_none = self.fcst.get_weather_forecast(method='none')
         self.assertTrue(df_weather_none == None)
 
+    # Test output weather forecast using scrapper with mock get request data
     def test_get_weather_forecast_scrapper_method_mock(self):
         with requests_mock.mock() as m:
             data = bz2.BZ2File(str(emhass_conf['data_path'] / 'test_response_scrapper_get_method.pbz2'), "rb")
             data = cPickle.load(data)
             get_url = "https://clearoutside.com/forecast/"+str(round(self.fcst.lat, 2))+"/"+str(round(self.fcst.lon, 2))+"?desktop=true"
             m.get(get_url, content=data)
+            # Test dataframe output from get weather forecast
             df_weather_scrap = self.fcst.get_weather_forecast(method='scrapper')
             self.assertIsInstance(df_weather_scrap, type(pd.DataFrame()))
             self.assertIsInstance(df_weather_scrap.index, pd.core.indexes.datetimes.DatetimeIndex)
@@ -127,15 +137,17 @@ def test_get_weather_forecast_scrapper_method_mock(self):
             self.assertEqual(df_weather_scrap.index.tz, self.fcst.time_zone)
             self.assertTrue(self.fcst.start_forecast < ts for ts in df_weather_scrap.index)
             self.assertEqual(len(df_weather_scrap),
-                             int(self.optim_conf['delta_forecast'].total_seconds()/3600/self.fcst.timeStep))
+                             int(self.optim_conf['delta_forecast_daily'].total_seconds()/3600/self.fcst.timeStep))
+            # Test dataframe output from get power from weather forecast
             P_PV_forecast = self.fcst.get_power_from_weather(df_weather_scrap)
             self.assertIsInstance(P_PV_forecast, pd.core.series.Series)
             self.assertIsInstance(P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex)
             self.assertIsInstance(P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)
             self.assertEqual(P_PV_forecast.index.tz, self.fcst.time_zone)
             self.assertEqual(len(df_weather_scrap), len(P_PV_forecast))
-            self.plant_conf['module_model'] = [self.plant_conf['module_model'][0], self.plant_conf['module_model'][0]]
-            self.plant_conf['inverter_model'] = [self.plant_conf['inverter_model'][0], self.plant_conf['inverter_model'][0]]
+            # Test dataframe output from get power from weather forecast (with 2 PV plants)
+            self.plant_conf['pv_module_model'] = [self.plant_conf['pv_module_model'][0], self.plant_conf['pv_module_model'][0]]
+            self.plant_conf['pv_inverter_model'] = [self.plant_conf['pv_inverter_model'][0], self.plant_conf['pv_inverter_model'][0]]
             self.plant_conf['surface_tilt'] = [30, 45]
             self.plant_conf['surface_azimuth'] = [270, 90]
             self.plant_conf['modules_per_string'] = [8, 8]
@@ -146,7 +158,8 @@ def test_get_weather_forecast_scrapper_method_mock(self):
             self.assertIsInstance(P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)
             self.assertEqual(P_PV_forecast.index.tz, self.fcst.time_zone)
             self.assertEqual(len(df_weather_scrap), len(P_PV_forecast))
-
+
+    # Test output weather forecast using Solcast with mock get request data
     def test_get_weather_forecast_solcast_method_mock(self):
         self.fcst.params = {'passed_data': {'weather_forecast_cache': False, 'weather_forecast_cache_only': False}}
         self.fcst.retrieve_hass_conf['solcast_api_key'] = "123456"
@@ -165,17 +178,18 @@ def test_get_weather_forecast_solcast_method_mock(self):
         self.assertEqual(df_weather_scrap.index.tz, self.fcst.time_zone)
         self.assertTrue(self.fcst.start_forecast < ts for ts in df_weather_scrap.index)
         self.assertEqual(len(df_weather_scrap),
-                         int(self.optim_conf['delta_forecast'].total_seconds()/3600/self.fcst.timeStep))
+                         int(self.optim_conf['delta_forecast_daily'].total_seconds()/3600/self.fcst.timeStep))
         if os.path.isfile(emhass_conf['data_path'] / "temp_weather_forecast_data.pkl"):
             os.rename(emhass_conf['data_path'] / "temp_weather_forecast_data.pkl", emhass_conf['data_path'] / "weather_forecast_data.pkl")
 
+    # Test output weather forecast using Forecast.Solar with mock get request data
     def test_get_weather_forecast_solarforecast_method_mock(self):
         with requests_mock.mock() as m:
             data = bz2.BZ2File(str(emhass_conf['data_path'] / 'test_response_solarforecast_get_method.pbz2'), "rb")
             data = cPickle.load(data)
-            for i in range(len(self.plant_conf['module_model'])):
+            for i in range(len(self.plant_conf['pv_module_model'])):
                 get_url = "https://api.forecast.solar/estimate/"+str(round(self.fcst.lat, 2))+"/"+str(round(self.fcst.lon, 2))+\
-                    "/"+str(self.plant_conf["surface_tilt"][i])+"/"+str(self.plant_conf["surface_azimuth"][i]-180)+\
+                    "/"+str(self.plant_conf['surface_tilt'][i])+"/"+str(self.plant_conf['surface_azimuth'][i]-180)+\
                     "/"+str(5)
                 m.get(get_url, json=data)
             df_weather_solarforecast = self.fcst.get_weather_forecast(method='solar.forecast')
@@ -185,21 +199,21 @@ def test_get_weather_forecast_solarforecast_method_mock(self):
             self.assertEqual(df_weather_solarforecast.index.tz, self.fcst.time_zone)
             self.assertTrue(self.fcst.start_forecast < ts for ts in df_weather_solarforecast.index)
             self.assertEqual(len(df_weather_solarforecast),
-                             int(self.optim_conf['delta_forecast'].total_seconds()/3600/self.fcst.timeStep))
+                             int(self.optim_conf['delta_forecast_daily'].total_seconds()/3600/self.fcst.timeStep))
 
+    # Test output weather forecast using passed runtime lists
     def test_get_forecasts_with_lists(self):
-        with open(emhass_conf['config_path'], 'r') as file:
-            params = yaml.load(file, Loader=yaml.FullLoader)
-        params.update({
-            'params_secrets': {
-                'hass_url': 'http://supervisor/core/api',
-                'long_lived_token': '${SUPERVISOR_TOKEN}',
-                'time_zone': 'Europe/Paris',
-                'lat': 45.83,
-                'lon': 6.86,
-                'alt': 4807.8
-            }
-        })
+        # Load default params
+        params = {}
+        if emhass_conf['defaults_path'].exists():
+            with emhass_conf['defaults_path'].open('r') as data:
+                defaults = json.load(data)
+                updated_emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger)
+                emhass_conf.update(updated_emhass_conf)
+                params.update(utils.build_params(emhass_conf, built_secrets, defaults, logger))
+        else:
+            raise Exception("config_defaults.json does not exist in path: "+str(emhass_conf['defaults_path']))
+        # Create 48 (1 day of data) long lists of runtime forecast parameters
         runtimeparams = {
             'pv_power_forecast':[i+1 for i in range(48)],
             'load_power_forecast':[i+1 for i in range(48)],
@@ -209,35 +223,41 @@ def test_get_forecasts_with_lists(self):
         runtimeparams_json = json.dumps(runtimeparams)
         params['passed_data'] = runtimeparams
         params_json = json.dumps(params)
-        retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(emhass_conf,
-                                                                          use_secrets=False, params=params_json)
+        retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params_json,logger)
         set_type = "dayahead-optim"
         params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams(
             runtimeparams_json, params_json, retrieve_hass_conf, optim_conf, plant_conf, set_type, logger)
+        # Build RetrieveHass Object
         rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
-                          retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
+                          retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'],
                           params, emhass_conf, logger)
+        # Obtain sensor values from saved file
         if self.get_data_from_file:
             with open((emhass_conf['data_path'] / 'test_df_final.pkl'), 'rb') as inp:
                 rh.df_final, days_list, var_list = pickle.load(inp)
-                retrieve_hass_conf['var_load'] = str(self.var_list[0])
-                retrieve_hass_conf['var_PV'] = str(self.var_list[1])
-                retrieve_hass_conf['var_interp'] = [retrieve_hass_conf['var_PV'], retrieve_hass_conf['var_load']]
-                retrieve_hass_conf['var_replace_zero'] = [retrieve_hass_conf['var_PV']]
+                retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(self.var_list[0])
+                retrieve_hass_conf['sensor_power_photovoltaics'] = str(self.var_list[1])
+                retrieve_hass_conf['sensor_linear_interp'] = [retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']]
+                retrieve_hass_conf['sensor_replace_zero'] = [retrieve_hass_conf['sensor_power_photovoltaics']]
+        # Else obtain sensor values from HA
         else:
-            days_list = utils.get_days_list(retrieve_hass_conf['days_to_retrieve'])
-            var_list = [retrieve_hass_conf['var_load'], retrieve_hass_conf['var_PV']]
+            days_list = utils.get_days_list(retrieve_hass_conf['historic_days_to_retrieve'])
+            var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'], retrieve_hass_conf['sensor_power_photovoltaics']]
             rh.get_data(days_list, var_list,
                         minimal_response=False, significant_changes_only=False)
-        rh.prepare_data(retrieve_hass_conf['var_load'], load_negative = retrieve_hass_conf['load_negative'],
+        # Prepare data for optimization
+        rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = retrieve_hass_conf['load_negative'],
                         set_zero_min = retrieve_hass_conf['set_zero_min'],
-                        var_replace_zero = retrieve_hass_conf['var_replace_zero'],
-                        var_interp = retrieve_hass_conf['var_interp'])
+                        var_replace_zero = retrieve_hass_conf['sensor_replace_zero'],
+                        var_interp = retrieve_hass_conf['sensor_linear_interp'])
         df_input_data = rh.df_final.copy()
+        # Build Forecast Object
         fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf,
                         params_json, emhass_conf, logger, get_data_from_file=True)
+        # Obtain only 48 rows of data and remove last column for input
         df_input_data = copy.deepcopy(df_input_data).iloc[-49:-1]
+        # Get Weather forecast with list, check dataframe output
         P_PV_forecast = fcst.get_weather_forecast(method='list')
         df_input_data.index = P_PV_forecast.index
         df_input_data.index.freq = rh.df_final.index.freq
@@ -248,6 +268,7 @@ def test_get_forecasts_with_lists(self):
         self.assertTrue(fcst.start_forecast < ts for ts in P_PV_forecast.index)
         self.assertTrue(P_PV_forecast.values[0][0] == 1)
         self.assertTrue(P_PV_forecast.values[-1][0] == 48)
+        # Get load forecast with list, check dataframe output
         P_load_forecast = fcst.get_load_forecast(method='list')
         self.assertIsInstance(P_load_forecast, pd.core.series.Series)
         self.assertIsInstance(P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex)
@@ -256,29 +277,33 @@ def test_get_forecasts_with_lists(self):
         self.assertEqual(len(P_PV_forecast), len(P_load_forecast))
         self.assertTrue(P_load_forecast.values[0] == 1)
         self.assertTrue(P_load_forecast.values[-1] == 48)
+        # Get load cost forecast with list, check dataframe output
         df_input_data = fcst.get_load_cost_forecast(df_input_data, method='list')
         self.assertTrue(fcst.var_load_cost in df_input_data.columns)
         self.assertTrue(df_input_data.isnull().sum().sum()==0)
         self.assertTrue(df_input_data['unit_load_cost'].values[0] == 1)
         self.assertTrue(df_input_data['unit_load_cost'].values[-1] == 48)
+        # Get production price forecast with list, check dataframe output
         df_input_data = fcst.get_prod_price_forecast(df_input_data, method='list')
         self.assertTrue(fcst.var_prod_price in df_input_data.columns)
         self.assertTrue(df_input_data.isnull().sum().sum()==0)
         self.assertTrue(df_input_data['unit_prod_price'].values[0] == 1)
         self.assertTrue(df_input_data['unit_prod_price'].values[-1] == 48)
-        # Test with longer lists
-        with open(emhass_conf['config_path'], 'r') as file:
-            params = yaml.load(file, Loader=yaml.FullLoader)
-        params.update({
-            'params_secrets': {
-                'hass_url': 'http://supervisor/core/api',
-                'long_lived_token': '${SUPERVISOR_TOKEN}',
-                'time_zone': 'Europe/Paris',
-                'lat': 45.83,
-                'lon': 6.86,
-                'alt': 4807.8
-            }
-        })
+
+    # Test output weather forecast using longer passed runtime lists
+    def test_get_forecasts_with_longer_lists(self):
+        # Load default params
+        params = {}
+        set_type = "dayahead-optim"
+        if emhass_conf['defaults_path'].exists():
+            with emhass_conf['defaults_path'].open('r') as data:
+                defaults = json.load(data)
+                updated_emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger)
+                emhass_conf.update(updated_emhass_conf)
+                params.update(utils.build_params(emhass_conf, built_secrets, defaults, logger))
+        else:
+            raise Exception("config_defaults.json does not exist in path: "+str(emhass_conf['defaults_path']))
+        # Create 3*48 (3 days of data) long lists of runtime forecast parameters
         runtimeparams = {
             'pv_power_forecast':[i+1 for i in range(3*48)],
             'load_power_forecast':[i+1 for i in range(3*48)],
@@ -288,14 +313,15 @@ def test_get_forecasts_with_lists(self):
         runtimeparams_json = json.dumps(runtimeparams)
         params['passed_data'] = runtimeparams
         params_json = json.dumps(params)
-        retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(emhass_conf,
-                                                                          use_secrets=False, params=params_json)
-        optim_conf['delta_forecast'] = pd.Timedelta(days=3)
+        retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params_json,logger)
+        optim_conf['delta_forecast_daily'] = pd.Timedelta(days=3)
         params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams(
             runtimeparams_json, params_json, retrieve_hass_conf, optim_conf, plant_conf, set_type, logger)
+        # Create Forecast Object
         fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf,
                         params_json, emhass_conf, logger, get_data_from_file=True)
+        # Get weather forecast with list, check dataframe output
         P_PV_forecast = fcst.get_weather_forecast(method='list')
         self.assertIsInstance(P_PV_forecast, type(pd.DataFrame()))
         self.assertIsInstance(P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex)
@@ -304,6 +330,7 @@ def test_get_forecasts_with_lists(self):
         self.assertTrue(fcst.start_forecast < ts for ts in P_PV_forecast.index)
         self.assertTrue(P_PV_forecast.values[0][0] == 1)
         self.assertTrue(P_PV_forecast.values[-1][0] == 3*48)
+        # Get load forecast with list, check dataframe output
         P_load_forecast = fcst.get_load_forecast(method='list')
         self.assertIsInstance(P_load_forecast, pd.core.series.Series)
         self.assertIsInstance(P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex)
@@ -315,30 +342,32 @@ def test_get_forecasts_with_lists(self):
         df_input_data_dayahead = pd.concat([P_PV_forecast, P_load_forecast], axis=1)
         df_input_data_dayahead = utils.set_df_index_freq(df_input_data_dayahead)
         df_input_data_dayahead.columns = ['P_PV_forecast', 'P_load_forecast']
+        # Get load cost forecast with list, check dataframe output
         df_input_data_dayahead = fcst.get_load_cost_forecast(df_input_data_dayahead, method='list')
         self.assertTrue(fcst.var_load_cost in df_input_data_dayahead.columns)
         self.assertTrue(df_input_data_dayahead.isnull().sum().sum()==0)
         self.assertTrue(df_input_data_dayahead[fcst.var_load_cost].iloc[0] == 1)
         self.assertTrue(df_input_data_dayahead[fcst.var_load_cost].iloc[-1] == 3*48)
+        # Get production price forecast with list, check dataframe output
         df_input_data_dayahead = fcst.get_prod_price_forecast(df_input_data_dayahead, method='list')
         self.assertTrue(fcst.var_prod_price in df_input_data_dayahead.columns)
         self.assertTrue(df_input_data_dayahead.isnull().sum().sum()==0)
         self.assertTrue(df_input_data_dayahead[fcst.var_prod_price].iloc[0] == 1)
         self.assertTrue(df_input_data_dayahead[fcst.var_prod_price].iloc[-1] == 3*48)
-
+
+    # Test output values of weather forecast using passed runtime lists and saved sensor data
     def test_get_forecasts_with_lists_special_case(self):
-        with open(emhass_conf['config_path'], 'r') as file:
-            params = yaml.load(file, Loader=yaml.FullLoader)
-        params.update({
-            'params_secrets': {
-                'hass_url': 'http://supervisor/core/api',
-                'long_lived_token': '${SUPERVISOR_TOKEN}',
-                'time_zone': 'Europe/Paris',
-                'lat': 45.83,
-                'lon': 6.86,
-                'alt': 4807.8
-            }
-        })
+        # Load default params
+        params = {}
+        if emhass_conf['defaults_path'].exists():
+            with emhass_conf['defaults_path'].open('r') as data:
+                defaults = json.load(data)
+                updated_emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger)
+                emhass_conf.update(updated_emhass_conf)
+                params.update(utils.build_params(emhass_conf, built_secrets, defaults, logger))
+        else:
+            raise Exception("config_defaults.json does not exist in path: "+str(emhass_conf['defaults_path']))
+        # Create 48 (1 day of data) long lists of runtime forecast parameters
         runtimeparams = {
             'load_cost_forecast':[i+1 for i in range(48)],
             'prod_price_forecast':[i+1 for i in range(48)]
@@ -346,44 +375,52 @@ def test_get_forecasts_with_lists_special_case(self):
         runtimeparams_json = json.dumps(runtimeparams)
         params['passed_data'] = runtimeparams
         params_json = json.dumps(params)
-        retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(emhass_conf,
-                                                                          use_secrets=False, params=params_json)
+        retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params_json,logger)
         set_type = "dayahead-optim"
         params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams(
             runtimeparams_json, params_json, retrieve_hass_conf, optim_conf, plant_conf, set_type, logger)
+        # Create RetrieveHass Object
         rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
-                          retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
+                          retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'],
                           params, emhass_conf, logger)
+        # Obtain sensor values from saved file
         if self.get_data_from_file:
             with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp:
                 rh.df_final, days_list, var_list = pickle.load(inp)
-                retrieve_hass_conf['var_load'] = str(self.var_list[0])
-                retrieve_hass_conf['var_PV'] = str(self.var_list[1])
-                retrieve_hass_conf['var_interp'] = [retrieve_hass_conf['var_PV'], retrieve_hass_conf['var_load']]
-                retrieve_hass_conf['var_replace_zero'] = [retrieve_hass_conf['var_PV']]
+                retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(self.var_list[0])
+                retrieve_hass_conf['sensor_power_photovoltaics'] = str(self.var_list[1])
+                retrieve_hass_conf['sensor_linear_interp'] = [retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']]
+                retrieve_hass_conf['sensor_replace_zero'] = [retrieve_hass_conf['sensor_power_photovoltaics']]
+        # Else obtain sensor values from HA
         else:
-            days_list = utils.get_days_list(retrieve_hass_conf['days_to_retrieve'])
-            var_list = [retrieve_hass_conf['var_load'], retrieve_hass_conf['var_PV']]
+            days_list = utils.get_days_list(retrieve_hass_conf['historic_days_to_retrieve'])
+            var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'], retrieve_hass_conf['sensor_power_photovoltaics']]
            rh.get_data(days_list, var_list,
                        minimal_response=False, significant_changes_only=False)
-        rh.prepare_data(retrieve_hass_conf['var_load'], load_negative = retrieve_hass_conf['load_negative'],
+        # Prepare data for optimization
+        rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = retrieve_hass_conf['load_negative'],
                         set_zero_min = retrieve_hass_conf['set_zero_min'],
-                        var_replace_zero = retrieve_hass_conf['var_replace_zero'],
-                        var_interp = retrieve_hass_conf['var_interp'])
+                        var_replace_zero = retrieve_hass_conf['sensor_replace_zero'],
+                        var_interp = retrieve_hass_conf['sensor_linear_interp'])
         df_input_data = rh.df_final.copy()
+        # Create forecast object
         fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf,
                         params_json, emhass_conf, logger, get_data_from_file=True)
+        # Obtain only 48 rows of data and remove last column for input
         df_input_data = copy.deepcopy(df_input_data).iloc[-49:-1]
+        # Get weather forecast with list
         P_PV_forecast = fcst.get_weather_forecast()
         df_input_data.index = P_PV_forecast.index
         df_input_data.index.freq = rh.df_final.index.freq
+        # Get load cost forecast with list, check values from output
         df_input_data = fcst.get_load_cost_forecast(
             df_input_data, method='list')
         self.assertTrue(fcst.var_load_cost in df_input_data.columns)
         self.assertTrue(df_input_data.isnull().sum().sum()==0)
         self.assertTrue(df_input_data['unit_load_cost'].values[0] == 1)
         self.assertTrue(df_input_data['unit_load_cost'].values[-1] == 48)
+        # Get production price forecast with list, check values from output
         df_input_data = fcst.get_prod_price_forecast(
             df_input_data, method='list')
         self.assertTrue(fcst.var_prod_price in df_input_data.columns)
@@ -397,9 +434,9 @@ def test_get_power_from_weather(self):
         self.assertIsInstance(self.P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)
         self.assertEqual(self.P_PV_forecast.index.tz, self.fcst.time_zone)
         self.assertEqual(len(self.df_weather_scrap), len(self.P_PV_forecast))
-        # Lets test passing a lists of PV params
-        self.plant_conf['module_model'] = [self.plant_conf['module_model'], self.plant_conf['module_model']]
-        self.plant_conf['inverter_model'] = [self.plant_conf['inverter_model'], self.plant_conf['inverter_model']]
+        # Test passing a list of PV params
+        self.plant_conf['pv_module_model'] = [self.plant_conf['pv_module_model'], self.plant_conf['pv_module_model']]
+        self.plant_conf['pv_inverter_model'] = [self.plant_conf['pv_inverter_model'], self.plant_conf['pv_inverter_model']]
         self.plant_conf['surface_tilt'] = [30, 45]
         self.plant_conf['surface_azimuth'] = [270, 90]
         self.plant_conf['modules_per_string'] = [8, 8]
@@ -426,6 +463,7 @@ def test_get_power_from_weather(self):
         self.assertEqual(P_PV_forecast.index.tz, self.fcst.time_zone)
         self.assertEqual(len(self.df_weather_scrap), len(P_PV_forecast))
 
+    # Test dataframe output of load forecast
     def test_get_load_forecast(self):
         P_load_forecast = self.fcst.get_load_forecast()
         self.assertIsInstance(P_load_forecast, pd.core.series.Series)
@@ -453,14 +491,16 @@ def test_get_load_forecast(self):
         self.assertEqual(P_load_forecast.index.tz, self.fcst.time_zone)
         self.assertEqual(len(self.P_PV_forecast), len(P_load_forecast))
 
+    # Test dataframe output of ml load forecast
     def test_get_load_forecast_mlforecaster(self):
         params = TestForecast.get_test_params()
         params_json = json.dumps(params)
         costfun = 'profit'
         action = 'forecast-model-fit'  # fit, predict and tune methods
         params = copy.deepcopy(json.loads(params_json))
+        # pass custom runtime parameters
         runtimeparams = {
-            "days_to_retrieve": 20,
+            'historic_days_to_retrieve': 20,
            "model_type": "load_forecast",
            "var_model": "sensor.power_load_no_var_loads",
            "sklearn_model": "KNeighborsRegressor",
@@ -473,12 +513,14 @@ def test_get_load_forecast_mlforecaster(self):
         input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, runtimeparams_json,
                                               action, logger, get_data_from_file=True)
         data = copy.deepcopy(input_data_dict['df_input_data'])
+        # Create MLForecaster Object
         model_type = input_data_dict['params']['passed_data']['model_type']
         var_model = input_data_dict['params']['passed_data']['var_model']
         sklearn_model = input_data_dict['params']['passed_data']['sklearn_model']
         num_lags = input_data_dict['params']['passed_data']['num_lags']
         mlf = MLForecaster(data, model_type, var_model, sklearn_model, num_lags, emhass_conf, logger)
         mlf.fit()
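The fit-then-forecast pattern used here is the same one the forecast-model-fit action wraps; a condensed sketch, assuming data is a DataFrame of sensor history as loaded above:

    from emhass.machine_learning_forecaster import MLForecaster

    mlf = MLForecaster(data, 'load_forecast', 'sensor.power_load_no_var_loads',
                       'KNeighborsRegressor', 48, emhass_conf, logger)
    mlf.fit()  # trains the skforecast model on the sensor history
    # the trained mlf is then handed to get_load_forecast(method='mlforecaster', mlf=mlf)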
+        # Get load forecast using mlforecaster
         P_load_forecast = input_data_dict['fcst'].get_load_forecast(method="mlforecaster", use_last_window=False, debug=True, mlf=mlf)
         self.assertIsInstance(P_load_forecast, pd.core.series.Series)
@@ -488,6 +530,7 @@ def test_get_load_forecast_mlforecaster(self):
         self.assertTrue((P_load_forecast.index == self.fcst.forecast_dates).all())
         self.assertEqual(len(self.P_PV_forecast), len(P_load_forecast))
 
+    # Test load cost forecast dataframe output using saved csv reference file
     def test_get_load_cost_forecast(self):
         df_input_data = self.fcst.get_load_cost_forecast(self.df_input_data)
         self.assertTrue(self.fcst.var_load_cost in df_input_data.columns)
@@ -497,6 +540,7 @@ def test_get_load_cost_forecast(self):
         self.assertTrue(self.fcst.var_load_cost in df_input_data.columns)
         self.assertTrue(df_input_data.isnull().sum().sum()==0)
 
+    # Test production price forecast dataframe output using saved csv reference file
     def test_get_prod_price_forecast(self):
         df_input_data = self.fcst.get_prod_price_forecast(self.df_input_data)
         self.assertTrue(self.fcst.var_prod_price in df_input_data.columns)
diff --git a/tests/test_machine_learning_forecaster.py b/tests/test_machine_learning_forecaster.py
index 5fd504cf..d8f664dd 100644
--- a/tests/test_machine_learning_forecaster.py
+++ b/tests/test_machine_learning_forecaster.py
@@ -5,7 +5,6 @@
 from unittest.mock import patch
 import pathlib
 import json
-import yaml
 import copy
 import pickle
 import pandas as pd
@@ -19,11 +18,12 @@ from emhass import utils
 
 # the root folder
-root = str(utils.get_root(__file__, num_parent=2))
+root = pathlib.Path(utils.get_root(__file__, num_parent=2))
 emhass_conf = {}
-emhass_conf['config_path'] = pathlib.Path(root) / 'config_emhass.yaml'
-emhass_conf['data_path'] = pathlib.Path(root) / 'data/'
-emhass_conf['root_path'] = pathlib.Path(root) / 'src/emhass/'
+emhass_conf['data_path'] = root / 'data/'
+emhass_conf['root_path'] = root / 'src/emhass/'
+emhass_conf['defaults_path'] = emhass_conf['data_path'] / 'config_defaults.json'
+emhass_conf['associations_path'] = emhass_conf['data_path'] / 'associations.csv'
 
 # create logger
 logger, ch = utils.get_logger(__name__, emhass_conf, save_to_file=False)
@@ -32,28 +32,24 @@ class TestMLForecaster(unittest.TestCase):
 
     @staticmethod
     def get_test_params():
-        with open(emhass_conf['config_path'], 'r') as file:
-            params = yaml.load(file, Loader=yaml.FullLoader)
-        params.update({
-            'params_secrets': {
-                'hass_url': 'http://supervisor/core/api',
-                'long_lived_token': '${SUPERVISOR_TOKEN}',
-                'time_zone': 'Europe/Paris',
-                'lat': 45.83,
-                'lon': 6.86,
-                'alt': 8000.0
-            }
-        })
+        params = {}
+        if emhass_conf['defaults_path'].exists():
+            with emhass_conf['defaults_path'].open('r') as data:
+                defaults = json.load(data)
+                updated_emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger)
+                emhass_conf.update(updated_emhass_conf)
+                params.update(utils.build_params(emhass_conf, built_secrets, defaults, logger))
+        else:
+            raise Exception("config_defaults.json does not exist in path: "+str(emhass_conf['defaults_path']))
         return params
         return params
 
     def setUp(self):
         params = TestMLForecaster.get_test_params()
-        params_json = json.dumps(params)
         costfun = 'profit'
         action = 'forecast-model-fit' # fit, predict and tune methods
-        params = copy.deepcopy(json.loads(params_json))
+        # Create runtime parameters
         runtimeparams = {
-            "days_to_retrieve": 20,
+            'historic_days_to_retrieve': 20,
             "model_type": "load_forecast",
             "var_model": "sensor.power_load_no_var_loads",
             "sklearn_model": "KNeighborsRegressor",
@@ -62,22 +58,25 @@ def setUp(self):
         runtimeparams_json = json.dumps(runtimeparams)
         params['passed_data'] = runtimeparams
         params['optim_conf']['load_forecast_method'] = 'skforecast'
+        # Create input dictionary
         params_json = json.dumps(params)
         self.input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, runtimeparams_json,
                                                    action, logger, get_data_from_file=True)
+        # Create MLForecaster object
         data = copy.deepcopy(self.input_data_dict['df_input_data'])
         model_type = self.input_data_dict['params']['passed_data']['model_type']
         var_model = self.input_data_dict['params']['passed_data']['var_model']
         sklearn_model = self.input_data_dict['params']['passed_data']['sklearn_model']
         num_lags = self.input_data_dict['params']['passed_data']['num_lags']
         self.mlf = MLForecaster(data, model_type, var_model, sklearn_model, num_lags, emhass_conf, logger)
-
+        # Create RetrieveHass Object
         get_data_from_file = True
         params = None
-        self.retrieve_hass_conf, self.optim_conf, _ = utils.get_yaml_parse(emhass_conf, use_secrets=False)
+        self.retrieve_hass_conf, self.optim_conf, _ = utils.get_yaml_parse(params_json,logger)
         self.rh = RetrieveHass(self.retrieve_hass_conf['hass_url'], self.retrieve_hass_conf['long_lived_token'],
-                               self.retrieve_hass_conf['freq'], self.retrieve_hass_conf['time_zone'],
-                               params, emhass_conf, logger, get_data_from_file=get_data_from_file)
+                               self.retrieve_hass_conf['optimization_time_step'], self.retrieve_hass_conf['time_zone'],
+                               params_json, emhass_conf, logger, get_data_from_file=get_data_from_file)
+        # Open and extract saved sensor data to test against
         with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp:
             self.rh.df_final, self.days_list, self.var_list = pickle.load(inp)
diff --git a/tests/test_machine_learning_regressor.py b/tests/test_machine_learning_regressor.py
index 6cea4701..1bdc5ba8 100644
--- a/tests/test_machine_learning_regressor.py
+++ b/tests/test_machine_learning_regressor.py
@@ -7,18 +7,20 @@
 import numpy as np
 import pandas as pd
-import yaml
 from emhass import utils
 from emhass.command_line import set_input_data_dict
 from emhass.machine_learning_regressor import MLRegressor
 from sklearn.pipeline import Pipeline
 
 # the root folder
-root = str(utils.get_root(__file__, num_parent=2))
+root = pathlib.Path(utils.get_root(__file__, num_parent=2))
 emhass_conf = {}
-emhass_conf["config_path"] = pathlib.Path(root) / "config_emhass.yaml"
-emhass_conf["data_path"] = pathlib.Path(root) / "data/"
-emhass_conf['root_path'] = pathlib.Path(root) / 'src/emhass/'
+emhass_conf['data_path'] = root / 'data/'
+emhass_conf['root_path'] = root / 'src/emhass/'
+emhass_conf['defaults_path'] = emhass_conf['data_path'] / 'config_defaults.json'
+emhass_conf['associations_path'] = emhass_conf['data_path'] / 'associations.csv'
+
+
 # create logger
 logger, ch = utils.get_logger(__name__, emhass_conf, save_to_file=False)
@@ -27,28 +29,24 @@ class TestMLRegressor(unittest.TestCase):
 
     @staticmethod
     def get_test_params():
-        with open(emhass_conf["config_path"]) as file:
-            params = yaml.safe_load(file)
-        params.update(
-            {
-                "params_secrets": {
-                    "hass_url": "http://supervisor/core/api",
-                    "long_lived_token": "${SUPERVISOR_TOKEN}",
-                    "time_zone": "Europe/Paris",
-                    "lat": 45.83,
-                    "lon": 6.86,
-                    "alt": 8000.0,
-                },
-            },
-        )
+        params = {}
+        if emhass_conf['defaults_path'].exists():
+            with emhass_conf['defaults_path'].open('r') as data:
+                defaults = json.load(data)
+            updated_emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger)
+            emhass_conf.update(updated_emhass_conf)
+            params.update(utils.build_params(emhass_conf, built_secrets, defaults, logger))
+        else:
+            raise Exception("config_defaults.json does not exist in path: "+str(emhass_conf['defaults_path']))
         return params
 
     def setUp(self):
+        # parameters
         params = TestMLRegressor.get_test_params()
-        params_json = json.dumps(params)
         costfun = "profit"
         action = "regressor-model-fit"  # fit and predict methods
-        params = copy.deepcopy(json.loads(params_json))
+        params["optim_conf"]['load_forecast_method'] = "skforecast"
+        # runtime parameters
         runtimeparams = {
             "csv_file": "heating_prediction.csv",
             "features": ["degreeday", "solar"],
@@ -59,10 +57,10 @@ def setUp(self):
             "date_features": ["month", "day_of_week"],
             "new_values": [12.79, 4.766, 1, 2]
         }
-        runtimeparams_json = json.dumps(runtimeparams)
         params["passed_data"] = runtimeparams
-        params["optim_conf"]["load_forecast_method"] = "skforecast"
+        runtimeparams_json = json.dumps(runtimeparams)
         params_json = json.dumps(params)
+        # build data dictionary
         self.input_data_dict = set_input_data_dict(
             emhass_conf,
             costfun,
@@ -74,6 +72,7 @@ def setUp(self):
         )
         data = copy.deepcopy(self.input_data_dict["df_input_data"])
         self.assertIsInstance(data, pd.DataFrame)
+        # create MLRegressor object
         self.csv_file = self.input_data_dict["params"]["passed_data"]["csv_file"]
         features = self.input_data_dict["params"]["passed_data"]["features"]
         target = self.input_data_dict["params"]["passed_data"]["target"]
@@ -96,10 +95,12 @@ def setUp(self):
             logger,
         )
 
+    # Test Regressor fit
     def test_fit(self):
         self.mlr.fit(self.date_features)
         self.assertIsInstance(self.mlr.model, Pipeline)
 
+    # Test Regressor predict
     def test_predict(self):
         self.mlr.fit(self.date_features)
         predictions = self.mlr.predict(self.new_values)
diff --git a/tests/test_optimization.py b/tests/test_optimization.py
index a2a1bf7e..b53061e8 100644
--- a/tests/test_optimization.py
+++ b/tests/test_optimization.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
+import json
 import os
 import unittest
 import pandas as pd
@@ -13,15 +14,18 @@
 from emhass.retrieve_hass import RetrieveHass
 from emhass.optimization import Optimization
 from emhass.forecast import Forecast
-from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger
+from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger, build_params, build_secrets
 from pandas.testing import assert_series_equal
 
 # the root folder
-root = str(get_root(__file__, num_parent=2))
+root = pathlib.Path(get_root(__file__, num_parent=2))
 emhass_conf = {}
-emhass_conf['config_path'] = pathlib.Path(root) / 'config_emhass.yaml'
-emhass_conf['data_path'] = pathlib.Path(root) / 'data/'
-emhass_conf['root_path'] = pathlib.Path(root) / 'src/emhass/'
+emhass_conf['data_path'] = root / 'data/'
+emhass_conf['root_path'] = root / 'src/emhass/'
+emhass_conf['defaults_path'] = emhass_conf['data_path'] / 'config_defaults.json'
+emhass_conf['associations_path'] = emhass_conf['data_path'] / 'associations.csv'
+
+
 # 
create logger logger, ch = get_logger(__name__, emhass_conf, save_to_file=False) @@ -30,39 +34,52 @@ class TestOptimization(unittest.TestCase): def setUp(self): get_data_from_file = True - params = '{"passed_data": {}}' - retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(emhass_conf, use_secrets=False) + params = {} + # Obtain configs and build params + if emhass_conf['defaults_path'].exists(): + with emhass_conf['defaults_path'].open('r') as data: + defaults = json.load(data) + updated_emhass_conf, built_secrets = build_secrets(emhass_conf,logger) + emhass_conf.update(updated_emhass_conf) + params.update(build_params(emhass_conf, built_secrets, defaults, logger)) + else: + raise Exception("config_defaults. does not exist in path: "+str(emhass_conf['defaults_path'] )) + retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(json.dumps(params),logger) self.retrieve_hass_conf, self.optim_conf, self.plant_conf = \ retrieve_hass_conf, optim_conf, plant_conf + #Build RetrieveHass object self.rh = RetrieveHass(self.retrieve_hass_conf['hass_url'], self.retrieve_hass_conf['long_lived_token'], - self.retrieve_hass_conf['freq'], self.retrieve_hass_conf['time_zone'], - params, emhass_conf, logger) + self.retrieve_hass_conf['optimization_time_step'], self.retrieve_hass_conf['time_zone'], + json.dumps(params), emhass_conf, logger) + # Obtain sensor values from saved file if get_data_from_file: with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp: self.rh.df_final, self.days_list, self.var_list = pickle.load(inp) - self.retrieve_hass_conf['var_load'] = str(self.var_list[0]) - self.retrieve_hass_conf['var_PV'] = str(self.var_list[1]) - self.retrieve_hass_conf['var_interp'] = [retrieve_hass_conf['var_PV'], retrieve_hass_conf['var_load']] - self.retrieve_hass_conf['var_replace_zero'] = [retrieve_hass_conf['var_PV']] + self.retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(self.var_list[0]) + self.retrieve_hass_conf['sensor_power_photovoltaics'] = str(self.var_list[1]) + self.retrieve_hass_conf['sensor_linear_interp'] = [retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']] + self.retrieve_hass_conf['sensor_replace_zero'] = [retrieve_hass_conf['sensor_power_photovoltaics']] + # Else obtain sensor values from HA else: - self.days_list = get_days_list(self.retrieve_hass_conf['days_to_retrieve']) - self.var_list = [self.retrieve_hass_conf['var_load'], self.retrieve_hass_conf['var_PV']] + self.days_list = get_days_list(self.retrieve_hass_conf['historic_days_to_retrieve']) + self.var_list = [self.retrieve_hass_conf['sensor_power_load_no_var_loads'], self.retrieve_hass_conf['sensor_power_photovoltaics']] self.rh.get_data(self.days_list, self.var_list, minimal_response=False, significant_changes_only=False) - self.rh.prepare_data(self.retrieve_hass_conf['var_load'], load_negative = self.retrieve_hass_conf['load_negative'], + # Prepare data for optimization + self.rh.prepare_data(self.retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = self.retrieve_hass_conf['load_negative'], set_zero_min = self.retrieve_hass_conf['set_zero_min'], - var_replace_zero = self.retrieve_hass_conf['var_replace_zero'], - var_interp = self.retrieve_hass_conf['var_interp']) + var_replace_zero = self.retrieve_hass_conf['sensor_replace_zero'], + var_interp = self.retrieve_hass_conf['sensor_linear_interp']) self.df_input_data = self.rh.df_final.copy() - + #Build Forecast object self.fcst = Forecast(self.retrieve_hass_conf, 
self.optim_conf, self.plant_conf, - params, emhass_conf, logger, get_data_from_file=get_data_from_file) + json.dumps(params), emhass_conf, logger, get_data_from_file=get_data_from_file) self.df_weather = self.fcst.get_weather_forecast(method='csv') self.P_PV_forecast = self.fcst.get_power_from_weather(self.df_weather) self.P_load_forecast = self.fcst.get_load_forecast(method=optim_conf['load_forecast_method']) self.df_input_data_dayahead = pd.concat([self.P_PV_forecast, self.P_load_forecast], axis=1) self.df_input_data_dayahead.columns = ['P_PV_forecast', 'P_load_forecast'] - + #Build Optimization object self.costfun = 'profit' self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, self.fcst.var_load_cost, self.fcst.var_prod_price, @@ -73,6 +90,7 @@ def setUp(self): 'retrieve_hass_conf': retrieve_hass_conf, } + # Check formatting of output from perfect optimization def test_perform_perfect_forecast_optim(self): self.opt_res = self.opt.perform_perfect_forecast_optim(self.df_input_data, self.days_list) self.assertIsInstance(self.opt_res, type(pd.DataFrame())) @@ -80,7 +98,9 @@ def test_perform_perfect_forecast_optim(self): self.assertIsInstance(self.opt_res.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) self.assertTrue('cost_fun_'+self.costfun in self.opt_res.columns) + def test_perform_dayahead_forecast_optim(self): + # Check formatting of output from dayahead optimization self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead) self.df_input_data_dayahead = self.fcst.get_prod_price_forecast(self.df_input_data_dayahead) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( @@ -90,7 +110,7 @@ def test_perform_dayahead_forecast_optim(self): self.assertIsInstance(self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) self.assertTrue('cost_fun_'+self.costfun in self.opt_res_dayahead.columns) self.assertTrue(self.opt_res_dayahead['P_deferrable0'].sum()*( - self.retrieve_hass_conf['freq'].seconds/3600) == self.optim_conf['P_deferrable_nom'][0]*self.optim_conf['def_total_hours'][0]) + self.retrieve_hass_conf['optimization_time_step'].seconds/3600) == self.optim_conf['nominal_power_of_deferrable_loads'][0]*self.optim_conf['operating_hours_of_each_deferrable_load'][0]) # Test the battery, dynamics and grid exchange contraints self.optim_conf.update({'set_use_battery': True}) self.optim_conf.update({'set_nocharge_from_grid': True}) @@ -104,7 +124,7 @@ def test_perform_dayahead_forecast_optim(self): self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame())) self.assertTrue('P_batt' in self.opt_res_dayahead.columns) self.assertTrue('SOC_opt' in self.opt_res_dayahead.columns) - self.assertAlmostEqual(self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1],'SOC_opt'], self.plant_conf['SOCtarget']) + self.assertAlmostEqual(self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1],'SOC_opt'], self.plant_conf['battery_target_state_of_charge']) # Test table conversion opt_res = pd.read_csv(emhass_conf['data_path'] / 'opt_res_latest.csv', index_col='timestamp') cost_cols = [i for i in opt_res.columns if 'cost_' in i] @@ -114,40 +134,40 @@ def test_perform_dayahead_forecast_optim(self): # Check status self.assertTrue('optim_status' in self.opt_res_dayahead.columns) # Test treat_def_as_semi_cont and set_def_constant constraints - self.optim_conf.update({'treat_def_as_semi_cont': [True, True]}) - self.optim_conf.update({'set_def_constant': [True, True]}) + 
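
The energy-balance assertion in this hunk reduces to simple arithmetic: the scheduled deferrable energy must equal nominal power times the requested operating hours. As a worked example with the default configuration values (3000 W, 4 h, 30 min steps):

# With a 30 min step each "on" interval delivers P_nom * 0.5 h of energy, so a
# 3000 W load required to run 4 h must be scheduled for exactly 8 intervals
step_hours = 30 / 60.0        # optimization_time_step in hours
nominal_power = 3000.0        # nominal_power_of_deferrable_loads[0]
operating_hours = 4           # operating_hours_of_each_deferrable_load[0]
scheduled = [nominal_power] * int(operating_hours / step_hours)
assert sum(scheduled) * step_hours == nominal_power * operating_hours
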
self.optim_conf.update({'treat_deferrable_load_as_semi_cont': [True, True]}) + self.optim_conf.update({'set_deferrable_load_single_constant': [True, True]}) self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, self.fcst.var_load_cost, self.fcst.var_prod_price, self.costfun, emhass_conf, logger) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) self.assertTrue(self.opt.optim_status == 'Optimal') - self.optim_conf.update({'treat_def_as_semi_cont': [False, True]}) - self.optim_conf.update({'set_def_constant': [True, True]}) + self.optim_conf.update({'treat_deferrable_load_as_semi_cont': [False, True]}) + self.optim_conf.update({'set_deferrable_load_single_constant': [True, True]}) self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, self.fcst.var_load_cost, self.fcst.var_prod_price, self.costfun, emhass_conf, logger) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) self.assertTrue(self.opt.optim_status == 'Optimal') - self.optim_conf.update({'treat_def_as_semi_cont': [False, True]}) - self.optim_conf.update({'set_def_constant': [False, True]}) + self.optim_conf.update({'treat_deferrable_load_as_semi_cont': [False, True]}) + self.optim_conf.update({'set_deferrable_load_single_constant': [False, True]}) self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, self.fcst.var_load_cost, self.fcst.var_prod_price, self.costfun, emhass_conf, logger) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) self.assertTrue(self.opt.optim_status == 'Optimal') - self.optim_conf.update({'treat_def_as_semi_cont': [False, False]}) - self.optim_conf.update({'set_def_constant': [False, True]}) + self.optim_conf.update({'treat_deferrable_load_as_semi_cont': [False, False]}) + self.optim_conf.update({'set_deferrable_load_single_constant': [False, True]}) self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, self.fcst.var_load_cost, self.fcst.var_prod_price, self.costfun, emhass_conf, logger) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) self.assertTrue(self.opt.optim_status == 'Optimal') - self.optim_conf.update({'treat_def_as_semi_cont': [False, False]}) - self.optim_conf.update({'set_def_constant': [False, False]}) + self.optim_conf.update({'treat_deferrable_load_as_semi_cont': [False, False]}) + self.optim_conf.update({'set_deferrable_load_single_constant': [False, False]}) self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, self.fcst.var_load_cost, self.fcst.var_prod_price, self.costfun, emhass_conf, logger) @@ -177,7 +197,9 @@ def test_perform_dayahead_forecast_optim(self): self.assertTrue('cost_fun_'+self.costfun in self.opt_res_dayahead.columns) self.assertTrue(self.opt.optim_status == 'Optimal') - def test_perform_dayahead_forecast_optim_costfun_selfconso(self): + + # Check formatting of output from dayahead optimization in self-consumption + def test_perform_dayahead_forecast_optim_costfun_selfconsumption(self): costfun = 'self-consumption' self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, self.fcst.var_load_cost, self.fcst.var_prod_price, @@ -191,6 +213,7 @@ def 
test_perform_dayahead_forecast_optim_costfun_selfconso(self): self.assertIsInstance(self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) self.assertTrue('cost_fun_selfcons' in self.opt_res_dayahead.columns) + # Check formatting of output from dayahead optimization in cost def test_perform_dayahead_forecast_optim_costfun_cost(self): costfun = 'cost' self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, @@ -205,10 +228,11 @@ def test_perform_dayahead_forecast_optim_costfun_cost(self): self.assertIsInstance(self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) self.assertTrue('cost_fun_cost' in self.opt_res_dayahead.columns) + # def test_perform_dayahead_forecast_optim_aux(self): - self.optim_conf['treat_def_as_semi_cont'] = [False, False] + self.optim_conf['treat_deferrable_load_as_semi_cont'] = [False, False] self.optim_conf['set_total_pv_sell'] = True - self.optim_conf['set_def_constant'] = [True, True] + self.optim_conf['set_deferrable_load_single_constant'] = [True, True] self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, self.fcst.var_load_cost, self.fcst.var_prod_price, self.costfun, emhass_conf, logger) @@ -219,12 +243,13 @@ def test_perform_dayahead_forecast_optim_aux(self): self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame())) self.assertIsInstance(self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex) self.assertIsInstance(self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + # Test dayahead optimization using different solvers import pulp as pl solver_list = pl.listSolvers(onlyAvailable=True) for solver in solver_list: self.optim_conf['lp_solver'] = solver - if os.getenv("LP_SOLVER_PATH", default=None) == None: - self.optim_conf['lp_solver_path'] = os.getenv("LP_SOLVER_PATH", default=None) + if os.getenv('lp_solver_path', default=None) == None: + self.optim_conf['lp_solver_path'] = os.getenv('lp_solver_path', default=None) self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, self.fcst.var_load_cost, self.fcst.var_prod_price, self.costfun, emhass_conf, logger) @@ -257,9 +282,10 @@ def test_perform_naive_mpc_optim(self): self.assertTrue('P_batt' in self.opt_res_dayahead.columns) self.assertTrue('SOC_opt' in self.opt_res_dayahead.columns) self.assertTrue(np.abs(self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1],'SOC_opt']-soc_final)<1e-3) - term1 = self.optim_conf['P_deferrable_nom'][0]*def_total_hours[0] - term2 = self.opt_res_dayahead['P_deferrable0'].sum()*(self.retrieve_hass_conf['freq'].seconds/3600) + term1 = self.optim_conf['nominal_power_of_deferrable_loads'][0]*def_total_hours[0] + term2 = self.opt_res_dayahead['P_deferrable0'].sum()*(self.retrieve_hass_conf['optimization_time_step'].seconds/3600) self.assertTrue(np.abs(term1-term2)<1e-3) + # soc_init = 0.8 soc_final = 0.5 self.opt_res_dayahead = self.opt.perform_naive_mpc_optim( @@ -267,6 +293,7 @@ def test_perform_naive_mpc_optim(self): soc_init=soc_init, soc_final=soc_final, def_total_hours=def_total_hours, def_start_timestep=def_start_timestep, def_end_timestep=def_end_timestep) self.assertAlmostEqual(self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1],'SOC_opt'], soc_final) + # Test format output of dayahead optimization with a thermal deferrable load def test_thermal_load_optim(self): self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead) self.df_input_data_dayahead = 
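
The solver sweep in test_perform_dayahead_forecast_optim_aux leans on PuLP's solver registry; that discovery step can be reproduced standalone. A minimal sketch, where the cbc binary path is an assumption matching the /usr/bin/cbc default from config.json:

import pulp as pl

# Only solvers that PuLP can actually reach on this machine are returned
solver_list = pl.listSolvers(onlyAvailable=True)
print(solver_list)  # e.g. ['PULP_CBC_CMD', 'COIN_CMD'], depending on the image
# A named solver can then be instantiated, optionally with an explicit binary path
solver = pl.getSolver('COIN_CMD', path='/usr/bin/cbc')
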
self.fcst.get_prod_price_forecast(self.df_input_data_dayahead) @@ -300,15 +327,16 @@ def test_thermal_load_optim(self): self.assertTrue('cost_fun_'+self.costfun in self.opt_res_dayahead.columns) self.assertTrue(self.opt.optim_status == 'Optimal') + # Setup function to run dayahead optimization for the following tests def run_penalty_test_forecast(self): self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, self.fcst.var_load_cost, self.fcst.var_prod_price, self.costfun, emhass_conf, logger) - def_total_hours = [5 * self.retrieve_hass_conf["freq"].seconds / 3600.0] + def_total_hours = [5 * self.retrieve_hass_conf['optimization_time_step'].seconds / 3600.0] def_start_timestep = [0] def_end_timestep = [0] prediction_horizon = 10 - self.optim_conf.update({"num_def_loads": 1}) + self.optim_conf.update({'number_of_deferrable_loads': 1}) self.fcst.params["passed_data"]["prod_price_forecast"] = [0 for i in range(prediction_horizon)] self.fcst.params["passed_data"]["solar_forecast_kwp"] = [ @@ -333,71 +361,76 @@ def run_penalty_test_forecast(self): def_end_timestep=def_end_timestep ) + # Test load is constant def test_constant_load(self): self.fcst.params["passed_data"]["load_cost_forecast"] = [2,1,1,1,1,1.5,1.1,2,2,2] - self.optim_conf.update({"set_def_constant": [True]}) + self.optim_conf.update({'set_deferrable_load_single_constant': [True]}) self.run_penalty_test_forecast() assert_series_equal( self.opt_res_dayahead["P_deferrable0"], - self.optim_conf["P_deferrable_nom"][0] + self.optim_conf['nominal_power_of_deferrable_loads'][0] * pd.Series( [0, 1, 1, 1, 1, 1, 0, 0, 0, 0], index=self.opt_res_dayahead.index ), check_names=False, ) + # Test no startup penalty when bump is small def test_startup_penalty_continuous_with_small_bump(self): self.fcst.params["passed_data"]["load_cost_forecast"] = [2,1,1,1,1,1.5,1.1,2,2,2] - self.optim_conf.update({"def_start_penalty": [100.0]}) + self.optim_conf.update({'set_deferrable_startup_penalty': [100.0]}) self.run_penalty_test_forecast() assert_series_equal( self.opt_res_dayahead["P_deferrable0"], - self.optim_conf["P_deferrable_nom"][0] + self.optim_conf['nominal_power_of_deferrable_loads'][0] * pd.Series( [0, 1, 1, 1, 1, 1, 0, 0, 0, 0], index=self.opt_res_dayahead.index ), check_names=False, ) + # Test startup penalty def test_startup_penalty_discontinuity_when_justified(self): self.fcst.params["passed_data"]["load_cost_forecast"] = [2,1,1,1,1,1.5,1.1,2,2,2] - self.optim_conf.update({"def_start_penalty": [0.1]}) + self.optim_conf.update({'set_deferrable_startup_penalty': [0.1]}) self.run_penalty_test_forecast() assert_series_equal(self.opt_res_dayahead["P_deferrable0"], - self.optim_conf["P_deferrable_nom"][0] * + self.optim_conf['nominal_power_of_deferrable_loads'][0] * pd.Series([0, 1, 1, 1, 1, 0, 1, 0, 0, 0], index=self.opt_res_dayahead.index), check_names=False) + # Test penalty continuity when deferrable load is already on def test_startup_penalty_no_discontinuity_at_start(self): self.fcst.params["passed_data"]["load_cost_forecast"] = [1.2,1,1,1,1,1.1,2,2,2,2] self.optim_conf.update({ - "def_start_penalty": [100.0], + 'set_deferrable_startup_penalty': [100.0], "def_current_state": [True], }) self.run_penalty_test_forecast() assert_series_equal(self.opt_res_dayahead["P_deferrable0"], - self.optim_conf["P_deferrable_nom"][0] * + self.optim_conf['nominal_power_of_deferrable_loads'][0] * pd.Series([1, 1, 1, 1, 1, 0, 0, 0, 0, 0], index=self.opt_res_dayahead.index), check_names=False) + # Test delay start def 
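
The startup-penalty cases that follow differ only in how many off-to-on transitions the optimizer is willing to pay for. Counting those transitions in a candidate schedule is a one-liner, sketched here with plain pandas using the expected series from the discontinuity test:

import pandas as pd

# The discontinuous schedule accepted when the penalty is small (0.1)
schedule = pd.Series([0, 1, 1, 1, 1, 0, 1, 0, 0, 0])
# A startup is a 0 -> 1 transition; seeding diff() with the initial state makes
# a load that begins "on" count once (cf. the def_current_state cases)
startups = (schedule.diff().fillna(schedule.iloc[0]) == 1).sum()
assert startups == 2
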
test_startup_penalty_delayed_start(self): self.fcst.params["passed_data"]["load_cost_forecast"] = [1.2,1,1,1,1,1.1,2,2,2,2] self.optim_conf.update( { - "def_start_penalty": [100.0], + 'set_deferrable_startup_penalty': [100.0], "def_current_state": [False], } ) @@ -406,7 +439,7 @@ def test_startup_penalty_delayed_start(self): assert_series_equal( self.opt_res_dayahead["P_deferrable0"], - self.optim_conf["P_deferrable_nom"][0] + self.optim_conf['nominal_power_of_deferrable_loads'][0] * pd.Series( [0, 1, 1, 1, 1, 1, 0, 0, 0, 0], index=self.opt_res_dayahead.index ), diff --git a/tests/test_retrieve_hass.py b/tests/test_retrieve_hass.py index 3526334c..6a3db80e 100644 --- a/tests/test_retrieve_hass.py +++ b/tests/test_retrieve_hass.py @@ -3,21 +3,27 @@ import datetime import unittest -import requests_mock +import requests_mock import numpy as np, pandas as pd -import pytz, pathlib, pickle, json, yaml, copy +import pytz, pathlib, pickle, json, copy import bz2 import pickle as cPickle +from emhass import utils from emhass.retrieve_hass import RetrieveHass -from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger +from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger, build_params # the root folder -root = str(get_root(__file__, num_parent=2)) +root = pathlib.Path(get_root(__file__, num_parent=2)) emhass_conf = {} -emhass_conf['config_path'] = pathlib.Path(root) / 'config_emhass.yaml' -emhass_conf['data_path'] = pathlib.Path(root) / 'data/' -emhass_conf['root_path'] = pathlib.Path(root) / 'src/emhass/' +emhass_conf['data_path'] = root / 'data/' +emhass_conf['root_path'] = root / 'src/emhass/' +emhass_conf['options_path'] = root / 'options.json' +emhass_conf['secrets_path'] = root / 'secrets_emhass(example).yaml' +emhass_conf['defaults_path'] = emhass_conf['data_path'] / 'config_defaults.json' +emhass_conf['associations_path'] = emhass_conf['data_path'] / 'associations.csv' + + # create logger logger, ch = get_logger(__name__, emhass_conf, save_to_file=False) @@ -27,81 +33,76 @@ class TestRetrieveHass(unittest.TestCase): def setUp(self): get_data_from_file = True save_data_to_file = False - params = None - retrieve_hass_conf, _, _ = get_yaml_parse(emhass_conf, use_secrets=False) - - #Force config params for testing - retrieve_hass_conf['var_PV'] = 'sensor.power_photovoltaics' - retrieve_hass_conf['var_load'] = 'sensor.power_load_no_var_loads' - retrieve_hass_conf['var_replace_zero'] = ['sensor.power_photovoltaics'] - retrieve_hass_conf['var_interp'] = ['sensor.power_photovoltaics','sensor.power_load_no_var_loads'] - + + params = {} + # Build secrets file with 'secrets_emhass(example).yaml' + updated_emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger,secrets_path=emhass_conf['secrets_path'] ) + emhass_conf.update(updated_emhass_conf) + params.update(utils.build_params(emhass_conf, built_secrets, {}, logger=logger)) + retrieve_hass_conf, _, _ = get_yaml_parse(params,logger) + + # Force config params for testing + retrieve_hass_conf["optimization_time_step"] = pd.to_timedelta(30, "minutes") + retrieve_hass_conf['sensor_power_photovoltaics'] = 'sensor.power_photovoltaics' + retrieve_hass_conf['sensor_power_load_no_var_loads'] = 'sensor.power_load_no_var_loads' + retrieve_hass_conf['sensor_replace_zero'] = ['sensor.power_photovoltaics'] + retrieve_hass_conf['sensor_linear_interp'] = ['sensor.power_photovoltaics','sensor.power_load_no_var_loads'] + retrieve_hass_conf['set_zero_min'] = True + retrieve_hass_conf['load_negative'] = True + 
self.retrieve_hass_conf = retrieve_hass_conf self.rh = RetrieveHass(self.retrieve_hass_conf['hass_url'], self.retrieve_hass_conf['long_lived_token'], - self.retrieve_hass_conf['freq'], self.retrieve_hass_conf['time_zone'], + self.retrieve_hass_conf['optimization_time_step'], self.retrieve_hass_conf['time_zone'], params, emhass_conf, logger, get_data_from_file=get_data_from_file) + # Obtain sensor values from saved file if get_data_from_file: with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp: self.rh.df_final, self.days_list, self.var_list = pickle.load(inp) + # Else obtain sensor values from HA else: - self.days_list = get_days_list(self.retrieve_hass_conf['days_to_retrieve']) - self.var_list = [self.retrieve_hass_conf['var_load'], self.retrieve_hass_conf['var_PV']] + self.days_list = get_days_list(self.retrieve_hass_conf['historic_days_to_retrieve']) + self.var_list = [self.retrieve_hass_conf['sensor_power_load_no_var_loads'], self.retrieve_hass_conf['sensor_power_photovoltaics']] self.rh.get_data(self.days_list, self.var_list, minimal_response=False, significant_changes_only=False) + # Check to save updated data to file if save_data_to_file: with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'wb') as outp: pickle.dump((self.rh.df_final, self.days_list, self.var_list), outp, pickle.HIGHEST_PROTOCOL) self.df_raw = self.rh.df_final.copy() + # Check yaml parse in setUp worked def test_get_yaml_parse(self): - with open(emhass_conf['config_path'], 'r') as file: - params = yaml.load(file, Loader=yaml.FullLoader) - params.update({ - 'params_secrets': { - 'hass_url': 'http://supervisor/core/api', - 'long_lived_token': '${SUPERVISOR_TOKEN}', - 'time_zone': 'Europe/Paris', - 'lat': 45.83, - 'lon': 6.86, - 'alt': 4807.8 - } - }) - params = json.dumps(params) - retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(emhass_conf, - use_secrets=True, params=params) - self.assertIsInstance(retrieve_hass_conf, dict) - self.assertTrue('hass_url' in retrieve_hass_conf.keys()) - self.assertTrue(retrieve_hass_conf['hass_url'] == 'http://supervisor/core/api') - self.assertIsInstance(optim_conf, dict) - self.assertIsInstance(plant_conf, dict) + self.assertIsInstance(self.retrieve_hass_conf, dict) + self.assertTrue('hass_url' in self.retrieve_hass_conf.keys()) + self.assertTrue(self.retrieve_hass_conf['hass_url'] == 'https://myhass.duckdns.org/') - def test_yaml_parse_wab_server(self): - with open(emhass_conf['config_path'], 'r') as file: - config = yaml.load(file, Loader=yaml.FullLoader) - retrieve_hass_conf = config['retrieve_hass_conf'] - optim_conf = config['optim_conf'] - plant_conf = config['plant_conf'] + # Check yaml parse worked + def test_yaml_parse_web_server(self): params = {} - params['retrieve_hass_conf'] = retrieve_hass_conf - params['optim_conf'] = optim_conf - params['plant_conf'] = plant_conf + if emhass_conf['defaults_path'].exists(): + with emhass_conf['defaults_path'].open('r') as data: + defaults = json.load(data) + params.update(utils.build_params(emhass_conf, {}, defaults, logger)) + _, optim_conf, _ = get_yaml_parse(params,logger) # Just check forecast methods - self.assertFalse(params['optim_conf'].get('weather_forecast_method') == None) - self.assertFalse(params['optim_conf'].get('load_forecast_method') == None) - self.assertFalse(params['optim_conf'].get('load_cost_forecast_method') == None) - self.assertFalse(params['optim_conf'].get('prod_price_forecast_method') == None) + self.assertFalse(optim_conf.get('weather_forecast_method') == None) + 
self.assertFalse(optim_conf.get('load_forecast_method') == None) + self.assertFalse(optim_conf.get('load_cost_forecast_method') == None) + self.assertFalse(optim_conf.get('production_price_forecast_method') == None) + # Assume get_data to HA fails def test_get_data_failed(self): days_list = get_days_list(1) - var_list = [self.retrieve_hass_conf['var_load']] + var_list = [self.retrieve_hass_conf['sensor_power_load_no_var_loads']] response = self.rh.get_data(days_list, var_list) self.assertFalse(response) + # Test with html mock response def test_get_data_mock(self): with requests_mock.mock() as m: days_list = get_days_list(1) - var_list = [self.retrieve_hass_conf['var_load']] + var_list = [self.retrieve_hass_conf['sensor_power_load_no_var_loads']] data = bz2.BZ2File(str(emhass_conf['data_path'] / 'test_response_get_data_get_method.pbz2'), "rb") data = cPickle.load(data) m.get(self.retrieve_hass_conf['hass_url'], json=data.json()) @@ -112,41 +113,45 @@ def test_get_data_mock(self): self.assertIsInstance(self.rh.df_final.index, pd.core.indexes.datetimes.DatetimeIndex) self.assertIsInstance(self.rh.df_final.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) self.assertEqual(len(self.rh.df_final.columns), len(var_list)) - self.assertEqual(self.rh.df_final.index.freq, self.retrieve_hass_conf['freq']) + self.assertEqual(self.rh.df_final.index.freq, self.retrieve_hass_conf['optimization_time_step']) self.assertEqual(self.rh.df_final.index.tz, datetime.timezone.utc) + + # Check the dataframe was formatted correctly def test_prepare_data(self): self.assertIsInstance(self.rh.df_final, type(pd.DataFrame())) self.assertIsInstance(self.rh.df_final.index, pd.core.indexes.datetimes.DatetimeIndex) self.assertIsInstance(self.rh.df_final.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) self.assertEqual(len(self.rh.df_final.columns), len(self.var_list)) self.assertEqual(self.rh.df_final.index.isin(self.days_list).sum(), len(self.days_list)) - self.assertEqual(self.rh.df_final.index.freq, self.retrieve_hass_conf['freq']) + self.assertEqual(self.rh.df_final.index.freq, self.retrieve_hass_conf['optimization_time_step']) self.assertEqual(self.rh.df_final.index.tz, datetime.timezone.utc) - self.rh.prepare_data(self.retrieve_hass_conf['var_load'], + self.rh.prepare_data(self.retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = self.retrieve_hass_conf['load_negative'], set_zero_min = self.retrieve_hass_conf['set_zero_min'], - var_replace_zero = self.retrieve_hass_conf['var_replace_zero'], - var_interp = self.retrieve_hass_conf['var_interp']) + var_replace_zero = self.retrieve_hass_conf['sensor_replace_zero'], + var_interp = self.retrieve_hass_conf['sensor_linear_interp']) self.assertIsInstance(self.rh.df_final, type(pd.DataFrame())) self.assertEqual(self.rh.df_final.index.isin(self.days_list).sum(), self.df_raw.index.isin(self.days_list).sum()) self.assertEqual(len(self.rh.df_final.columns), len(self.df_raw.columns)) - self.assertEqual(self.rh.df_final.index.freq, self.retrieve_hass_conf['freq']) + self.assertEqual(self.rh.df_final.index.freq, self.retrieve_hass_conf['optimization_time_step']) self.assertEqual(self.rh.df_final.index.tz, self.retrieve_hass_conf['time_zone']) + # Test negative load def test_prepare_data_negative_load(self): - self.rh.df_final[self.retrieve_hass_conf['var_load']] = -self.rh.df_final[self.retrieve_hass_conf['var_load']] - self.rh.prepare_data(self.retrieve_hass_conf['var_load'], + self.rh.df_final[self.retrieve_hass_conf['sensor_power_load_no_var_loads']] = 
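
The requests_mock pattern exercised by test_get_data_mock can be reduced to a self-contained sketch; the payload shape below is illustrative, not the exact Home Assistant history format:

import requests
import requests_mock

hass_url = 'https://myhass.duckdns.org/'  # URL from the example secrets file
with requests_mock.mock() as m:
    # Intercept the GET that RetrieveHass would issue and return canned JSON
    m.get(hass_url, json=[[{'state': '100.0'}]])
    response = requests.get(hass_url)
    assert response.json()[0][0]['state'] == '100.0'
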
-self.rh.df_final[self.retrieve_hass_conf['sensor_power_load_no_var_loads']] + self.rh.prepare_data(self.retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = True, set_zero_min = self.retrieve_hass_conf['set_zero_min'], - var_replace_zero = self.retrieve_hass_conf['var_replace_zero'], + var_replace_zero = self.retrieve_hass_conf['sensor_replace_zero'], var_interp = None) self.assertIsInstance(self.rh.df_final, type(pd.DataFrame())) self.assertEqual(self.rh.df_final.index.isin(self.days_list).sum(), self.df_raw.index.isin(self.days_list).sum()) self.assertEqual(len(self.rh.df_final.columns), len(self.df_raw.columns)) - self.assertEqual(self.rh.df_final.index.freq, self.retrieve_hass_conf['freq']) + self.assertEqual(self.rh.df_final.index.freq, self.retrieve_hass_conf['optimization_time_step']) self.assertEqual(self.rh.df_final.index.tz, self.retrieve_hass_conf['time_zone']) + # Test publish data def test_publish_data(self): response, data = self.rh.post_data(self.df_raw[self.df_raw.columns[0]], 25, 'sensor.p_pv_forecast', "Unit", "Variable", diff --git a/tests/test_utils.py b/tests/test_utils.py index bb1ce018..7a0a72fe 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -3,73 +3,83 @@ import unittest import pandas as pd -import pathlib, json, yaml, copy +import pathlib, json from emhass import utils -# the root folder -root = str(utils.get_root(__file__, num_parent=2)) +# The root folder +root = pathlib.Path(utils.get_root(__file__, num_parent=2)) emhass_conf = {} -emhass_conf['config_path'] = pathlib.Path(root) / 'config_emhass.yaml' -emhass_conf['data_path'] = pathlib.Path(root) / 'data/' -emhass_conf['root_path'] = pathlib.Path(root) / 'src/emhass/' +emhass_conf['data_path'] = root / 'data/' +emhass_conf['root_path'] = root / 'src/emhass/' +emhass_conf['options_path'] = root / 'options.json' +emhass_conf['secrets_path'] = root / 'secrets_emhass(example).yaml' +emhass_conf['defaults_path'] = emhass_conf['data_path'] / 'config_defaults.json' +emhass_conf['associations_path'] = emhass_conf['data_path'] / 'associations.csv' -# create logger +# Create logger logger, ch = utils.get_logger(__name__, emhass_conf, save_to_file=False) class TestCommandLineUtils(unittest.TestCase): @staticmethod def get_test_params(): - with open(emhass_conf['config_path'], 'r') as file: - params = yaml.load(file, Loader=yaml.FullLoader) - params.update({ - 'params_secrets': { - 'hass_url': 'http://supervisor/core/api', - 'long_lived_token': '${SUPERVISOR_TOKEN}', - 'time_zone': 'Europe/Paris', - 'lat': 45.83, - 'lon': 6.86, - 'alt': 8000.0 - } - }) + params = {} + if emhass_conf['defaults_path'].exists(): + with emhass_conf['defaults_path'].open('r') as data: + defaults = json.load(data) + updated_emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger) + emhass_conf.update(updated_emhass_conf) + built_secrets['Altitude'] = 8000.0 + params.update(utils.build_params(emhass_conf, built_secrets, defaults, logger)) + else: + raise Exception("config_defaults. 
does not exist in path: "+str(emhass_conf['defaults_path'] )) + return params def setUp(self): params = TestCommandLineUtils.get_test_params() + # Add runtime parameters for forecast lists runtimeparams = { - 'pv_power_forecast':[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48], - 'load_power_forecast':[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48], - 'load_cost_forecast':[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48], - 'prod_price_forecast':[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48] + 'pv_power_forecast':[i+1 for i in range(48)], + 'load_power_forecast':[i+1 for i in range(48)], + 'load_cost_forecast':[i+1 for i in range(48)], + 'prod_price_forecast':[i+1 for i in range(48)] } self.runtimeparams_json = json.dumps(runtimeparams) params['passed_data'] = runtimeparams params['optim_conf']['weather_forecast_method'] = 'list' params['optim_conf']['load_forecast_method'] = 'list' params['optim_conf']['load_cost_forecast_method'] = 'list' - params['optim_conf']['prod_price_forecast_method'] = 'list' + params['optim_conf']['production_price_forecast_method'] = 'list' self.params_json = json.dumps(params) def test_get_yaml_parse(self): - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(emhass_conf, use_secrets=False) + # Test get_yaml_parse with only secrets + params = {} + updated_emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger) + emhass_conf.update(updated_emhass_conf) + params.update(utils.build_params(emhass_conf, built_secrets, {}, logger)) + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(json.dumps(params),logger) self.assertIsInstance(retrieve_hass_conf, dict) self.assertIsInstance(optim_conf, dict) self.assertIsInstance(plant_conf, dict) - self.assertTrue(retrieve_hass_conf['alt'] == 4807.8) - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(emhass_conf, use_secrets=True, params=self.params_json) - self.assertTrue(retrieve_hass_conf['alt'] == 8000.0) + self.assertTrue(retrieve_hass_conf['Altitude'] == 4807.8) + # Test get_yaml_parse with built params in get_test_params + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(self.params_json,logger) + self.assertTrue(retrieve_hass_conf['Altitude'] == 8000.0) def test_get_forecast_dates(self): - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(emhass_conf, use_secrets=True, params=self.params_json) - freq = int(retrieve_hass_conf['freq'].seconds/60.0) - delta_forecast = int(optim_conf['delta_forecast'].days) + retrieve_hass_conf, optim_conf, _ = utils.get_yaml_parse(self.params_json,logger) + freq = int(retrieve_hass_conf['optimization_time_step'].seconds/60.0) + delta_forecast = int(optim_conf['delta_forecast_daily'].days) forecast_dates = utils.get_forecast_dates(freq, delta_forecast) self.assertIsInstance(forecast_dates, pd.core.indexes.datetimes.DatetimeIndex) self.assertTrue(len(forecast_dates)==int(delta_forecast*60*24/freq)) def test_treat_runtimeparams(self): - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(emhass_conf, use_secrets=True, params=self.params_json) + # Test dayahead runtime params + retrieve_hass_conf, optim_conf, plant_conf = 
utils.get_yaml_parse(self.params_json,logger) set_type = 'dayahead-optim' params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( self.runtimeparams_json, self.params_json, @@ -83,7 +93,8 @@ def test_treat_runtimeparams(self): self.assertTrue(optim_conf['weather_forecast_method'] == 'list') self.assertTrue(optim_conf['load_forecast_method'] == 'list') self.assertTrue(optim_conf['load_cost_forecast_method'] == 'list') - self.assertTrue(optim_conf['prod_price_forecast_method'] == 'list') + self.assertTrue(optim_conf['production_price_forecast_method'] == 'list') + # Test naive MPC runtime params set_type = 'naive-mpc-optim' params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( self.runtimeparams_json, self.params_json, @@ -91,38 +102,22 @@ def test_treat_runtimeparams(self): self.assertIsInstance(params, str) params = json.loads(params) self.assertTrue(params['passed_data']['prediction_horizon'] == 10) - self.assertTrue(params['passed_data']['soc_init'] == plant_conf['SOCtarget']) - self.assertTrue(params['passed_data']['soc_final'] == plant_conf['SOCtarget']) - self.assertTrue(params['passed_data']['def_total_hours'] == optim_conf['def_total_hours']) - # This will be the case when using emhass in standalone mode - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(emhass_conf, use_secrets=True, params=self.params_json) - params = json.dumps(None) - params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - self.runtimeparams_json, params, - retrieve_hass_conf, optim_conf, plant_conf, set_type, logger) - self.assertIsInstance(params, str) - params = json.loads(params) - self.assertIsInstance(params['passed_data']['pv_power_forecast'], list) - self.assertIsInstance(params['passed_data']['load_power_forecast'], list) - self.assertIsInstance(params['passed_data']['load_cost_forecast'], list) - self.assertIsInstance(params['passed_data']['prod_price_forecast'], list) - self.assertTrue(optim_conf['weather_forecast_method'] == 'list') - self.assertTrue(optim_conf['load_forecast_method'] == 'list') - self.assertTrue(optim_conf['load_cost_forecast_method'] == 'list') - self.assertTrue(optim_conf['prod_price_forecast_method'] == 'list') - # Test passing optimization and plant configuration parameters at runtime + self.assertTrue(params['passed_data']['soc_init'] == plant_conf['battery_target_state_of_charge']) + self.assertTrue(params['passed_data']['soc_final'] == plant_conf['battery_target_state_of_charge']) + self.assertTrue(params['passed_data']['operating_hours_of_each_deferrable_load'] == optim_conf['operating_hours_of_each_deferrable_load']) + # Test passing optimization and plant configuration parameters at runtime runtimeparams = json.loads(self.runtimeparams_json) - runtimeparams.update({'num_def_loads':3}) - runtimeparams.update({'P_deferrable_nom':[3000.0, 750.0, 2500.0]}) - runtimeparams.update({'def_total_hours':[5, 8, 10]}) - runtimeparams.update({'treat_def_as_semi_cont':[True, True, True]}) - runtimeparams.update({'set_def_constant':[False, False, False]}) + runtimeparams.update({'number_of_deferrable_loads':3}) + runtimeparams.update({'nominal_power_of_deferrable_loads':[3000.0, 750.0, 2500.0]}) + runtimeparams.update({'operating_hours_of_each_deferrable_load':[5, 8, 10]}) + runtimeparams.update({'treat_deferrable_load_as_semi_cont':[True, True, True]}) + runtimeparams.update({'set_deferrable_load_single_constant':[False, False, False]}) runtimeparams.update({'weight_battery_discharge':2.0}) 
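
Runtime keys like the ones above are folded into the parsed configuration by utils.treat_runtimeparams. A simplified, illustrative view of that merge (the real function also validates forecast lists and returns the updated params as a JSON string):

def apply_runtime_overrides(runtimeparams, optim_conf, plant_conf):
    # Hypothetical helper for illustration only, not the EMHASS implementation
    optim_keys = {'number_of_deferrable_loads',
                  'nominal_power_of_deferrable_loads',
                  'operating_hours_of_each_deferrable_load',
                  'treat_deferrable_load_as_semi_cont',
                  'set_deferrable_load_single_constant',
                  'weight_battery_discharge', 'weight_battery_charge'}
    plant_keys = {'battery_target_state_of_charge'}
    for key, value in runtimeparams.items():
        if key in optim_keys:
            optim_conf[key] = value
        elif key in plant_keys:
            plant_conf[key] = value
    return optim_conf, plant_conf

optim_conf, plant_conf = apply_runtime_overrides(
    {'number_of_deferrable_loads': 3, 'battery_target_state_of_charge': 0.4}, {}, {})
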
runtimeparams.update({'weight_battery_charge':2.0}) runtimeparams.update({'solcast_api_key':'yoursecretsolcastapikey'}) runtimeparams.update({'solcast_rooftop_id':'yourrooftopid'}) runtimeparams.update({'solar_forecast_kwp':5.0}) - runtimeparams.update({'SOCtarget':0.4}) + runtimeparams.update({'battery_target_state_of_charge':0.4}) runtimeparams.update({'publish_prefix':'emhass_'}) runtimeparams.update({'custom_pv_forecast_id':'my_custom_pv_forecast_id'}) runtimeparams.update({'custom_load_forecast_id':'my_custom_load_forecast_id'}) @@ -134,11 +129,10 @@ def test_treat_runtimeparams(self): runtimeparams.update({'custom_unit_load_cost_id':'my_custom_unit_load_cost_id'}) runtimeparams.update({'custom_unit_prod_price_id':'my_custom_unit_prod_price_id'}) runtimeparams.update({'custom_deferrable_forecast_id':'my_custom_deferrable_forecast_id'}) - runtimeparams_json = json.dumps(runtimeparams) - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(emhass_conf, use_secrets=True, params=self.params_json) + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(self.params_json,logger) set_type = 'dayahead-optim' params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - runtimeparams_json, self.params_json, + runtimeparams, self.params_json, retrieve_hass_conf, optim_conf, plant_conf, set_type, logger) self.assertIsInstance(params, str) params = json.loads(params) @@ -146,17 +140,17 @@ def test_treat_runtimeparams(self): self.assertIsInstance(params['passed_data']['load_power_forecast'], list) self.assertIsInstance(params['passed_data']['load_cost_forecast'], list) self.assertIsInstance(params['passed_data']['prod_price_forecast'], list) - self.assertTrue(optim_conf['num_def_loads'] == 3) - self.assertTrue(optim_conf['P_deferrable_nom'] == [3000.0, 750.0, 2500.0]) - self.assertTrue(optim_conf['def_total_hours'] == [5, 8, 10]) - self.assertTrue(optim_conf['treat_def_as_semi_cont'] == [True, True, True]) - self.assertTrue(optim_conf['set_def_constant'] == [False, False, False]) + self.assertTrue(optim_conf['number_of_deferrable_loads'] == 3) + self.assertTrue(optim_conf['nominal_power_of_deferrable_loads'] == [3000.0, 750.0, 2500.0]) + self.assertTrue(optim_conf['operating_hours_of_each_deferrable_load'] == [5, 8, 10]) + self.assertTrue(optim_conf['treat_deferrable_load_as_semi_cont'] == [True, True, True]) + self.assertTrue(optim_conf['set_deferrable_load_single_constant'] == [False, False, False]) self.assertTrue(optim_conf['weight_battery_discharge'] == 2.0) self.assertTrue(optim_conf['weight_battery_charge'] == 2.0) self.assertTrue(retrieve_hass_conf['solcast_api_key'] == 'yoursecretsolcastapikey') self.assertTrue(retrieve_hass_conf['solcast_rooftop_id'] == 'yourrooftopid') self.assertTrue(retrieve_hass_conf['solar_forecast_kwp'] == 5.0) - self.assertTrue(plant_conf['SOCtarget'] == 0.4) + self.assertTrue(plant_conf['battery_target_state_of_charge'] == 0.4) self.assertTrue(params['passed_data']['publish_prefix'] == 'emhass_') self.assertTrue(params['passed_data']['custom_pv_forecast_id'] == 'my_custom_pv_forecast_id') self.assertTrue(params['passed_data']['custom_load_forecast_id'] == 'my_custom_load_forecast_id') @@ -170,6 +164,7 @@ def test_treat_runtimeparams(self): self.assertTrue(params['passed_data']['custom_deferrable_forecast_id'] == 'my_custom_deferrable_forecast_id') def test_treat_runtimeparams_failed(self): + # Test treatment of nan values params = TestCommandLineUtils.get_test_params() runtimeparams = { 
'pv_power_forecast':[1,2,3,4,5,'nan',7,8,9,10], @@ -177,24 +172,22 @@ def test_treat_runtimeparams_failed(self): 'load_cost_forecast':[1,2,3,4,5,6,7,8,'nan',10], 'prod_price_forecast':[1,2,3,4,'nan',6,7,8,9,10] } - runtimeparams_json = json.dumps(runtimeparams) params['passed_data'] = runtimeparams params['optim_conf']['weather_forecast_method'] = 'list' params['optim_conf']['load_forecast_method'] = 'list' params['optim_conf']['load_cost_forecast_method'] = 'list' - params['optim_conf']['prod_price_forecast_method'] = 'list' - params_json = json.dumps(params) - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(emhass_conf, use_secrets=True, params=params_json) + params['optim_conf']['production_price_forecast_method'] = 'list' + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params,logger) set_type = 'dayahead-optim' params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - runtimeparams_json, params_json, + runtimeparams, params, retrieve_hass_conf, optim_conf, plant_conf, set_type, logger) - params = json.loads(params) - runtimeparams = json.loads(runtimeparams_json) + self.assertTrue(len([x for x in runtimeparams['pv_power_forecast'] if not str(x).isdigit()])>0) self.assertTrue(len([x for x in runtimeparams['load_power_forecast'] if not str(x).isdigit()])>0) self.assertTrue(len([x for x in runtimeparams['load_cost_forecast'] if not str(x).isdigit()])>0) self.assertTrue(len([x for x in runtimeparams['prod_price_forecast'] if not str(x).isdigit()])>0) + # Test list embedded into a string params = TestCommandLineUtils.get_test_params() runtimeparams = { 'pv_power_forecast':'[1,2,3,4,5,6,7,8,9,10]', @@ -202,56 +195,59 @@ def test_treat_runtimeparams_failed(self): 'load_cost_forecast':'[1,2,3,4,5,6,7,8,9,10]', 'prod_price_forecast':'[1,2,3,4,5,6,7,8,9,10]' } - runtimeparams_json = json.dumps(runtimeparams) params['passed_data'] = runtimeparams params['optim_conf']['weather_forecast_method'] = 'list' params['optim_conf']['load_forecast_method'] = 'list' params['optim_conf']['load_cost_forecast_method'] = 'list' - params['optim_conf']['prod_price_forecast_method'] = 'list' - params_json = json.dumps(params) - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(emhass_conf, use_secrets=True, params=params_json) + params['optim_conf']['production_price_forecast_method'] = 'list' + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params,logger) set_type = 'dayahead-optim' params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - runtimeparams_json, params_json, + runtimeparams, params, retrieve_hass_conf, optim_conf, plant_conf, set_type, logger) - params = json.loads(params) - runtimeparams = json.loads(runtimeparams_json) self.assertIsInstance(runtimeparams['pv_power_forecast'], str) self.assertIsInstance(runtimeparams['load_power_forecast'], str) self.assertIsInstance(runtimeparams['load_cost_forecast'], str) self.assertIsInstance(runtimeparams['prod_price_forecast'], str) - def test_build_params(self): - with open(emhass_conf['config_path'], 'r') as file: - config = yaml.load(file, Loader=yaml.FullLoader) - retrieve_hass_conf = config['retrieve_hass_conf'] - optim_conf = config['optim_conf'] - plant_conf = config['plant_conf'] - params = {} - params['retrieve_hass_conf'] = retrieve_hass_conf - params['optim_conf'] = optim_conf - params['plant_conf'] = plant_conf - options_json = emhass_conf['config_path'].parent / "options.json" - # Read options info - with options_json.open('r') 
as data: - options = json.load(data) - with open(emhass_conf['config_path'].parent / "secrets_emhass(example).yaml", 'r') as file: - params_secrets = yaml.load(file, Loader=yaml.FullLoader) - addon = 1 - params = utils.build_params(params, params_secrets, options, addon, logger) + def test_build_secrets(self): + # Test the build_secrets defaults from get_test_params() + params = TestCommandLineUtils.get_test_params() expected_keys = ['retrieve_hass_conf', 'params_secrets', 'optim_conf', 'plant_conf', 'passed_data'] for key in expected_keys: self.assertTrue(key in params.keys()) - self.assertTrue(params['params_secrets']['time_zone'] == "Europe/Paris") + self.assertTrue(params['retrieve_hass_conf']['time_zone'] == "Europe/Paris") + self.assertTrue(params['retrieve_hass_conf']['hass_url'] == "https://myhass.duckdns.org/") + self.assertTrue(params['retrieve_hass_conf']['long_lived_token'] == "thatverylongtokenhere") + # Test Secrets from options.json + params = {} + _, built_secrets = utils.build_secrets(emhass_conf,logger,options_path=emhass_conf["options_path"]) + params = utils.build_params(emhass_conf, built_secrets, {}, logger) + for key in expected_keys: + self.assertTrue(key in params.keys()) + self.assertTrue(params['retrieve_hass_conf']['time_zone'] == "Europe/Paris") + self.assertTrue(params['retrieve_hass_conf']['hass_url'] == "empty") + self.assertTrue(params['retrieve_hass_conf']['long_lived_token'] == "empty") + # Test Secrets from secrets_emhass(example).yaml + params = {} + _, built_secrets = utils.build_secrets(emhass_conf,logger,secrets_path=emhass_conf["secrets_path"]) + params = utils.build_params(emhass_conf, built_secrets, {}, logger) + for key in expected_keys: + self.assertTrue(key in params.keys()) + self.assertTrue(params['retrieve_hass_conf']['time_zone'] == "Europe/Paris") + self.assertTrue(params['retrieve_hass_conf']['hass_url'] == "https://myhass.duckdns.org/") + self.assertTrue(params['retrieve_hass_conf']['long_lived_token'] == "thatverylongtokenhere") + # Test Secrets from arguments (command_line cli) params = {} - params['retrieve_hass_conf'] = retrieve_hass_conf - params['optim_conf'] = optim_conf - params['plant_conf'] = plant_conf - addon = 0 - params = utils.build_params(params, params_secrets, options, addon, logger) + _, built_secrets = utils.build_secrets(emhass_conf,logger,{"url":"test.url", "key":"test.key" }) + logger.debug("Obtaining long_lived_token from passed argument") + params = utils.build_params(emhass_conf, built_secrets, {}, logger) for key in expected_keys: self.assertTrue(key in params.keys()) - self.assertTrue(params['params_secrets']['time_zone'] == "Europe/Paris") + self.assertTrue(params['retrieve_hass_conf']['time_zone'] == "Europe/Paris") + self.assertTrue(params['retrieve_hass_conf']['hass_url'] == "test.url") + self.assertTrue(params['retrieve_hass_conf']['long_lived_token'] == "test.key") + if __name__ == '__main__': unittest.main() From 09a33afffbe4f98b47a3cc056931267587bff83d Mon Sep 17 00:00:00 2001 From: GeoDerp <18461782+GeoDerp@users.noreply.github.com> Date: Tue, 27 Aug 2024 11:53:32 +0000 Subject: [PATCH 04/35] Configuration page init --- .gitignore | 1 - .vscode/tasks.json | 2 +- Dockerfile | 2 +- config.json | 325 ++++---- data/config_defaults.json | 50 +- setup.py | 4 +- src/emhass/static/basic.html | 5 +- src/emhass/static/configuration_list.html | 43 ++ src/emhass/static/configuration_script.js | 693 ++++++++++++++++++ src/emhass/static/data/param_definitions.json | 407 ++++++++++ src/emhass/static/script.js | 665 
+++++++++-------- src/emhass/static/style.css | 264 ++++++- src/emhass/templates/configuration.html | 77 ++ src/emhass/templates/index.html | 12 +- src/emhass/utils.py | 13 +- src/emhass/web_server.py | 84 ++- 16 files changed, 2114 insertions(+), 533 deletions(-) create mode 100644 src/emhass/static/configuration_list.html create mode 100644 src/emhass/static/configuration_script.js create mode 100644 src/emhass/static/data/param_definitions.json create mode 100644 src/emhass/templates/configuration.html diff --git a/.gitignore b/.gitignore index 2e8d38d6..d4de9630 100644 --- a/.gitignore +++ b/.gitignore @@ -7,7 +7,6 @@ secrets_emhass.yaml .vscode/launch.json .vscode/settings.json .vscode/tasks.json -*.html *.pkl **/app diff --git a/.vscode/tasks.json b/.vscode/tasks.json index f3336bb6..9188cd10 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -20,7 +20,7 @@ "echo": true, "panel": "shared", "focus": true - } + }, }, { "label": "EMHASS install with dependencies", diff --git a/Dockerfile b/Dockerfile index 0a75fff6..ce1603e0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -70,7 +70,7 @@ RUN apt-get purge -y --auto-remove \ && rm -rf /var/lib/apt/lists/* #copy default parameters -COPY config.json /app/ +COPY config.json /share/ #make sure data directory exists RUN mkdir -p /app/data/ diff --git a/config.json b/config.json index cabdca8e..da386eef 100644 --- a/config.json +++ b/config.json @@ -1,164 +1,165 @@ { - "logging_level": "INFO", - "costfun": "profit", - "optimization_time_step": 30, - "historic_days_to_retrieve": 2, - "method_ts_round": "nearest", - "continual_publish": false, - "data_path": "default", - "set_total_pv_sell": false, - "lp_solver": "COIN_CMD", - "lp_solver_path": "/usr/bin/cbc", - "set_nocharge_from_grid": false, - "set_nodischarge_to_grid": true, - "set_battery_dynamic": false, - "battery_dynamic_max": 0.9, - "battery_dynamic_min": -0.9, - "weight_battery_discharge": 1.0, - "weight_battery_charge": 1.0, - "sensor_power_photovoltaics": "sensor.power_photovoltaics", - "sensor_power_load_no_var_loads": "sensor.power_load_no_var_loads", - "list_sensor_replace_zero": [ - { - "sensor_replace_zero": "sensor.power_photovoltaics" - }, - { - "sensor_replace_zero": "sensor.power_load_no_var_loads" - } - ], - "list_sensor_linear_interp": [ - { - "sensor_linear_interp": "sensor.power_photovoltaics" + "logging_level": "INFO", + "costfun": "profit", + "optimization_time_step": 30, + "historic_days_to_retrieve": 2, + "method_ts_round": "nearest", + "continual_publish": false, + "data_path": "default", + "set_total_pv_sell": false, + "lp_solver": "COIN_CMD", + "lp_solver_path": "/usr/bin/cbc", + "set_nocharge_from_grid": false, + "set_nodischarge_to_grid": true, + "set_battery_dynamic": false, + "battery_dynamic_max": 0.9, + "battery_dynamic_min": -0.9, + "weight_battery_discharge": 1.0, + "weight_battery_charge": 1.0, + "sensor_power_photovoltaics": "sensor.power_photovoltaics", + "sensor_power_load_no_var_loads": "sensor.power_load_no_var_loads", + "list_sensor_replace_zero": [ + { + "sensor_replace_zero": "sensor.power_photovoltaics" + }, + { + "sensor_replace_zero": "sensor.power_load_no_var_loads" + } + ], + "list_sensor_linear_interp": [ + { + "sensor_linear_interp": "sensor.power_photovoltaics" + }, + { + "sensor_linear_interp": "sensor.power_load_no_var_loads" + } + ], + "load_negative": false, + "set_zero_min": true, + "number_of_deferrable_loads": 2, + "list_nominal_power_of_deferrable_loads": [ + { + "nominal_power_of_deferrable_loads": 3000.0 + }, + { + 
"nominal_power_of_deferrable_loads": 750.0 + } + ], + "list_operating_hours_of_each_deferrable_load": [ + { + "operating_hours_of_each_deferrable_load": 4 + }, + { + "operating_hours_of_each_deferrable_load": 0 + } + ], + "weather_forecast_method": "scrapper", + "load_forecast_method": "naive", + "delta_forecast_daily": 1, + "load_cost_forecast_method": "hp_hc_periods", + "list_start_timesteps_of_each_deferrable_load": [ + { + "start_timesteps_of_each_deferrable_load": 0 + }, + { + "start_timesteps_of_each_deferrable_load": 0 + } + ], + "list_end_timesteps_of_each_deferrable_load": [ + { + "end_timesteps_of_each_deferrable_load": 0 + }, + { + "end_timesteps_of_each_deferrable_load": 0 + } + ], + "load_peak_hour_periods": { + "period_hp_1": [ + { + "start": "15:24" + }, + { + "end": "02:54" + } + ], + "period_hp_2": [ + { + "start": "15:24" + }, + { + "end": "02:54" + } + ] }, - { - "sensor_linear_interp": "sensor.power_load_no_var_loads" - } - ], - "load_negative": false, - "set_zero_min": true, - "number_of_deferrable_loads": 2, - "list_nominal_power_of_deferrable_loads": [ - { - "nominal_power_of_deferrable_loads": 3000.0 - }, - { - "nominal_power_of_deferrable_loads": 750.0 - } - ], - "list_operating_hours_of_each_deferrable_load": [ - { - "operating_hours_of_each_deferrable_load": 4 - }, - { - "operating_hours_of_each_deferrable_load": 0 - } - ], - "weather_forecast_method": "scrapper", - - "load_forecast_method": "naive", - "delta_forecast_daily": 1, - "load_cost_forecast_method": "hp_hc_periods", - "list_start_timesteps_of_each_deferrable_load": [ - { - "start_timesteps_of_each_deferrable_load": 0 - }, - { - "start_timesteps_of_each_deferrable_load": 0 - } - ], - "list_end_timesteps_of_each_deferrable_load": [ - { - "end_timesteps_of_each_deferrable_load": 0 - }, - { - "end_timesteps_of_each_deferrable_load": 0 - } - ], - "list_peak_hours_periods_start_hours": [ - { - "peak_hours_periods_start_hours": "02:54" - }, - { - "peak_hours_periods_start_hours": "17:24" - } - ], - "list_peak_hours_periods_end_hours": [ - { - "peak_hours_periods_end_hours": "15:24" - }, - { - "peak_hours_periods_end_hours": "20:54" - } - ], - "list_treat_deferrable_load_as_semi_cont": [ - { - "treat_deferrable_load_as_semi_cont": true - }, - { - "treat_deferrable_load_as_semi_cont": true - } - ], - "list_set_deferrable_load_single_constant": [ - { - "set_deferrable_load_single_constant": false - }, - { - "set_deferrable_load_single_constant": false - } - ], - "list_set_deferrable_startup_penalty": [ - { - "set_deferrable_startup_penalty": 0.0 - }, - { - "set_deferrable_startup_penalty": 0.0 - } - ], - "load_peak_hours_cost": 0.1907, - "load_offpeak_hours_cost": 0.1419, - "production_price_forecast_method": "constant", - "photovoltaic_production_sell_price": 0.1419, - "maximum_power_from_grid": 9000, - "maximum_power_to_grid": 9000, - "list_pv_module_model": [ - { - "pv_module_model": "CSUN_Eurasia_Energy_Systems_Industry_and_Trade_CSUN295_60M" - } - ], - "list_pv_inverter_model": [ - { - "pv_inverter_model": "Fronius_International_GmbH__Fronius_Primo_5_0_1_208_240__240V_" - } - ], - "list_surface_tilt": [ - { - "surface_tilt": 30 - } - ], - "list_surface_azimuth": [ - { - "surface_azimuth": 205 - } - ], - "list_modules_per_string": [ - { - "modules_per_string": 16 - } - ], - "list_strings_per_inverter": [ - { - "strings_per_inverter": 1 - } - ], - "inverter_is_hybrid": false, - "compute_curtailment": false, - "set_use_battery": false, - "battery_discharge_power_max": 1000, - "battery_charge_power_max": 1000, - 
"battery_discharge_efficiency": 0.95, - "battery_charge_efficiency": 0.95, - "battery_nominal_energy_capacity": 5000, - "battery_minimum_state_of_charge": 0.3, - "battery_maximum_state_of_charge": 0.9, - "battery_target_state_of_charge": 0.6 -} + "list_treat_deferrable_load_as_semi_cont": [ + { + "treat_deferrable_load_as_semi_cont": true + }, + { + "treat_deferrable_load_as_semi_cont": true + } + ], + "list_set_deferrable_load_single_constant": [ + { + "set_deferrable_load_single_constant": false + }, + { + "set_deferrable_load_single_constant": false + } + ], + "list_set_deferrable_startup_penalty": [ + { + "set_deferrable_startup_penalty": 0.0 + }, + { + "set_deferrable_startup_penalty": 0.0 + } + ], + "load_peak_hours_cost": 0.1907, + "load_offpeak_hours_cost": 0.1419, + "production_price_forecast_method": "constant", + "photovoltaic_production_sell_price": 0.1419, + "maximum_power_from_grid": 9000, + "maximum_power_to_grid": 9000, + "list_pv_module_model": [ + { + "pv_module_model": "CSUN_Eurasia_Energy_Systems_Industry_and_Trade_CSUN295_60M" + } + ], + "list_pv_inverter_model": [ + { + "pv_inverter_model": "Fronius_International_GmbH__Fronius_Primo_5_0_1_208_240__240V_" + } + ], + "list_surface_tilt": [ + { + "surface_tilt": 30 + } + ], + "list_surface_azimuth": [ + { + "surface_azimuth": 205 + } + ], + "list_modules_per_string": [ + { + "modules_per_string": 16 + } + ], + "list_strings_per_inverter": [ + { + "strings_per_inverter": 1 + } + ], + "inverter_is_hybrid": false, + "compute_curtailment": false, + "set_use_battery": false, + "battery_discharge_power_max": 1000, + "battery_charge_power_max": 1000, + "battery_discharge_efficiency": 0.95, + "battery_charge_efficiency": 0.95, + "battery_nominal_energy_capacity": 5000, + "battery_minimum_state_of_charge": 0.3, + "battery_maximum_state_of_charge": 0.9, + "battery_target_state_of_charge": 0.6 +} \ No newline at end of file diff --git a/data/config_defaults.json b/data/config_defaults.json index 5e26274d..16bf7551 100644 --- a/data/config_defaults.json +++ b/data/config_defaults.json @@ -18,13 +18,13 @@ "weight_battery_charge": 1.0, "sensor_power_photovoltaics": "sensor.power_photovoltaics", "sensor_power_load_no_var_loads": "sensor.power_load_no_var_loads", - "list_sensor_replace_zero": [ - { - "sensor_replace_zero": "sensor.power_photovoltaics" - }, - { - "sensor_replace_zero": "sensor.power_load_no_var_loads" - } + "list_sensor_replace_zero": [ + { + "sensor_replace_zero": "sensor.power_photovoltaics" + }, + { + "sensor_replace_zero": "sensor.power_load_no_var_loads" + } ], "list_sensor_linear_interp": [ { @@ -73,22 +73,24 @@ "end_timesteps_of_each_deferrable_load": 0 } ], - "list_peak_hours_periods_start_hours": [ - { - "peak_hours_periods_start_hours": "02:54" - }, - { - "peak_hours_periods_start_hours": "17:24" - } - ], - "list_peak_hours_periods_end_hours": [ - { - "peak_hours_periods_end_hours": "15:24" - }, - { - "peak_hours_periods_end_hours": "20:54" - } - ], + "load_peak_hour_periods": { + "period_hp_1": [ + { + "start": "15:24" + }, + { + "end": "02:54" + } + ], + "period_hp_2": [ + { + "start": "15:24" + }, + { + "end": "02:54" + } + ] + }, "list_treat_deferrable_load_as_semi_cont": [ { "treat_deferrable_load_as_semi_cont": true @@ -160,4 +162,4 @@ "battery_minimum_state_of_charge": 0.3, "battery_maximum_state_of_charge": 0.9, "battery_target_state_of_charge": 0.6 -} +} \ No newline at end of file diff --git a/setup.py b/setup.py index 1c6540f8..63c39fae 100644 --- a/setup.py +++ b/setup.py @@ -62,7 +62,7 @@ 
             'emhass=emhass.command_line:main',
         ],
     },
-    package_data={'emhass': ['templates/index.html','templates/template.html','static/advanced.html','static/basic.html', 'static/script.js',
-        'static/style.css','static/img/emhass_icon.png','static/img/emhass_logo_short.svg', 'static/img/feather-sprite.svg',
+    package_data={'emhass': ['templates/index.html','templates/template.html','templates/configuration.html','static/advanced.html','static/basic.html', 'static/script.js',
+        'static/style.css','static/configuration_list.html','static/img/emhass_icon.png','static/img/emhass_logo_short.svg', 'static/img/feather-sprite.svg','static/data/param_definitions.json',
         'data/cec_modules.pbz2', 'data/cec_inverters.pbz2']},
 )
diff --git a/src/emhass/static/basic.html b/src/emhass/static/basic.html
index 7bc78114..24fea3d0 100644
--- a/src/emhass/static/basic.html
+++ b/src/emhass/static/basic.html
@@ -7,6 +7,7 @@
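The trailing comma after 'static/data/param_definitions.json' in the package_data hunk above matters: Python concatenates adjacent string literals, so omitting it silently merges two paths into one bogus package-data entry. A minimal demonstration:

# Adjacent string literals are concatenated at parse time, so a missing comma
# produces one non-existent path instead of two separate entries.
paths = ["static/data/param_definitions.json" "data/cec_modules.pbz2"]
assert paths == ["static/data/param_definitions.jsondata/cec_modules.pbz2"]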

Use the button below to manually launch optimization task

The day-ahead optimization button will run once, based on the values entered into the configuration page.
- After a few seconds, the charts and table below should be updated to reflect the optimization plan for the next 24 hours. + After a few seconds, the charts and table below should be updated to reflect the optimization plan for the next + 24 hours.
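For reference, this button posts to the action/dayahead-optim endpoint served by web_server.py. A rough equivalent from outside the browser, sketched in Python and assuming a local container listening on port 5000 as in the Docker run example:

import requests

# Trigger a day-ahead optimization the same way the basic page's button does:
# an HTTP POST to action/dayahead-optim with an (optional) runtime-parameter body.
response = requests.post(
    "http://localhost:5000/action/dayahead-optim",
    json={},  # no runtime parameters passed, mirroring the basic view
    timeout=60,
)
print(response.status_code)  # the page's script treats 201 as success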

-
+ \ No newline at end of file diff --git a/src/emhass/static/configuration_list.html b/src/emhass/static/configuration_list.html new file mode 100644 index 00000000..0cf1e959 --- /dev/null +++ b/src/emhass/static/configuration_list.html @@ -0,0 +1,43 @@ + +
+
+

Local

+
+
+
+
+
+

System

+
+
+
+
+
+

Tariff

+
+
+
+
+
+

Solar System (PV)

+
+
+
+
+
+

Deferrable Loads

+ + +
+
+
+
+
+

Battery

+ +
+
+
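The section shells above are populated by configuration_script.js (added next), which reads the saved configuration over HTTP. A rough round trip against those endpoints from Python, assuming the web server is reachable on localhost:5000 as in the Docker example:

import requests

BASE = "http://localhost:5000"

# Fetch the currently built config (defaults are served at /get-config/defaults),
# adjust one value, and post the result back; these are the same /get-config and
# /set-config endpoints the configuration page itself calls.
config = requests.get(f"{BASE}/get-config", timeout=30).json()
config["optimization_time_step"] = 30
response = requests.post(f"{BASE}/set-config", json=config, timeout=30)
print(response.status_code)  # the page treats 201 as success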
\ No newline at end of file
diff --git a/src/emhass/static/configuration_script.js b/src/emhass/static/configuration_script.js
new file mode 100644
index 00000000..6ff530db
--- /dev/null
+++ b/src/emhass/static/configuration_script.js
@@ -0,0 +1,693 @@
+//javascript file for processing configuration page
+
+//Files
+//param_definitions.json : stores information about parameters (e.g. their defaults, their types, and which section they belong in)
+//configuration_list.html : template html used as a base for the list view (params get dynamically added to it)
+
+//on page reload
+window.onload = async function () {
+  //fetch configuration parameters from json file
+  param_definitions = await getParamDefinitions();
+  //obtain configuration from emhass
+  config = await obtainConfig();
+  //obtain list template html to render parameters in list view (as input items)
+  list_html = await getListHTML();
+  //load list parameter page (default)
+  loadConfigurationListPage(param_definitions, config, list_html);
+
+  //add event listener to save button
+  document
+    .getElementById("save")
+    .addEventListener("click", () => saveConfiguration(param_definitions));
+
+  //add event listener to defaults button (reinitialize the view with default values)
+  document
+    .getElementById("defaults")
+    .addEventListener("click", () =>
+      ToggleView(param_definitions, list_html, true)
+    );
+
+  //add event listener to json-toggle button (toggle between json box and list view)
+  document
+    .getElementById("json-toggle")
+    .addEventListener("click", () =>
+      ToggleView(param_definitions, list_html, false)
+    );
+};
+
+//obtain file containing information about parameters
+async function getParamDefinitions() {
+  const response = await fetch(`static/data/param_definitions.json`);
+  if (response.status !== 200 && response.status !== 201) {
+    //alert error in alert box
+    errorAlert("Unable to obtain definitions file");
+  }
+  const param_definitions = await response.json();
+  return await param_definitions;
+}
+
+//obtain emhass built config
+async function obtainConfig() {
+  config = {};
+  response = await fetch(`/get-config`, {
+    method: "GET",
+  });
+  blob = await response.blob(); //get data blob
+  config = await new Response(blob).json(); //obtain json from blob
+  if (response.status !== 200 && response.status !== 201) {
+    errorAlert("Unable to obtain config file");
+  }
+  return config;
+}
+
+//obtain emhass default config
+async function ObtainDefaultConfig() {
+  config = {};
+  response = await fetch(`/get-config/defaults`, {
+    method: "GET",
+  });
+  blob = await response.blob(); //get data blob
+  config = await new Response(blob).json(); //obtain json from blob
+  if (response.status !== 200 && response.status !== 201) {
+    errorAlert("Unable to obtain default config file");
+  }
+
+  return config;
+}
+
+//get html data from configuration_list.html (list template)
+async function getListHTML() {
+  const response = await fetch(`static/configuration_list.html`);
+  blob = await response.blob(); //get data blob
+  htmlTemplateData = await new Response(blob).text(); //obtain html from blob
+  showChangeStatus(response.status, [
+    "Unable to obtain configuration_list html file",
+  ]);
+  return await htmlTemplateData;
+}
+
+//load list configuration page/form
+function loadConfigurationListPage(param_definitions, config, list_html) {
+  //list parameters used in the section headers
+  header_input_list = ["set_use_battery", "number_of_deferrable_loads"];
+
+  //get the main container and append list template as a base
+  document.getElementById("configurationContainer").innerHTML = list_html;
+
+  //loop through configuration sections ('Local', 'System', 'Tariff', 'Solar System (PV)', 'Deferrable Loads', 'Battery') in the json file, build sections
+  for (var section in param_definitions) {
+    //build one section at a time
+    buildParamContainers(
+      section,
+      param_definitions[section],
+      config,
+      header_input_list
+    );
+
+    //after sections have been built
+    //add event listeners for section header inputs
+    for (header_input_param of header_input_list) {
+      if (param_definitions[section].hasOwnProperty(header_input_param)) {
+        //grab default from definitions file
+        value = param_definitions[section][header_input_param]["default_value"];
+        //find input element (parameter name as the input element ID)
+        header_input_element = document.getElementById(header_input_param);
+        //add input listener to the element
+        header_input_element.addEventListener("input", (e) =>
+          headerElement(e.target, param_definitions, config)
+        );
+        //check if the EMHASS config contains a stored param value
+        value = checkConfigParam(value, config, header_input_param);
+        //set value of input
+        header_input_element.value = value;
+        //checkboxes (for Booleans) use checked instead of value
+        if (header_input_element.type == "checkbox") {
+          header_input_element.checked = value;
+        }
+        //manually trigger header element event listener for initial state
+        headerElement(header_input_element, param_definitions, config);
+      }
+    }
+  }
+}
+
+//build sections body, param containers (containing param input)
+function buildParamContainers(
+  section,
+  section_parameters_definitions,
+  config,
+  header_input_list
+) {
+  //get the section container
+  SectionContainer = document.getElementById(section);
+  //get the body container inside the section
+  SectionParamElement = SectionContainer.getElementsByClassName("section-body");
+
+  //loop through parameters in the definition file, generate and append param containers for the section
+  for (const [
+    parameter_definition_name,
+    parameter_definition_object,
+  ] of Object.entries(section_parameters_definitions)) {
+    //if type array.* and not in "Deferrable Loads" section, add plus and minus buttons
+    array_buttons = "";
+    if (
+      parameter_definition_object["input"].search("array.") > -1 &&
+      section != "Deferrable Loads"
+    ) {
+      array_buttons = `
+
+
+ `; + } + //check if param is set in the section header, if so skip param container html append + if (header_input_list.includes(parameter_definition_name)) { + continue; + } + + //generates and appends param container + SectionParamElement[0].innerHTML += ` +
+
${ + parameter_definition_object["friendly_name"] + }:
${parameter_definition_name}
+ ${array_buttons} +
+ ${buildParamElement( + parameter_definition_object, + parameter_definition_name, + config + )} +
+

${parameter_definition_object["Description"]}

+
+ `; + } + + //add button (array plus) event listeners + let plus = SectionContainer.querySelectorAll(".input-plus"); + plus.forEach(function (answer) { + answer.addEventListener("click", () => + plusElements(answer.classList[1], param_definitions, section, {}) + ); + }); + + //subtract button (array minus) event listeners + let minus = SectionContainer.querySelectorAll(".input-minus"); + minus.forEach(function (answer) { + answer.addEventListener("click", () => minusElements(answer.classList[1])); + }); + + //check boxes that should be ticked (check value of input and match to checked) + let checkbox = document.querySelectorAll("input[type='checkbox']"); + checkbox.forEach(function (answer) { + let value = answer.value === "true"; + answer.checked = value; + }); + + //loop though sections params again, check if param has a requirement, if so add a listener to the required param input + for (const [ + parameter_definition_name, + parameter_definition_object, + ] of Object.entries(section_parameters_definitions)) { + //check if param has a requirement from definitions file + if ("requires" in parameter_definition_object) { + // get param requirement element + const requirement_element = document.getElementById( + Object.keys(parameter_definition_object["requires"])[0] + ); + // get param that has requirement + const param_element = document.getElementById(parameter_definition_name); + //obtain param inputs, on change, trigger function + requirement_inputs = + requirement_element.getElementsByClassName("param_input"); + //grab required value + const requirement_value = Object.values( + parameter_definition_object["requires"] + )[0]; + + //for all required inputs + for (const input of requirement_inputs) { + //if listener not already attached + if (input.getAttribute("listener") !== "true") { + //create event listener with arguments referencing the required param. 
param with requirement and required value + input.addEventListener("input", () => + checkRequirements(input, param_element, requirement_value) + ); + //manually run function to gain initial state + checkRequirements(input, param_element, requirement_value); + } + } + } + } +} + +//create html input element/s for a param container (called by buildParamContainers) +function buildParamElement( + parameter_definition_object, + parameter_definition_name, + config +) { + var type = ""; + var inputs = ""; + var type_specific_html = ""; + var type_specific_html_end = ""; + + //switch statement to adjust generated html according to its data type + switch (parameter_definition_object["input"]) { + case "array.int": + case "int": + type = "number"; + placeholder = parseInt(parameter_definition_object["default_value"]); + break; + case "array.string": + case "string": + type = "text"; + placeholder = parameter_definition_object["default_value"]; + break; + case "array.time": + case "time": + type = "time"; + break; + case "array.boolean": + case "boolean": + type = "checkbox"; + type_specific_html = ` + + `; + placeholder = parameter_definition_object["default_value"] === "true"; + break; + case "array.float": + case "float": + type = "number"; + placeholder = parseFloat(parameter_definition_object["default_value"]); + break; + case "select": + break; + } + + //check default values saved in param definitions + value = parameter_definition_object["default_value"]; + //check if a param value is saved in the config file (if so overwrite definition default) + value = checkConfigParam(value, config, parameter_definition_name); + + //generate and return param input html, + //check if param value is an object if so treat value as an array of values + if (typeof value !== "object") { + //if select, generate and return select instead of input + if (parameter_definition_object["input"] == "select") { + let inputs = ``; + return inputs; + } + // generate param input html and return + else { + return ` + ${type_specific_html} + + ${type_specific_html_end} + `; + } + } + // else (value isn't a object) loop though values, generate inputs and and return + else { + //for items such as load_peak_hour_periods (object of array objects) + if (typeof Object.values(value)[0] === "object") { + for (param of Object.values(value)) { + for (items of Object.values(param)) { + inputs += ``; + } + inputs += `
`; + } + return inputs; + } + // array of values/objects + else { + let inputs = ""; + for (param of value) { + inputs += ` + ${type_specific_html} + + ${type_specific_html_end} + `; + } + return inputs; + } + } +} + +//add param elements (for type array) +function plusElements( + parameter_definition_name, + param_definitions, + section, + config +) { + param_element = document.getElementById(parameter_definition_name); + param_input_container = + param_element.getElementsByClassName("param-input")[0]; + // Add a copy of the param element + param_input_container.innerHTML += buildParamElement( + param_definitions[section][parameter_definition_name], + parameter_definition_name, + config + ); +} + +//Remove param elements (minimum 1) +function minusElements(param) { + param_inputs = document.getElementById(param).getElementsByTagName("input"); + + //verify if input is a boolean (if so remove switch with input) + if (param_inputs[param_inputs.length - 1].parentNode.tagName === "LABEL") { + param_input = param_inputs[param_inputs.length - 1].parentNode; + } else { + param_input = param_inputs[param_inputs.length - 1]; + } + + if (param_inputs.length > 1) { + param_input.remove(); + } +} + +//check requirement_element inputs, +//if requirement_element don't match requirement_value, add .requirement-disable class to param_element +//else remove class +function checkRequirements( + requirement_element, + param_element, + requirement_value +) { + //get current value of required element + if (requirement_element.type == "checkbox") { + requirement_element_value = requirement_element.checked; + } else { + requirement_element_value = requirement_element.value; + } + + if (requirement_element_value != requirement_value) { + if (!param_element.classList.contains("requirement-disable")) { + param_element.classList.add("requirement-disable"); + } + } else { + if (param_element.classList.contains("requirement-disable")) { + param_element.classList.remove("requirement-disable"); + } + } +} + +//retrieve header inputs and execute accordingly +function headerElement(element, param_definitions, config) { + switch (element.id) { + //if set_use_battery, add or remove battery section (inc. 
params)
+    case "set_use_battery":
+      param_container = element
+        .closest(".section-card")
+        .getElementsByClassName("section-body")[0];
+      param_container.innerHTML = "";
+      if (element.checked) {
+        buildParamContainers("Battery", param_definitions["Battery"], config, [
+          "set_use_battery",
+        ]);
+        element.checked = true;
+      }
+      break;
+    //if number_of_deferrable_loads, the number of inputs per param in the Deferrable Loads section should add up to number_of_deferrable_loads in the header
+    case "number_of_deferrable_loads":
+      param_container = element
+        .closest(".section-card")
+        .getElementsByClassName("section-body")[0];
+      param_list = param_container.getElementsByClassName("param");
+      difference =
+        parseInt(element.value) -
+        param_container.firstElementChild.querySelectorAll("input").length;
+      //add elements
+      if (difference > 0) {
+        for (let i = difference; i >= 1; i--) {
+          for (const param of param_list) {
+            //append element; do not pass config, to obtain the default parameter from the definitions file
+            plusElements(param.id, param_definitions, "Deferrable Loads", {});
+          }
+        }
+      }
+      //subtract elements
+      if (difference < 0) {
+        for (let i = difference; i <= -1; i++) {
+          for (const param of param_list) {
+            minusElements(param.id);
+          }
+        }
+      }
+      break;
+  }
+}
+
+//check for a parameter in config, update the value if it exists
+function checkConfigParam(value, config, parameter_definition_name) {
+  isArray = false;
+  if (config !== null && config !== undefined) {
+    //check if parameter has a saved value
+    if (parameter_definition_name in config) {
+      value = config[parameter_definition_name];
+    }
+    //check if values saved in config are dict arrays (e.g. sensor_replace_zero in list_sensor_replace_zero)
+    //extract values and return an object array
+    if ("list_" + parameter_definition_name in config) {
+      isArray = true;
+      // extract parameter values from object array
+      value = config["list_" + parameter_definition_name].map(function (a) {
+        return a[parameter_definition_name];
+      });
+    }
+  }
+  return value;
+}
+
+//send parameter input values to EMHASS, to save to config.json and param.pkl
+async function saveConfiguration(param_definitions) {
+  //start with an empty config
+  config = {};
+  //check if page is in list or box view
+  config_box_element = document.getElementById("config-box");
+
+  //if true, in list view
+  if (config_box_element === null) {
+    //retrieve params by looping through param_definitions (loop through configuration sections)
+    for (var [section_name, section_object] of Object.entries(
+      param_definitions
+    )) {
+      //loop through parameters
+      for (var [
+        parameter_definition_name,
+        parameter_definition_object,
+      ] of Object.entries(section_object)) {
+        let param_values = [];
+        let param_array = false;
+        // get param container
+        param_element = document.getElementById(parameter_definition_name);
+        //extract inputs from param
+        if (param_element != null) {
+          param_inputs = param_element.getElementsByClassName("param_input");
+          //check if param_element is itself the param_input (e.g. for header params)
+          if (param_element.tagName == "INPUT" && param_inputs.length === 0) {
+            param_inputs = [param_element];
+          }
+
+          //obtain param input type from param_definitions, check if param should be formatted as an array
+          param_array = Boolean(
+            !parameter_definition_object["input"].search("array")
+          );
+          // loop through param_inputs, extract the element values
+          for (var input of param_inputs) {
+            switch (input.type) {
+              case "number":
+                param_values.push(parseFloat(input.value));
+                break;
+              case "checkbox":
+                param_values.push(input.checked);
+                break;
+              default:
+                param_values.push(input.value);
+                break;
+            }
+          }
+          //build parameters using values from inputs
+          if (param_array) {
+            //load_peak_hour_periods (object of objects)
+            if (
+              parameter_definition_object["input"] == "array.time" &&
+              param_values.length % 2 === 0
+            ) {
+              config[parameter_definition_name] = {};
+              for (let i = 0; i < param_values.length; i++) {
+                config[parameter_definition_name][
+                  "period_hp_" +
+                    (Object.keys(config[parameter_definition_name]).length + 1)
+                ] = [{ start: param_values[i] }, { end: param_values[++i] }];
+              }
+            }
+            //array list (array of objects)
+            else {
+              config["list_" + parameter_definition_name] = [];
+              for (const value of param_values) {
+                config["list_" + parameter_definition_name].push({
+                  [parameter_definition_name]: value,
+                });
+              }
+            }
+          }
+          //single value
+          if (!param_array && param_values.length) {
+            config[parameter_definition_name] = param_values[0];
+          }
+        }
+      }
+    }
+  }
+  //in box view
+  else {
+    //try to parse json from the box
+    try {
+      config = JSON.parse(config_box_element.value);
+    } catch (error) {
+      //if json error, show in alert box
+      document.getElementById("alert-text").textContent =
+        "\r\n" +
+        error +
+        "\r\n" +
+        "JSON Error: string values may be missing quotes";
+      document.getElementById("alert").style.display = "block";
+      document.getElementById("alert").style.textAlign = "center";
+      return 0;
+    }
+  }
+
+  //send built config to emhass
+  const response = await fetch(`/set-config`, {
+    method: "POST",
+    headers: {
+      "Content-Type": "application/json",
+    },
+    body: JSON.stringify(config),
+  });
+  showChangeStatus(response.status, await response.json());
+}
+
+//Toggle between box (json) and list view
+async function ToggleView(param_definitions, list_html, default_reset) {
+  let selected = "";
+  config = {};
+
+  //find out if list or box view is active
+  configuration_container = document.getElementById("configurationContainer");
+  // check if section-cards (config sections/list) exist
+  config_card = configuration_container.getElementsByClassName("section-card");
+  //selected view (true = list, false = box)
+  selected_view = Boolean(config_card.length);
+
+  //if default_reset is passed, do not switch views; instead reinitialize the view with default params
+  if (default_reset) {
+    selected_view = !selected_view;
+    //obtain default config as config (when pressing the default button)
+    config = await ObtainDefaultConfig();
+  } else {
+    //obtain latest config
+    config = await obtainConfig();
+  }
+
+  //if no section-cards were found, assume the json box is selected
+  if (selected_view) {
+    selected = "list";
+  } else {
+    selected = "box";
+  }
+  //remove contents of current view
+  configuration_container.innerHTML = "";
+  //build new view
+  switch (selected) {
+    case "box":
+      //load list
+      loadConfigurationListPage(param_definitions, config, list_html);
+      break;
+    case "list":
+      //load box
+      loadConfigurationBoxPage(config);
break; + } +} + +//load box (json textarea) html +async function loadConfigurationBoxPage(config) { + configuration_container.innerHTML = ` + + `; + //set created textarea box with retrieved config + document.getElementById("config-box").innerHTML = JSON.stringify( + config, + null, + 2 + ); +} + +//function in control of status icons and alert box from a fetch request +async function showChangeStatus(status, logJson) { + var loading = document.getElementById("loader"); //element showing statuses + if (status === "remove") { + //remove all + loading.innerHTML = ""; + loading.classList.remove("loading"); + } else if (status === "loading") { + //show loading logo + loading.innerHTML = ""; + loading.classList.add("loading"); //append class with loading animation styling + } else if (status === 201) { + //if status is 201, then show a tick + loading.classList.remove("loading"); + loading.innerHTML = `

`;
+  } else if (status === 200) {
+    //if status is 200, remove the status icon
+    loading.innerHTML = "";
+    loading.classList.remove("loading");
+  } else {
+    //then show a cross
+    loading.classList.remove("loading");
+    loading.innerHTML = `

`; //show cross icon to indicate an error
+    if (logJson.length != 0) {
+      document.getElementById("alert-text").textContent =
+        "\r\n\u2022 " + logJson.join("\r\n\u2022 "); //show received log data in alert box
+      document.getElementById("alert").style.display = "block";
+      document.getElementById("alert").style.textAlign = "left";
+    }
+  }
+}
+
+//simple function to write text to the alert box
+async function errorAlert(text) {
+  document.getElementById("alert-text").textContent = "\r\n" + text + "\r\n";
+  document.getElementById("alert").style.display = "block";
+  document.getElementById("alert").style.textAlign = "left";
+  return 0;
+}
+
+//check config works
+//remove logging
+//loading
+//tick and cross sizing
+//add config to .gitignore
+//remove old objects
+//clean up css
+//array values (refine pv peak)
+//error in send or retrieve (alert box)
+//config path in data folder
+//cli paths
+//add redundancy?
diff --git a/src/emhass/static/data/param_definitions.json b/src/emhass/static/data/param_definitions.json
new file mode 100644
index 00000000..43cfb64b
--- /dev/null
+++ b/src/emhass/static/data/param_definitions.json
@@ -0,0 +1,407 @@
+{
+  "Local": {
+    "sensor_power_photovoltaics": {
+      "friendly_name": "Sensor power photovoltaics",
+      "Description": "This is the name of the photovoltaic power-produced sensor in Watts from Home Assistant. For example: ‘sensor.power_photovoltaics’.",
+      "input": "string",
+      "default_value": "sensor.power_photovoltaics"
+    },
+    "sensor_power_load_no_var_loads": {
+      "friendly_name": "Sensor power loads with no variable loads",
+      "Description": "The name of the household power consumption sensor in Watts from Home Assistant. The deferrable loads that we will want to include in the optimization problem should be subtracted from this sensor in HASS. For example: ‘sensor.power_load_no_var_loads’",
+      "input": "string",
+      "default_value": "sensor.power_load_no_var_loads"
+    },
+    "sensor_replace_zero": {
+      "friendly_name": "Sensor NaN values with 0s",
+      "Description": "The list of retrieved variables whose NaN values (if they exist) should be replaced with zeros.",
+      "input": "array.string",
+      "default_value": "sensor.power_photovoltaics"
+    },
+    "sensor_linear_interp": {
+      "friendly_name": "Sensor NaN values with linear interpolation",
+      "Description": "The list of retrieved variables whose NaN values should be filled using linear interpolation",
+      "input": "array.string",
+      "default_value": "sensor.power_photovoltaics"
+    },
+    "continual_publish": {
+      "friendly_name": "Continually publish optimization results",
+      "Description": "Set to True to save entities to .json after an optimization run, then automatically republish the saved entities (with updated current state value) every freq minutes. Entity data is saved to data_path/entities.",
+      "input": "boolean",
+      "default_value": false
+    }
+  },
+  "System": {
+    "optimization_time_step": {
+      "friendly_name": "Optimization time step (minutes)",
+      "Description": "The time step to resample retrieved data from hass. This parameter is given in minutes. It should not be defined too low or you will run into memory problems when defining the Linear Programming optimization. Defaults to 30",
+      "input": "int",
+      "default_value": 30
+    },
+    "historic_days_to_retrieve": {
+      "friendly_name": "Historic days to retrieve",
+      "Description": "We will retrieve data from now back to historic_days_to_retrieve days.
Defaults to 2", + "input": "int", + "default_value": 2 + }, + "load_negative": { + "friendly_name": "Load negative values", + "Description": "Set this parameter to True if the retrieved load variable is negative by convention. Defaults to False", + "input": "boolean", + "default_value": false + }, + "set_zero_min": { + "friendly_name": "Remove Negatives", + "Description": "Set this parameter to True to give a special treatment for a minimum value saturation to zero for power consumption data. Values below zero are replaced by nans. Defaults to True.", + "input": "boolean", + "default_value": true + }, + "method_ts_round": { + "friendly_name": "Timestamp rounding method", + "Description": "Set the method for timestamp rounding, options are: first, last and nearest.", + "input": "select", + "select_options": [ + "nearest", + "first", + "last" + ], + "default_value": "nearest" + }, + "delta_forecast_daily": { + "friendly_name": "Number of forecasted days", + "Description": "The number of days for forecasted data. Defaults to 1.", + "input": "int", + "default_value": 1 + }, + "load_forecast_method": { + "friendly_name": "Load forecast method", + "Description": "The load forecast method that will be used. The options are ‘csv’ to load a CSV file or ‘naive’ for a simple 1-day persistence model.", + "input": "select", + "select_options": [ + "naive", + "csv" + ], + "default_value": "naive" + }, + "set_total_pv_sell": { + "friendly_name": "PV straight to grid", + "Description": "Set this parameter to true to consider that all the PV power produced is injected to the grid. No direct self-consumption. The default is false, for a system with direct self-consumption.", + "input": "boolean", + "default_value": false + }, + "lp_solver": { + "friendly_name": "Linear programming solver", + "Description": "Set the name of the linear programming solver that will be used. Defaults to ‘COIN_CMD’. The options are ‘PULP_CBC_CMD’, ‘GLPK_CMD’ and ‘COIN_CMD’.", + "input": "select", + "select_options": [ + "COIN_CMD", + "PULP_CBC_CMD", + "GLPK_CMD" + ], + "default_value": "COIN_CMD" + }, + "lp_solver_path": { + "friendly_name": "Linear programming solver program path", + "Description": "Set the path to the LP solver. Defaults to ‘/usr/bin/cbc’.", + "input": "text", + "default_value": "/usr/bin/cbc" + }, + "weather_forecast_method": { + "friendly_name": "Weather forecast method", + "Description": "This will define the weather forecast method that will be used. options are 'scrapper' (ClearOutside), 'Solcast', 'solar.forecast' (forecast.solar) and 'csv' to load a CSV file. When loading a CSV file this will be directly considered as the PV power forecast in Watts.", + "input": "select", + "select_options": [ + "scrapper", + "solcast", + "solar.forecast", + "csv" + ], + "default_value": "scrapper" + }, + "maximum_power_from_grid": { + "friendly_name": "Max power from grid", + "Description": "The maximum power that can be supplied by the utility grid in Watts (consumption). Defaults to 9000.", + "input": "int", + "default_value": 9000 + }, + "maximum_power_to_grid": { + "friendly_name": "Max export power to grid", + "Description": "The maximum power that can be supplied to the utility grid in Watts (injection). 
Defaults to 9000.", + "input": "int", + "default_value": 9000 + }, + "inverter_is_hybrid": { + "friendly_name": "Inverter is a hybrid", + "Description": "Set to True to consider that the installation inverter is hybrid for PV and batteries (Default False)", + "input": "boolean", + "default_value": false + }, + "compute_curtailment": { + "friendly_name": "Set compute curtailment (grid export limit)", + "Description": "Set to True to compute a special PV curtailment variable (Default False)", + "input": "boolean", + "default_value": false + } + }, + "Tariff": { + "load_cost_forecast_method": { + "friendly_name": "Load cost method", + "Description": "Define the method that will be used for load cost forecast. The options are ‘hp_hc_periods’ for peak and non-peak hours contracts, and ‘csv’ to load custom cost from CSV file.", + "input": "select", + "select_options": [ + "hp_hc_periods", + "csv" + ], + "default_value": "hp_hc_periods" + }, + "load_peak_hour_periods": { + "friendly_name": "List peak hour periods", + "Description": "A list of peak hour periods for load consumption from the grid. This is useful if you have a contract with peak and non-peak hours.", + "input": "array.time", + "default_value": { + "period_hp_1": { + "start": "02:54", + "end": "15:24" + }, + "period_hp_2": { + "start": "02:54", + "end": "15:24" + } + }, + "requires": { + "load_cost_forecast_method": "hp_hc_periods" + } + }, + "load_peak_hours_cost": { + "friendly_name": "Peak hours electrical energy cost", + "Description": "The cost of the electrical energy during peak hours", + "input": "float", + "requires": { + "load_cost_forecast_method": "hp_hc_periods" + }, + "default_value": 0.1907 + }, + "load_offpeak_hours_cost": { + "friendly_name": "Off-peak hours electrical energy cost", + "Description": "The cost of the electrical energy during off-peak hours", + "input": "float", + "requires": { + "load_cost_forecast_method": "hp_hc_periods" + }, + "default_value": 0.1419 + }, + "production_price_forecast_method": { + "friendly_name": "PV power production price forecast method", + "Description": "Define the method that will be used for PV power production price forecast. This is the price that is paid by the utility for energy injected into the grid. The options are ‘constant’ for a constant fixed value or ‘csv’ to load custom price forecasts from a CSV file.", + "input": "select", + "select_options": [ + "constant", + "csv" + ], + "default_value": "constant" + }, + "photovoltaic_production_sell_price": { + "friendly_name": "Constant PV power production price", + "Description": "The paid price for energy injected to the grid from excess PV production in €/kWh.", + "input": "float", + "default_value": 0.1419, + "requires": { + "production_price_forecast_method": "constant" + } + } + }, + "Solar System (PV)": { + "pv_module_model": { + "friendly_name": "PV module model name", + "Description": "The PV module model. This parameter can be a list of items to enable the simulation of mixed orientation systems.", + "input": "array.string", + "input_attributes": "_'s", + "default_value": "CSUN_Eurasia_Energy_Systems_Industry_and_Trade_CSUN295_60M" + }, + "pv_inverter_model": { + "friendly_name": "The PV inverter model name", + "Description": "The PV inverter model. 
This parameter can be a list of items to enable the simulation of mixed orientation systems.", + "input": "array.string", + "input_attributes": "_'s", + "default_value": "Fronius_International_GmbH__Fronius_Primo_5_0_1_208_240__240V_" + }, + "surface_tilt": { + "friendly_name": "The PV panel tilt", + "Description": "The tilt angle of your solar panels. Defaults to 30. This parameter can be a list of items to enable the simulation of mixed orientation systems.", + "input": "array.int", + "default_value": 30 + }, + "surface_azimuth": { + "friendly_name": "The PV azimuth (direction)", + "Description": "The azimuth of your PV installation. Defaults to 205. This parameter can be a list of items to enable the simulation of mixed orientation systems.", + "input": "array.int", + "default_value": 205 + }, + "modules_per_string": { + "friendly_name": "Number of modules per string", + "Description": "The number of modules per string. Defaults to 16. This parameter can be a list of items to enable the simulation of mixed orientation systems.", + "input": "array.int", + "default_value": 16 + }, + "strings_per_inverter": { + "friendly_name": "Number of strings per inverter", + "Description": "The number of used strings per inverter. Defaults to 1. This parameter can be a list of items to enable the simulation of mixed orientation systems.", + "input": "array.int", + "default_value": 1 + } + }, + "Deferrable Loads": { + "number_of_deferrable_loads": { + "friendly_name": "Number of deferrable loads", + "Description": "Define the number of deferrable loads (appliances to shift) to consider. Defaults to 2.", + "input": "int", + "default_value": 2 + }, + "nominal_power_of_deferrable_loads": { + "friendly_name": "Deferrable load nominal power", + "Description": "The nominal (calculated max) power for each deferrable load in Watts.", + "input": "array.float", + "default_value": 3000.0 + }, + "operating_hours_of_each_deferrable_load": { + "friendly_name": "Deferrable load operating hours", + "Description": "The total number of hours that each deferrable load should operate", + "input": "array.int", + "default_value": 0 + }, + "treat_deferrable_load_as_semi_cont": { + "friendly_name": "Deferrable load as semi-continuous (on/off) variable", + "Description": "Semi-continuous variables (True) are variables that must take a value that can be either their maximum or minimum/zero (for example On = Maximum load, Off = 0 W). Non semi-continuous (which means continuous) variables (False) can take any values between their maximum and minimum", + "input": "array.boolean", + "default_value": true + }, + "set_deferrable_load_single_constant": { + "friendly_name": "Deferrable load run single constant per optimization", + "Description": "Define if we should set each deferrable load as a constant fixed value variable with just one startup for each optimization task", + "input": "array.boolean", + "default_value": false + }, + "set_deferrable_startup_penalty": { + "friendly_name": "Set deferrable startup penalty", + "Description": "For penalty P, each time the deferrable load turns on will incur an additional cost of P * number_of_deferrable_loads * cost_of_electricity at that time", + "input": "array.float", + "default_value": 0.0 + }, + "start_timesteps_of_each_deferrable_load": { + "friendly_name": "Deferrable start timestamp", + "Description": "The timestep as from which each deferrable load is allowed to operate (if you don’t want the deferrable load to use the whole optimization time window). 
If you specify a value of 0 (or negative), the deferrable load will be optimized as from the beginning of the complete prediction horizon window.", + "input": "array.int", + "default_value": 0 + }, + "end_timesteps_of_each_deferrable_load": { + "friendly_name": "Deferrable end timestamp", + "Description": "The timestep before which each deferrable load should operate. The deferrable load is not allowed to operate after the specified time step. If a value of 0 (or negative) is provided, the deferrable load is allowed to operate in the complete optimization window)", + "input": "array.int", + "default_value": 0 + } + }, + "Battery": { + "set_use_battery": { + "friendly_name": "Enable Battery", + "Description": "Set to True if we should consider an energy storage device such as a Li-Ion battery. Defaults to False", + "input": "boolean", + "default_value": false + }, + "set_nocharge_from_grid": { + "friendly_name": "forbid charging battery from grid", + "Description": "Set this to true if you want to forbid charging the battery from the grid. The battery will only be charged from excess PV", + "input": "boolean", + "default_value": false + }, + "set_nodischarge_to_grid": { + "friendly_name": "forbid battery discharge to the grid", + "Description": "Set this to true if you want to forbid discharging battery power to the grid.", + "input": "boolean", + "default_value": true + }, + "set_battery_dynamic": { + "friendly_name": "Set Battery dynamic (dis)charge power limiting", + "Description": "Set a power dynamic limiting condition to the battery power. This is an additional constraint on the battery dynamic in power per unit of time (timestep), which allows you to set a percentage of the battery’s nominal full power as the maximum power allowed for (dis)charge.", + "input": "boolean", + "default_value": false + }, + "battery_dynamic_max": { + "friendly_name": "maximum percentage of battery discharge per timestep", + "Description": "The maximum positive (for discharge) battery power dynamic. This is the allowed power variation (in percentage) of battery maximum power per unit of timestep", + "input": "float", + "default_value": 0.9, + "requires": { + "set_battery_dynamic": true + } + }, + "battery_dynamic_min": { + "friendly_name": "maximum percentage of battery charge per timestep", + "Description": "The maximum negative (for charge) battery power dynamic. 
This is the allowed power variation (in percentage) of battery maximum power per timestep.", + "input": "float", + "default_value": -0.9, + "requires": { + "set_battery_dynamic": true + } + }, + "weight_battery_discharge": { + "friendly_name": "Add cost weight for battery discharge", + "Description": "An additional weight (currency/ kWh) applied in the cost function to battery usage for discharging", + "input": "float", + "default_value": 1.0 + }, + "weight_battery_charge": { + "friendly_name": "Add cost weight for battery charge", + "Description": "An additional weight (currency/ kWh) applied in the cost function to battery usage for charging", + "input": "float", + "default_value": 1.0 + }, + "battery_discharge_power_max": { + "friendly_name": "Max battery discharge power", + "Description": "The maximum discharge power in Watts", + "input": "int", + "default_value": 1000 + }, + "battery_charge_power_max": { + "friendly_name": "Max battery charge power", + "Description": "The maximum charge power in Watts", + "input": "int", + "default_value": 1000 + }, + "battery_discharge_efficiency": { + "friendly_name": "Battery discharge efficiency", + "Description": "The discharge efficiency. (percentage/100)", + "input": "float", + "default_value": 0.95 + }, + "battery_charge_efficiency": { + "friendly_name": "Battery charge efficiency", + "Description": "The charge efficiency. (percentage/100)", + "input": "float", + "default_value": 0.95 + }, + "battery_nominal_energy_capacity": { + "friendly_name": "Battery total capacity", + "Description": "The total capacity of the battery stack in Wh", + "input": "int", + "default_value": 5000 + }, + "battery_minimum_state_of_charge": { + "friendly_name": "Minimum Battery charge percentage", + "Description": "The minimum allowable battery state of charge. (percentage/100)", + "input": "float", + "default_value": 0.3 + }, + "battery_maximum_state_of_charge": { + "friendly_name": "Maximum Battery charge percentage", + "Description": "The maximum allowable battery state of charge. (percentage/100)", + "input": "float", + "default_value": 0.9 + }, + "battery_target_state_of_charge": { + "friendly_name": "Battery desired percentage after optimization", + "Description": "The desired battery state of charge at the end of each optimization cycle. 
(percentage/100)", + "input": "float", + "default_value": 0.6 + } + } +} \ No newline at end of file diff --git a/src/emhass/static/script.js b/src/emhass/static/script.js index ad02b578..78f8b336 100644 --- a/src/emhass/static/script.js +++ b/src/emhass/static/script.js @@ -1,419 +1,442 @@ +//configuration for processing index page + //on page reload get saved data window.onload = async function () { + pageSelected = await loadBasicOrAdvanced(); - pageSelected = await loadBasicOrAdvanced(); + //add listener for basic and advanced html switch + document + .getElementById("basicOrAdvanced") + .addEventListener("click", () => SwitchBasicOrAdvanced()); - //add listener for basic and advanced html switch - document.getElementById("basicOrAdvanced").addEventListener("click", () => SwitchBasicOrAdvanced()); + //add listerner for configuration page toggle + document + .getElementById("paramConfig") + .addEventListener("click", () => SwitchBasicOrAdvanced()); }; //add listeners to buttons (based on page) function loadButtons(page) { - switch (page) { - case "advanced": - [ - "dayahead-optim", - "forecast-model-fit", - "forecast-model-predict", - "forecast-model-tune", - "regressor-model-fit", - "regressor-model-predict", - "perfect-optim", - "publish-data", - "naive-mpc-optim" - ].forEach((id) => - document.getElementById(id).addEventListener("click", () => formAction(id, "advanced")) - ); - ["input-plus", "input-minus"].forEach((id) => - document.getElementById(id).addEventListener("click", () => dictInputs(id)) - ); - document.getElementById("input-select").addEventListener("change", () => getSavedData()); - document.getElementById("input-clear").addEventListener("click", () => ClearInputData()); - break; - case "basic": - document.getElementById("dayahead-optim-basic").addEventListener("click", () => formAction("dayahead-optim", "basic")); - break; - } + switch (page) { + case "advanced": + [ + "dayahead-optim", + "forecast-model-fit", + "forecast-model-predict", + "forecast-model-tune", + "regressor-model-fit", + "regressor-model-predict", + "perfect-optim", + "publish-data", + "naive-mpc-optim", + ].forEach((id) => + document + .getElementById(id) + .addEventListener("click", () => formAction(id, "advanced")) + ); + ["input-plus", "input-minus"].forEach((id) => + document + .getElementById(id) + .addEventListener("click", () => dictInputs(id)) + ); + document + .getElementById("input-select") + .addEventListener("change", () => getSavedData()); + document + .getElementById("input-clear") + .addEventListener("click", () => ClearInputData()); + break; + case "basic": + document + .getElementById("dayahead-optim-basic") + .addEventListener("click", () => formAction("dayahead-optim", "basic")); + break; + } } //on check present basic or advanced html inside form element async function loadBasicOrAdvanced(RequestedPage) { - let basicFile = "basic.html"; - let advencedFile = "advanced.html"; - var formContainer = document.getElementById("TabSelection"); //container element to house basic or advanced data - //first check any function arg - if (arguments.length == 1) { - switch (RequestedPage) { - case "basic": - htmlData = await getHTMLData(basicFile); - formContainer.innerHTML = htmlData; - loadButtons("basic"); //load buttons based on basic or advanced - if (testStorage()) { localStorage.setItem("TabSelection", "basic") } //remember mode (save to localStorage) - return "basic"; //return basic to get saved data - case "advanced": - htmlData = await getHTMLData(advencedFile); - 
formContainer.innerHTML = htmlData;
-        loadButtons("advanced");
-        if (testStorage()) { localStorage.setItem("TabSelection", "advanced") }
-        getSavedData();
-        return "advanced";
-      default:
-        htmlData = await getHTMLData(advencedFile);
-        formContainer.innerHTML = htmlData;
-        loadButtons("advanced");
-        getSavedData();
-        return "advanced";
-    }
-  }
-  //then check localStorage
-  if (testStorage()) {
-    if (localStorage.getItem("TabSelection") !== null && localStorage.getItem("TabSelection") === "advanced") { //if advance
-      htmlData = await getHTMLData(advencedFile);
-      formContainer.innerHTML = htmlData;
-      loadButtons("advanced");
-      getSavedData();
-      return "advanced";
-    }
-    else { //else run basic (first time)
-      htmlData = await getHTMLData(basicFile);
-      formContainer.innerHTML = htmlData;
-      loadButtons("basic");
-      return "basic";
-    }
-  } else {
-    //if localStorage not supported, set to advanced page
-    htmlData = await getHTMLData(advencedFile);
-    formContainer.innerHTML = htmlData;
-    loadButtons("advanced");
-    return "advanced";
-  }
+  let basicFile = "basic.html";
+  let advencedFile = "advanced.html";
+  var formContainer = document.getElementById("TabSelection"); //container element to house basic or advanced data
+  //first check any function arg
+  if (arguments.length == 1) {
+    switch (RequestedPage) {
+      case "basic":
+        htmlData = await getHTMLData(basicFile);
+        formContainer.innerHTML = htmlData;
+        loadButtons("basic"); //load buttons based on basic or advanced
+        if (testStorage()) {
+          localStorage.setItem("TabSelection", "basic");
+        } //remember mode (save to localStorage)
+        return "basic"; //return basic to get saved data
+      case "advanced":
+        htmlData = await getHTMLData(advencedFile);
+        formContainer.innerHTML = htmlData;
+        loadButtons("advanced");
+        if (testStorage()) {
+          localStorage.setItem("TabSelection", "advanced");
+        }
+        getSavedData();
+        return "advanced";
+      default:
+        htmlData = await getHTMLData(advencedFile);
+        formContainer.innerHTML = htmlData;
+        loadButtons("advanced");
+        getSavedData();
+        return "advanced";
+    }
+  }
+  //then check localStorage
+  if (testStorage()) {
+    if (
+      localStorage.getItem("TabSelection") !== null &&
+      localStorage.getItem("TabSelection") === "advanced"
+    ) {
+      //if advanced
+      htmlData = await getHTMLData(advencedFile);
+      formContainer.innerHTML = htmlData;
+      loadButtons("advanced");
+      getSavedData();
+      return "advanced";
+    } else {
+      //else run basic (first time)
+      htmlData = await getHTMLData(basicFile);
+      formContainer.innerHTML = htmlData;
+      loadButtons("basic");
+      return "basic";
+    }
+  } else {
+    //if localStorage not supported, set to advanced page
+    htmlData = await getHTMLData(advencedFile);
+    formContainer.innerHTML = htmlData;
+    loadButtons("advanced");
+    return "advanced";
+  }
 }
 //on button press, check current displayed page data and switch
 function SwitchBasicOrAdvanced() {
-    var formContainerChildID = document.getElementById("TabSelection").firstElementChild.id
-    if (formContainerChildID === 'basic') {
-        loadBasicOrAdvanced("advanced")
-    }
-    else {
-        loadBasicOrAdvanced("basic")
-    }
+  var formContainerChildID =
+    document.getElementById("TabSelection").firstElementChild.id;
+  if (formContainerChildID === "basic") {
+    loadBasicOrAdvanced("advanced");
+  } else {
+    loadBasicOrAdvanced("basic");
+  }
 }
-
 //get html data from basic.html or advanced.html
 async function getHTMLData(htmlFile) {
-    const response = await fetch(`static/` + htmlFile);
-    blob = await response.blob(); //get data blob
-    htmlTemplateData = await new Response(blob).text(); //obtain html from blob
-    return await htmlTemplateData;
+  const response = await fetch(`static/` + htmlFile);
+  blob = await response.blob(); //get data blob
+  htmlTemplateData = await new Response(blob).text(); //obtain html from blob
+  return await htmlTemplateData;
 }
 //function pushing data via post, triggered by button action
 async function formAction(action, page) {
-  if (page !== "basic") { //dont try to get input data in basic mode
-    var data = inputToJson(page);
-  }
-  else { var data = {} } //send no data
-
-  if (data !== 0) { //don't run if there is an error in the input (box/list) Json data
-    showChangeStatus("loading", {}); // show loading div for status
-    const response = await fetch(`action/` + action, {
-      //fetch data from webserver.py
-      method: "POST",
-      headers: {
-        "Content-Type": "application/json",
-      },
-      body: JSON.stringify(data), //note that post can only send data via strings
-    });
-    if (response.status == 201) {
-      showChangeStatus(response.status, {});
-      if (page !== "basic") {
-        saveStorage(); //save to storage if successful
-      }
-      return true
-    } //if successful
-    else {
-      showChangeStatus(response.status, await response.json());
-      return false
-    } // else get Log data from response
-  } else {
-    showChangeStatus("remove"); //replace loading, show tick or cross with none
-    return false
-  }
+  if (page !== "basic") {
+    //don't try to get input data in basic mode
+    var data = inputToJson(page);
+  } else {
+    var data = {};
+  } //send no data
+  if (data !== 0) {
+    //don't run if there is an error in the input (box/list) Json data
+    showChangeStatus("loading", {}); // show loading div for status
+    const response = await fetch(`action/` + action, {
+      //fetch data from webserver.py
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+      },
+      body: JSON.stringify(data), //note that post can only send data via strings
+    });
+    if (response.status == 201) {
+      showChangeStatus(response.status, {});
+      if (page !== "basic") {
+        saveStorage(); //save to storage if successful
+      }
+      return true;
+    } //if successful
+    else {
+      showChangeStatus(response.status, await response.json());
+      return false;
+    } // else get Log data from response
+  } else {
+    showChangeStatus("remove"); //replace loading, show tick or cross with none
+    return false;
+  }
 }
 //function in control of status icons of post above
 async function showChangeStatus(status, logJson) {
-  var loading = document.getElementById("loader"); //element showing statuses
-  if (status === "remove") {
-    //remove all
-    loading.innerHTML = "";
-    loading.classList.remove("loading");
-  } else if (status === "loading") {
-    //show loading logo
-    loading.innerHTML = "";
-    loading.classList.add("loading"); //append class with loading animation styling
-  } else if (status === 201) {
-    //if status is 201, then show a tick
-    loading.classList.remove("loading");
-    loading.innerHTML = `
-
-    `;
-    getTemplate(); //get updated templates
-  } else {
-    //then show a cross
-    loading.classList.remove("loading");
-    loading.innerHTML = `
-
-    `; //show cross icon to indicate an error
-    if (logJson.length != 0) {
-      document.getElementById("alert-text").textContent =
-        "\r\n\u2022 " + logJson.join("\r\n\u2022 "); //show received log data in alert box
-      document.getElementById("alert").style.display = "block";
-      document.getElementById("alert").style.textAlign = "left";
-    }
-  }
+  var loading = document.getElementById("loader"); //element showing statuses
+  if (status === "remove") {
+    //remove all
+    loading.innerHTML = "";
+    loading.classList.remove("loading");
+  } else if (status === "loading") {
+    //show loading logo
+    loading.innerHTML = "";
+    loading.classList.add("loading"); //append class with loading animation styling
+  } else if (status === 201) {
+    //if status is 201, then show a tick
+    loading.classList.remove("loading");
+    loading.innerHTML = `
+
+    `;
+    getTemplate(); //get updated templates
+  } else {
+    //then show a cross
+    loading.classList.remove("loading");
+    loading.innerHTML = `
+
+    `; //show cross icon to indicate an error
+    if (logJson.length != 0) {
+      document.getElementById("alert-text").textContent =
+        "\r\n\u2022 " + logJson.join("\r\n\u2022 "); //show received log data in alert box
+      document.getElementById("alert").style.display = "block";
+      document.getElementById("alert").style.textAlign = "left";
+    }
+  }
 }
 //get rendered html template with containing new table data
 async function getTemplate() {
-  //fetch data from webserver.py
-  let htmlTemplateData = "";
-  response = await fetch(`template/table-template`, {
-    method: "GET",
-  });
-  blob = await response.blob(); //get data blob
-  htmlTemplateData = await new Response(blob).text(); //obtain html from blob
-  templateDiv = document.getElementById("template"); //get template container element to override
-  templateDiv.innerHTML = htmlTemplateData; //override container inner html with new data
-  var scripts = Array.from(templateDiv.getElementsByTagName("script")); //replace script tags manually
-  for (const script of scripts) {
-    var TempScript = document.createElement("script");
-    TempScript.innerHTML = script.innerHTML;
-    script.parentElement.appendChild(TempScript);
-  }
+  //fetch data from webserver.py
+  let htmlTemplateData = "";
+  response = await fetch(`template/table-template`, {
+    method: "GET",
+  });
+  blob = await response.blob(); //get data blob
+  htmlTemplateData = await new Response(blob).text(); //obtain html from blob
+  templateDiv = document.getElementById("template"); //get template container element to override
+  templateDiv.innerHTML = htmlTemplateData; //override container inner html with new data
+  var scripts = Array.from(templateDiv.getElementsByTagName("script")); //replace script tags manually
+  for (const script of scripts) {
+    var TempScript = document.createElement("script");
+    TempScript.innerHTML = script.innerHTML;
+    script.parentElement.appendChild(TempScript);
+  }
 }
 //test localStorage support
 function testStorage() {
-    try {
-        localStorage.setItem("test", { test: "123" });
-        localStorage.removeItem("test");
-        return true;
-    } catch (error) {
-        return false;
-    }
+  try {
+    localStorage.setItem("test", { test: "123" });
+    localStorage.removeItem("test");
+    return true;
+  } catch (error) {
+    return false;
+  }
 }
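+// Example (a sketch only, not wired to any button): formAction() above posts
+// to the Flask route `action/<name>` and treats HTTP 201 as success. The
+// action names "dayahead-optim" and "publish-data" are the ones already used
+// in this file; the chaining itself is illustrative:
+// async function exampleRunAndPublish() {
+//   const ok = await formAction("dayahead-optim", "advanced"); //POST input JSON
+//   if (ok) { await formAction("publish-data", "basic") } //chain on success
+// }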
 //function gets saved data (if any)
 function getSavedData() {
-    dictInputs(); //check selected current (List or Box) is correct
-    if (testStorage()) {
-        //if local storage exists and works
-        let selectElement = document.getElementById("input-select"); // select button element
-        var input_container = document.getElementById("input-container"); // container div containing all dynamic input elements (Box/List)
-        if (
-            localStorage.getItem("input_container_content") &&
-            localStorage.getItem("input_container_content") !== "{}"
-        ) {
-            //If items already stored in local storage, then override default
-            if (selectElement.value == "Box") {
-                //if Box is selected, show saved json data into box
-                document.getElementById("text-area").value = localStorage.getItem(
-                    "input_container_content"
-                );
-            }
-            if (selectElement.value == "List") {
-                //if List is selected, show saved json data into box
-                storedJson = JSON.parse(
-                    localStorage.getItem("input_container_content")
-                );
-                if (Object.keys(storedJson).length > 0) {
-                    input_container.innerHTML = "";
-                    i = 1;
-                    for (const ikey in storedJson) {
-                        input_container.appendChild(
-                            createInputListDiv(ikey, JSON.stringify(storedJson[ikey]))
-                        ); //call function to present each key as an list div element (with saved values)
-                    }
-                }
-            }
-        }
-    }
+  dictInputs(); //check the selected input mode (List or Box) is correct
+  if (testStorage()) {
+    //if local storage exists and works
+    let selectElement = document.getElementById("input-select"); // select button element
+    var input_container = document.getElementById("input-container"); // container div containing all dynamic input elements (Box/List)
+    if (
+      localStorage.getItem("input_container_content") &&
+      localStorage.getItem("input_container_content") !== "{}"
+    ) {
+      //If items already stored in local storage, then override default
+      if (selectElement.value == "Box") {
+        //if Box is selected, show saved json data into box
+        document.getElementById("text-area").value = localStorage.getItem(
+          "input_container_content"
+        );
+      }
+      if (selectElement.value == "List") {
+        //if List is selected, show saved json data into box
+        storedJson = JSON.parse(
+          localStorage.getItem("input_container_content")
+        );
+        if (Object.keys(storedJson).length > 0) {
+          input_container.innerHTML = "";
+          i = 1;
+          for (const ikey in storedJson) {
+            input_container.appendChild(
+              createInputListDiv(ikey, JSON.stringify(storedJson[ikey]))
+            ); //call function to present each key as a list div element (with saved values)
+          }
+        }
+      }
+    }
+  }
 }
 //using localStorage, store json data from input-list(List)/text-area(from input-box) elements for saved state save on page refresh (will save state on successful post)
 function saveStorage() {
-    var data = JSON.stringify(inputToJson());
-    if (testStorage() && data != "{}") {
-        //don't bother saving if empty and/or storage don't exist
-        localStorage.setItem("input_container_content", data);
-    }
+  var data = JSON.stringify(inputToJson());
+  if (testStorage() && data != "{}") {
+    //don't bother saving if empty and/or storage doesn't exist
+    localStorage.setItem("input_container_content", data);
+  }
 }
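+// Sketch of the state saveStorage() persists (the parameter name below is
+// only illustrative; whatever the user typed is stored as one JSON string
+// under the "input_container_content" key and restored by getSavedData()):
+//   localStorage["input_container_content"] = '{"prediction_horizon":24}'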
 //function gets values from input-list/text-area(from input-box) elements and return json dict object
 function inputToJson() {
-    var input_container = document.getElementById("input-container"); //container
-    let inputListArr = document.getElementsByClassName("input-list"); //list
-    let inputTextArea = document.getElementById("text-area"); //box
-    let input_container_child = null;
-    input_container_child = input_container.firstElementChild; //work out which element is first inside container div
-    var jsonReturnData = {};
-
-    if (input_container_child == null) {
-        //if no elements in container then return empty
-        return jsonReturnData;
-    }
-    //if List return box json
-    if (
-        input_container_child.className == "input-list" &&
-        inputListArr.length > 0
-    ) {
-        //if list is first and if list is greater then 0, otherwise give empty dict
-        let jsonTempData = "{";
-        for (let i = 0; i < inputListArr.length; i++) {
-            let key = inputListArr[i].getElementsByClassName("input-key")[0].value;
-            var value =
-                inputListArr[i].getElementsByClassName("input-value")[0].value;
-            //curate a string with list elements to parse into json later
-            if (key !== "") {
-                //key must not be empty
-                if (i !== 0) {
-                    jsonTempData = jsonTempData.concat(",");
-                } //add comma before every parameter, exuding the first
-                jsonTempData = jsonTempData.concat('"' + key + '":' + value);
-            }
-        }
-        jsonTempData = jsonTempData.concat("}");
-        try {
-            jsonReturnData = JSON.parse(jsonTempData);
-        } catch (error) {
-            //if json error, show in alert box
-            document.getElementById("alert-text").textContent =
-                "\r\n" +
-                error +
-                "\r\n" +
-                "JSON Error: String values may not be wrapped in quotes";
-            document.getElementById("alert").style.display = "block";
-            document.getElementById("alert").style.textAlign = "center";
-            return 0;
-        }
-    }
-    //if Box return box json
-    if (
-        input_container_child.className == "input-box" &&
-        inputTextArea.value != ""
-    ) {
-        //if Box is first and text is not empty, otherwise give empty dict
-        try {
-            jsonReturnData = JSON.parse(inputTextArea.value);
-        } catch (error) {
-            //if json error, show in alert box
-            document.getElementById("alert-text").textContent = "\r\n" + error;
-            document.getElementById("alert").style.display = "block";
-            return 0;
-        }
-    }
-    return jsonReturnData;
+  var input_container = document.getElementById("input-container"); //container
+  let inputListArr = document.getElementsByClassName("input-list"); //list
+  let inputTextArea = document.getElementById("text-area"); //box
+  let input_container_child = null;
+  input_container_child = input_container.firstElementChild; //work out which element is first inside container div
+  var jsonReturnData = {};
+
+  if (input_container_child == null) {
+    //if no elements in container then return empty
+    return jsonReturnData;
+  }
+  //if List return box json
+  if (
+    input_container_child.className == "input-list" &&
+    inputListArr.length > 0
+  ) {
+    //if list is first and if list is greater than 0, otherwise give empty dict
+    let jsonTempData = "{";
+    for (let i = 0; i < inputListArr.length; i++) {
+      let key = inputListArr[i].getElementsByClassName("input-key")[0].value;
+      var value =
+        inputListArr[i].getElementsByClassName("input-value")[0].value;
+      //curate a string with list elements to parse into json later
+      if (key !== "") {
+        //key must not be empty
+        if (i !== 0) {
+          jsonTempData = jsonTempData.concat(",");
+        } //add comma before every parameter, excluding the first
+        jsonTempData = jsonTempData.concat('"' + key + '":' + value);
+      }
+    }
+    jsonTempData = jsonTempData.concat("}");
+    try {
+      jsonReturnData = JSON.parse(jsonTempData);
+    } catch (error) {
+      //if json error, show in alert box
+      document.getElementById("alert-text").textContent =
+        "\r\n" +
+        error +
+        "\r\n" +
+        "JSON Error: String values may not be wrapped in quotes";
+      document.getElementById("alert").style.display = "block";
+      document.getElementById("alert").style.textAlign = "center";
+      return 0;
+    }
+  }
+  //if Box return box json
+  if (
+    input_container_child.className == "input-box" &&
+    inputTextArea.value != ""
+  ) {
+    //if Box is first and text is not empty, otherwise give empty dict
+    try {
+      jsonReturnData = JSON.parse(inputTextArea.value);
+    } catch (error) {
+      //if json error, show in alert box
+      document.getElementById("alert-text").textContent = "\r\n" + error;
+      document.getElementById("alert").style.display = "block";
+      return 0;
+    }
+  }
+  return jsonReturnData;
 }
 //function creates input list div element (and pass it values if given)
 function createInputListDiv(ikey, ivalue) {
-    let div = document.createElement("div");
-    div.className = "input-list";
-    div.innerHTML = `
-
-    `;
+  let div = document.createElement("div");
+  div.className = "input-list";
+  div.innerHTML = `
+
+    :
+
+    `;
-    if (ikey && ivalue) {
-        //if value and key is provided (from local storage) then add as elements values
-        div.getElementsByClassName("input-key")[0].value = String(ikey);
-        div.getElementsByClassName("input-value")[0].value = String(ivalue);
-    }
-    return div;
+  if (ikey && ivalue) {
+    //if value and key are provided (from local storage) then add as element values
+    div.getElementsByClassName("input-key")[0].value = String(ikey);
+    div.getElementsByClassName("input-value")[0].value = String(ivalue);
+  }
+
+  return div;
 }
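+// Example of what inputToJson() above returns for a filled-in List (parameter
+// names here are illustrative only; values are parsed as raw JSON, which is
+// why an unquoted string value triggers the alert):
+//   key: prediction_horizon  value: 24
+//   key: soc_init            value: 0.5
+//   => { "prediction_horizon": 24, "soc_init": 0.5 }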
 //function assigned to control (add and remove) input (Box and List) elements
 function dictInputs(action) {
-    var input_container = document.getElementById("input-container"); // container div containing all dynamic input elements
-    let selectElement = document.getElementById("input-select"); // select button
-    let input_container_child = null;
-    let input_container_child_name = null;
-    if (input_container.children.length > 0) {
-        input_container_child = input_container.firstElementChild; // figure out what is the first element inside of container (ie: "text-area" (input-box) or "input-list" (list))
-        input_container_child_name = input_container.firstElementChild.className;
-    }
-    //if list is selected, remove text-area (from Box) element and replace (with input-list)
-    if (selectElement.value == "List") {
-        if (action == "input-plus" || input_container_child_name == "input-box") {
-            //if plus button pressed, or Box element exists
-            if (input_container_child_name == "input-box") {
-                input_container_child.remove();
-            }
-            input_container.appendChild(createInputListDiv(false, false)); //call to createInputListDiv function to craft input-list element (with no values) and append inside container element
-        }
-        if (action == "input-minus") {
-            //minus button pressed, remove input-list element
-            if (input_container.children.length > 0) {
-                let inputListArr = document.getElementsByClassName("input-list");
-                let obj = inputListArr.item(inputListArr.length - 1);
-                obj.innerHTML = "";
-                obj.remove();
-            }
-        }
-    }
-    //if box is selected, remove input-list elements and replace (with text-area)
-    if (selectElement.value == "Box") {
-        if (
-            input_container_child_name == "input-list" ||
-            input_container_child === null
-        ) {
-            // if input list exists or no Box element
-            input_container.innerHTML = ""; //remove input-list list elements via erasing container innerHTML
-            let div = document.createElement("div"); //add input-box element
-            div.className = "input-box";
-            div.innerHTML = `
-
-            `;
-            input_container.appendChild(div); //append inside of container element
-        }
-    }
+  var input_container = document.getElementById("input-container"); // container div containing all dynamic input elements
+  let selectElement = document.getElementById("input-select"); // select button
+  let input_container_child = null;
+  let input_container_child_name = null;
+  if (input_container.children.length > 0) {
+    input_container_child = input_container.firstElementChild; // figure out what is the first element inside of container (ie: "text-area" (input-box) or "input-list" (list))
+    input_container_child_name = input_container.firstElementChild.className;
+  }
+  //if list is selected, remove text-area (from Box) element and replace (with input-list)
+  if (selectElement.value == "List") {
+    if (action == "input-plus" || input_container_child_name == "input-box") {
+      //if plus button pressed, or Box element exists
+      if (input_container_child_name == "input-box") {
+        input_container_child.remove();
+      }
+      input_container.appendChild(createInputListDiv(false, false)); //call to createInputListDiv function to craft input-list element (with no values) and append inside container element
+    }
+    if (action == "input-minus") {
+      //minus button pressed, remove input-list element
+      if (input_container.children.length > 0) {
+        let inputListArr = document.getElementsByClassName("input-list");
+        let obj = inputListArr.item(inputListArr.length - 1);
+        obj.innerHTML = "";
+        obj.remove();
+      }
+    }
+  }
+  //if box is selected, remove input-list elements and replace (with text-area)
+  if (selectElement.value == "Box") {
+    if (
+      input_container_child_name == "input-list" ||
+      input_container_child === null
+    ) {
+      // if input list exists or no Box element
+      input_container.innerHTML = ""; //remove input-list list elements via erasing container innerHTML
+      let div = document.createElement("div"); //add input-box element
+      div.className = "input-box";
+      div.innerHTML = `
+
+      `;
+      input_container.appendChild(div); //append inside of container element
+    }
+  }
 }
 //clear stored input data from localStorage (if any), clear input elements
 async function ClearInputData(id) {
-    if (
-        testStorage() &&
-        localStorage.getItem("input_container_content") !== null
-    ) {
-        localStorage.setItem("input_container_content", "{}");
-    }
-    ClearInputElements();
+  if (
+    testStorage() &&
+    localStorage.getItem("input_container_content") !== null
+  ) {
+    localStorage.setItem("input_container_content", "{}");
+  }
+  ClearInputElements();
 }
 //clear input elements
 async function ClearInputElements() {
-    let selectElement = document.getElementById("input-select");
-    var input_container = document.getElementById("input-container");
-    if (selectElement.value == "Box") {
-        document.getElementById("text-area").value = "{}";
-    }
-    if (selectElement.value == "List") {
-        input_container.innerHTML = "";
-    }
+  let selectElement = document.getElementById("input-select");
+  var input_container = document.getElementById("input-container");
+  if (selectElement.value == "Box") {
+    document.getElementById("text-area").value = "{}";
+  }
+  if (selectElement.value == "List") {
+    input_container.innerHTML = "";
+  }
 }
 // //Run day ahead, then publish actions
 // async function DayheadOptimPublish() {
 //   response = await formAction("dayahead-optim", "basic")
-// if (response) { //if successful publish data
+//   if (response) { //if successful publish data
 //     formAction("publish-data", "basic")
 //   }
 //}
-
-
-
-
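+// Usage sketch for the add/remove controls above (the markup is illustrative;
+// the real buttons live in the HTML templates and pass these class names):
+//   <button onclick="dictInputs('input-plus')">+</button>  adds a list row
+//   <button onclick="dictInputs('input-minus')">-</button> removes the last row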
diff --git a/src/emhass/static/style.css b/src/emhass/static/style.css
index fd87ba06..eaf4834f 100644
--- a/src/emhass/static/style.css
+++ b/src/emhass/static/style.css
@@ -570,6 +570,8 @@ button,
 .mystyle,
 .alert,
 .info,
+.section-card,
+select,
 table {
     border-radius: 7px;
     /* overflow: visible; */
@@ -578,6 +580,7 @@ table {
 button,
 .alert,
 .info,
+.section-card,
 select {
     text-transform: none;
     border-width: 1px;
@@ -599,6 +602,8 @@ select {
 
 h2 {
     margin-bottom: .3em;
+    margin-right: .3em;
+    margin-left: .3em;
 }
 
 .table_div h4 {
@@ -787,7 +792,7 @@ tr:hover td:last-child {
 
 .tick {
     /* tick symbol */
-    color: #158b00;
+    color: #bfebbc;
     vertical-align: text-top;
     font-size: 4.0em;
     animation-name: fadeInOpacity;
@@ -798,7 +803,7 @@ tr:hover td:last-child {
 
 .cross {
     /* cross symbol */
-    color: #a71515;
+    color: #e3b7b7;
     vertical-align: text-top;
     font-size: 4.0em;
    animation-name: fadeInOpacity;
@@ -941,7 +946,8 @@ select,
 
 .input-list input,
-.input-box textarea {
+.input-box textarea,
+#config-box {
     border-collapse: collapse;
     border-radius: 7px;
     border-style: solid;
@@ -970,7 +976,223 @@ select,
     margin: 0;
 }
 
-/* */
+/* config page */
+#configurationContainer {
+    border: 1px solid;
+    border-width: 1px 0px;
+}
+
+#configurationContainer,
+.header-footer {
+    margin-bottom: 5px;
+    padding: 20px 0px;
+    border-radius: 7px;
+    max-width: 90%;
+    margin: auto;
+}
+
+
+.header-footer {
+    background-color: #0000;
+    display: flex;
+    justify-content: space-between;
+}
+
+.header-footer h4,
+.header-footer div,
+.header-footer a {
+    line-height: 0;
+    margin: auto 0;
+
+}
+
+/* loading icons */
+.header-footer p {
+    margin: 20px 0;
+}
+
+.header-footer h4 {
+    color: darkblue;
+}
+
+.header-footer .feather {
+    height: 3em !important;
+    stroke-width: 3 !important;
+}
+
+#save,
+#json {
+    min-width: 15%;
+    height: auto;
+    display: block;
+    min-height: fit-content;
+    margin-left: auto;
+    margin-right: 2%;
+}
+
+/* configuration list page */
+
+.section-card {
+    max-width: 95%;
+    margin-left: auto;
+    margin-right: auto;
+    margin-bottom: 1vh;
+}
+
+.section-card h4 {
+    margin-top: 5px;
+    background-color: #0000 !important;
+}
+
+.section-card-header {
+    background-color: #e1e1e15e;
+    display: flex;
+    align-items: center;
+    padding: 0px 10px;
+    border-bottom: 1px solid rgba(0, 0, 0, 0.355);
+    /* justify-content: center; */
+}
+
+.switch,
+.section-card-header input {
+    position: relative;
+    display: inline-block;
+    width: 52px;
+    height: 27px;
+    margin-left: auto;
+}
+
+.switch input {
+    opacity: 0;
+    width: 0;
+    height: 0;
+}
+
+.slider {
+    position: absolute;
+    cursor: pointer;
+    top: 0;
+    left: 0;
+    right: 0;
+    bottom: 0;
+    background-color: #ccc;
+    -webkit-transition: .4s;
+    transition: .4s;
+}
+
+.slider:before {
+    position: absolute;
+    content: "";
+    height: calc(27px - 7px);
+    width: calc(27px - 7px);
+    left: 4px;
+    bottom: 4px;
+    background-color: white;
+    -webkit-transition: .4s;
+    transition: .4s;
+}
+
+input:checked+.slider {
+    background-color: darkblue;
+}
+
+input:checked+.slider:before {
+    -webkit-transform: translateX(26px);
+    -ms-transform: translateX(26px);
+    transform: translateX(26px);
+}
+
+.slider,
+.slider:before {
+    border-radius: 7px
+}
+
+/* param container and content styling */
+.param {
+    text-align: left;
+    padding: 5px;
+    border-bottom: 1px solid rgba(0, 0, 0, 0.355);
+    transition: 1s;
+}
+
+.param input,
+.section-card-header input {
+    background-color: #ffffff11;
+    border-radius: 7px;
+    border: solid 1px;
+    color: #181818;
+    min-width: 40%;
+    max-width: 70%;
+}
+
+.section-card-header input {
+    min-width: calc(27px - 7px);
+}
+
+.param p,
+.param i {
+    font-size: .7em;
+    margin-top: 4px;
+}
+
+.param i {
+    font-size: .7em;
+    margin-bottom: 5px;
+}
+
+.param p {
+    padding-right: 5px;
+    max-width: 777px;
+}
+
+.param h5 {
+    font-size: 1em;
+}
+
+.param h5,
+p {
+    margin: 5px;
+    margin-left: 0px;
+    margin-bottom: 0px;
+}
+
+.param button {
+    width: 20px;
+    height: 20px;
+    line-height: 0;
+    padding: 1px;
+    box-shadow: none;
+    margin-bottom: 5px;
+}
+
+.param-input {
+    display: block;
+    float: left;
+    min-width: 100%;
+}
+
+.param-input input {
+    min-width: 70%;
+}
+
+.param-input input[type="time"] {
+    min-width: 35%;
+}
+
+/* when requirement param is not met */
+.requirement-disable {
+    pointer-events: none;
+    filter: opacity(25%);
+}
+
+/* configuration box page */
+
+#config-box {
+    min-width: 100%;
+    min-height: 85vh;
+}
+
+
 /* mobile only changes */
@@ -989,6 +1211,9 @@ select,
     }
 }
 
+
+
+
 /* Dark Theme Support */
 @media (prefers-color-scheme: dark) {
     html.adaptive {
@@ -1031,6 +1256,9 @@ select,
     }
 
     h2,
+    h3,
+    h4,
+    .header-footer h4,
     kbd,
     a {
         background-color: #111111;
@@ -1094,7 +1322,8 @@ select,
     }
 
     .input-list input,
-    .input-box textarea {
+    .input-box textarea,
+    #config-box {
         background-color: #282928;
         border-color: #e1e1e1;
         color: #e1e1e1
@@ -1110,5 +1339,28 @@ select,
         border-top: 16px solid #ccf3ff;
     }
 
+    input:checked+.slider {
+        background-color: #ccf3ff;
+    }
-}
+    .param {
+        border-color: rgba(255, 255, 255, 0.355);
+    }
+
+    .param input,
+    .section-card-header input {
+        color: #e1e1e1;
+    }
+
+    .section-card-header {
+        background-color: #ffffff11;
+    }
+
+
+    #configurationContainer {
+        background-color: #ffffff07;
+        border: 0;
+    }
+
+
+}
\ No newline at end of file
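/* A sketch (not part of this patch) of how the new .requirement-disable rule
   could be driven from script; the helper name and element argument are
   illustrative, only the class name comes from the stylesheet:
     function setRequirementMet(paramDiv, met) {
       paramDiv.classList.toggle("requirement-disable", !met); //fade + block input
     }
*/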
diff --git a/src/emhass/templates/configuration.html b/src/emhass/templates/configuration.html
new file mode 100644
index 00000000..4c406e50
--- /dev/null
+++ b/src/emhass/templates/configuration.html
@@ -0,0 +1,77 @@
+<!-- 77-line configuration page template; its markup did not survive
+     extraction. The only recoverable text is the page title and header, both
+     reading "EMHASS: Energy Management Optimization for Home Assistant". -->
\ No newline at end of file
diff --git a/src/emhass/templates/index.html b/src/emhass/templates/index.html
index 237cd328..622f3493 100644
--- a/src/emhass/templates/index.html
+++ b/src/emhass/templates/index.html
@@ -4,7 +4,7 @@
 EMHASS: Energy Management Optimization for Home Assistant
-
+
@@ -14,6 +14,12 @@