From 446adc85b44b24e815fb09addf42236f2516abed Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?L=2E=20L=C3=B3pez?=
Date: Thu, 8 Feb 2024 17:35:34 +0100
Subject: [PATCH] Development branch. Testing ha-historical-sensors 3.x

---
 custom_components/ideenergy/const.py       |   3 +
 .../ideenergy/datacoordinator.py           |  22 +-
 custom_components/ideenergy/entity.py      |   6 +-
 custom_components/ideenergy/fixes.py       | 239 ------------------
 custom_components/ideenergy/manifest.json  |   4 +-
 custom_components/ideenergy/sensor.py      | 128 ++--------
 6 files changed, 52 insertions(+), 350 deletions(-)
 delete mode 100644 custom_components/ideenergy/fixes.py

diff --git a/custom_components/ideenergy/const.py b/custom_components/ideenergy/const.py
index 9a7fcce..fc7e1bf 100644
--- a/custom_components/ideenergy/const.py
+++ b/custom_components/ideenergy/const.py
@@ -17,11 +17,14 @@
 
 
 from datetime import timedelta
+from zoneinfo import ZoneInfo
 
 DOMAIN = "ideenergy"
 
 CONF_CONTRACT = "contract"
 
+LOCAL_TZ = ZoneInfo("Europe/Madrid")
+
 MEASURE_MAX_AGE = 60 * 50  # Fifty minutes
 MAX_RETRIES = 3
 MIN_SCAN_INTERVAL = 60
diff --git a/custom_components/ideenergy/datacoordinator.py b/custom_components/ideenergy/datacoordinator.py
index 4bde39a..805724d 100644
--- a/custom_components/ideenergy/datacoordinator.py
+++ b/custom_components/ideenergy/datacoordinator.py
@@ -21,10 +21,11 @@
 from datetime import datetime, timedelta, timezone
 from typing import Any, TypedDict
 
-import ideenergy
 from homeassistant.core import dt_util
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
 
+import ideenergy
+
 from .barrier import Barrier, BarrierDeniedError
 from .const import (
     DATA_ATTR_HISTORICAL_CONSUMPTION,
@@ -33,6 +34,7 @@
     DATA_ATTR_MEASURE_ACCUMULATED,
     DATA_ATTR_MEASURE_INSTANT,
     HISTORICAL_PERIOD_LENGHT,
+    LOCAL_TZ,
 )
 from .entity import IDeEntity
 
@@ -242,7 +244,9 @@ async def get_direct_reading_data(self) -> dict[str, int | float]:
     async def get_historical_consumption_data(self) -> Any:
         end = datetime.today()
         start = end - HISTORICAL_PERIOD_LENGHT
+
         data = await self.api.get_historical_consumption(start=start, end=end)
+        data.periods = [normalize_period_item(x) for x in data.periods]
 
         return {DATA_ATTR_HISTORICAL_CONSUMPTION: data}
 
@@ -251,9 +255,25 @@ async def get_historical_generation_data(self) -> Any:
         start = end - HISTORICAL_PERIOD_LENGHT
         data = await self.api.get_historical_generation(start=start, end=end)
 
+        raise NotImplementedError()
+
         return {DATA_ATTR_HISTORICAL_GENERATION: data}
 
     async def get_historical_power_demand_data(self) -> Any:
         data = await self.api.get_historical_power_demand()
+        data.demands = [normalize_dated_item(x) for x in data.demands]
 
         return {DATA_ATTR_HISTORICAL_POWER_DEMAND: data}
+
+
+def normalize_period_item(item):
+    item.start = item.start.replace(tzinfo=LOCAL_TZ)
+    item.end = item.end.replace(tzinfo=LOCAL_TZ)
+
+    return item
+
+
+def normalize_dated_item(item):
+    item.dt = item.dt.replace(tzinfo=LOCAL_TZ)
+
+    return item
diff --git a/custom_components/ideenergy/entity.py b/custom_components/ideenergy/entity.py
index 583b2a9..b730821 100644
--- a/custom_components/ideenergy/entity.py
+++ b/custom_components/ideenergy/entity.py
@@ -26,8 +26,8 @@
 from homeassistant.helpers.entity import DeviceInfo
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
 from homeassistant.util import slugify
-from homeassistant_historical_sensor.recorderutil import (
-    delete_entity_invalid_states,
+from homeassistant_historical_sensor.timemachine import (
+    delete_invalid_states,
     hass_recorder_session,
 )
 
@@ -96,7 +96,7 @@ async def async_delete_invalid_states(self) -> int:
 
         def fn():
             with hass_recorder_session(self.hass) as session:
-                return delete_entity_invalid_states(session, self)
+                return delete_invalid_states(session, self)
 
         return await recorder.get_instance(self.hass).async_add_executor_job(fn)
 
diff --git a/custom_components/ideenergy/fixes.py b/custom_components/ideenergy/fixes.py
deleted file mode 100644
index 3324698..0000000
--- a/custom_components/ideenergy/fixes.py
+++ /dev/null
@@ -1,239 +0,0 @@
-# Copyright (C) 2021-2022 Luis López
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
-# USA.
-
-
-import logging
-
-import sqlalchemy as sa
-from homeassistant.components import recorder
-from homeassistant.components.recorder import db_schema, statistics
-from homeassistant.core import HomeAssistant, dt_util
-from homeassistant_historical_sensor import recorderutil
-
-_LOGGER = logging.getLogger(__name__)
-
-
-async def async_fix_statistics(
-    hass: HomeAssistant, statistic_metadata: statistics.StatisticMetaData
-) -> None:
-    def timestamp_as_local(timestamp):
-        return dt_util.as_local(dt_util.utc_from_timestamp(timestamp))
-
-    def fn():
-        fixes_applied = False
-
-        statistic_id = statistic_metadata["statistic_id"]
-        statistic_metadata_has_mean = statistic_metadata.get("has_mean", False)
-        statistic_metadata_has_sum = statistic_metadata.get("has_sum", False)
-
-        with recorderutil.hass_recorder_session(hass) as session:
-            #
-            # Check and fix current metadata
-            #
-
-            current_metadata = session.execute(
-                sa.select(db_schema.StatisticsMeta).where(
-                    db_schema.StatisticsMeta.statistic_id == statistic_id
-                )
-            ).scalar()
-
-            if current_metadata is None:
-                _LOGGER.debug(f"{statistic_id}: no statistics found, nothing to fix")
-                return
-
-            statistics_base_stmt = sa.select(db_schema.Statistics).where(
-                db_schema.Statistics.metadata_id == current_metadata.id
-            )
-
-            metadata_needs_fixes = (
-                current_metadata.has_mean != statistic_metadata_has_mean
-            ) or (current_metadata.has_sum != statistic_metadata_has_sum)
-
-            if metadata_needs_fixes:
-                _LOGGER.debug(
-                    f"{statistic_id}: statistic metadata is outdated."
-                    f" has_mean:{current_metadata.has_mean}→{statistic_metadata_has_mean}"
-                    f" has_sum:{current_metadata.has_sum}→{statistic_metadata_has_sum}"
-                )
-                current_metadata.has_mean = statistic_metadata_has_mean
-                current_metadata.has_sum = statistic_metadata_has_sum
-                session.add(current_metadata)
-                session.commit()
-                fixes_applied = True
-
-            #
-            # Check for broken points and decreasings
-            #
-            broken_point = None
-
-            prev_sum = 0
-            statistics_iter_stmt = statistics_base_stmt.order_by(
-                db_schema.Statistics.start_ts.asc()
-            )
-
-            for statistic in session.execute(statistics_iter_stmt).scalars():
-                is_broken = False
-                local_start_dt = timestamp_as_local(statistic.start_ts)
-
-                # Check for NULL mean
-                if statistic_metadata_has_mean and statistic.mean is None:
-                    is_broken = True
-                    _LOGGER.debug(
-                        f"{statistic_id}: mean value at {local_start_dt} is NULL"
-                    )
-
-                # Check for NULL sum
-                if statistic_metadata_has_sum and statistic.sum is None:
-                    is_broken = True
-                    _LOGGER.debug(
-                        f"{statistic_id}: sum value at {local_start_dt} is NULL"
-                    )
-
-                # Check for decreasing values in sum
-                if statistic_metadata_has_sum and statistic.sum:
-                    if statistic.sum < prev_sum:
-                        is_broken = True
-                        _LOGGER.debug(
-                            f"{statistic_id}: "
-                            + f"decreasing sum at {local_start_dt} "
-                            + f"{statistic.sum} < {prev_sum} ({statistic!r})"
-                        )
-                    else:
-                        prev_sum = statistic.sum
-
-                # Found anything broken?
-                if is_broken:
-                    broken_point = statistic.start_ts
-                    break
-
-            #
-            # Check for broken points (search only for NULLs)
-            #
-
-            # clauses_for_additional_or_ = [db_schema.Statistics.state == None]
-            # if statistic_metadata_has_mean:
-            #     clauses_for_additional_or_.append(db_schema.Statistics.mean == None)
-            # if statistic_metadata_has_sum:
-            #     clauses_for_additional_or_.append(db_schema.Statistics.sum == None)
-
-            # find_broken_point_stmt = (
-            #     sa.select(sa.func.min(db_schema.Statistics.start_ts))
-            #     .where(db_schema.Statistics.metadata_id == current_metadata.id)
-            #     .where(sa.or_(*clauses_for_additional_or_))
-            # )
-
-            # broken_point = session.execute(find_broken_point_stmt).scalar()
-
-            #
-            # Delete everything after broken point
-            #
-            if broken_point:
-                invalid_statistics_stmt = statistics_base_stmt.where(
-                    db_schema.Statistics.start_ts >= broken_point
-                )
-                invalid_statistics = (
-                    session.execute(invalid_statistics_stmt).scalars().fetchall()
-                )
-
-                for x in invalid_statistics:
-                    session.delete(x)
-
-                session.commit()
-                fixes_applied = True
-
-                _LOGGER.debug(
-                    f"{statistic_id}: "
-                    f"found broken point at {timestamp_as_local(broken_point)},"
-                    f" deleted {len(invalid_statistics)} statistics"
-                )
-
-            #
-            # Delete additional statistics
-            #
-
-            clauses_for_additional_or_ = [db_schema.Statistics.state == None]
-
-            if statistic_metadata_has_mean:
-                clauses_for_additional_or_.append(db_schema.Statistics.mean == None)
-
-            if statistic_metadata_has_sum:
-                clauses_for_additional_or_.append(db_schema.Statistics.sum == None)
-
-            invalid_statistics_stmt = statistics_base_stmt.where(
-                sa.or_(*clauses_for_additional_or_)
-            )
-
-            invalid_statistics = (
-                session.execute(invalid_statistics_stmt).scalars().fetchall()
-            )
-
-            if invalid_statistics:
-                for o in invalid_statistics:
-                    session.delete(o)
-                session.commit()
-                fixes_applied = True
-
-                _LOGGER.debug(
-                    f"{statistic_id}: "
-                    f"deleted {len(invalid_statistics)} statistics with invalid attributes"
-                )
-
-            if not fixes_applied:
-                _LOGGER.debug(f"{statistic_id}: no problems found")
-
-        #
-        # Recalculate
-        #
-
-        # if not broken_point and not force_recalculate:
-        #     return
-
-        # if broken_point:
-        #     _LOGGER.debug(
-        #         f"{statistic_id}: found broken statistics since"
-        #         f" {timestamp_as_local(broken_point.start_ts)},"
-        #         f" recalculating everything from there"
-        #     )
-
-        #
-        # Recalculate all stats
-        #
-
-        # accumulated = 0
-        # for statistic in session.execute(
-        #     sa.select(db_schema.Statistics)
-        #     .where(db_schema.Statistics.metadata_id == statistic_id)
-        #     .order_by(db_schema.Statistics.start_ts.asc)
-        # ):
-        #     accumulated = accumulated + statistic.state
-
-        #     # fmt: off
-        #     statistic.mean = statistic.state if statistic_metadata_has_mean else None
-        #     statistic.sum = accumulated if statistic_metadata_has_sum else None
-        #     statistic.min = None
-        #     statistic.max = None
-        #     # fmt: on
-
-        #     session.add(statistic)
-        #     _LOGGER.debug(
-        #         f"{statistic_id}: "
-        #         f"update {statistic.id} {timestamp_as_local(statistic.start_ts)} "
-        #         f"value={statistic.value}\tsum={statistic.sum}"
-        #     )
-        #     session.commit()
-
-    return await recorder.get_instance(hass).async_add_executor_job(fn)
diff --git a/custom_components/ideenergy/manifest.json b/custom_components/ideenergy/manifest.json
index f43e571..f918cfd 100644
--- a/custom_components/ideenergy/manifest.json
+++ b/custom_components/ideenergy/manifest.json
@@ -14,7 +14,7 @@
   "issue_tracker": "https://github.com/ldotlopez/ha-ideenergy/issues",
   "requirements": [
     "ideenergy>=2.0.0rc1",
-    "homeassistant-historical-sensor==2.0.0rc5"
+    "homeassistant-historical-sensor==3.0.0a1"
   ],
-  "version": "2.1.2"
+  "version": "2024.0.0"
 }
diff --git a/custom_components/ideenergy/sensor.py b/custom_components/ideenergy/sensor.py
index 192368b..a0d73a9 100644
--- a/custom_components/ideenergy/sensor.py
+++ b/custom_components/ideenergy/sensor.py
@@ -24,15 +24,13 @@
 # Check sensor.SensorEntityDescription
 # https://github.com/home-assistant/core/blob/dev/homeassistant/components/sensor/__init__.py
 
-
 import itertools
 import logging
 from collections.abc import Callable
-from datetime import datetime, timedelta
+from datetime import datetime
+from math import ceil
 from typing import Any
 
-from homeassistant.components import recorder
-from homeassistant.components.recorder import statistics
 from homeassistant.components.recorder.models import StatisticData, StatisticMetaData
 from homeassistant.components.sensor import (
     SensorDeviceClass,
@@ -50,9 +48,8 @@
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.restore_state import RestoreEntity
 from homeassistant.helpers.typing import DiscoveryInfoType
-from homeassistant.util import dt as dtutil
 from homeassistant_historical_sensor import HistoricalSensor, HistoricalState
-from ideenergy.types import PeriodValue
+from homeassistant_historical_sensor import timemachine as tm
 
 from .const import DOMAIN
 from .datacoordinator import (
@@ -64,11 +61,9 @@
     DataSetType,
 )
 from .entity import IDeEntity
-from .fixes import async_fix_statistics
 
 PLATFORM = "sensor"
 
-MAINLAND_SPAIN_ZONEINFO = dtutil.zoneinfo.ZoneInfo("Europe/Madrid")
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -105,17 +100,6 @@ def get_statistic_metadata(self) -> StatisticMetaData:
 
         return meta
 
-    async def async_added_to_hass(self):
-        await super().async_added_to_hass()
-
-        #
-        # In 2.0 branch we f**ked statistiscs.
-        # Don't set state_class attributes for historical sensors!
-        #
-        # FIXME: Remove in future 3.0 series.
-        #
-        await async_fix_statistics(self.hass, self.get_statistic_metadata())
-
     async def async_calculate_statistic_data(
         self, hist_states: list[HistoricalState], *, latest: dict | None
     ) -> list[StatisticData]:
@@ -136,50 +120,19 @@ async def async_calculate_statistic_data(
 
         #
 
         def hour_block_for_hist_state(hist_state: HistoricalState) -> datetime:
-            # XX:00:00 states belongs to previous hour block
-            if hist_state.dt.minute == 0 and hist_state.dt.second == 0:
-                dt = hist_state.dt - timedelta(hours=1)
-                return dt.replace(minute=0, second=0, microsecond=0)
-
-            else:
-                return hist_state.dt.replace(minute=0, second=0, microsecond=0)
-
-        #
-        # Ignore supplied 'lastest' and fetch again from recorder
-        # FIXME: integrate into homeassistant_historical_sensor and remove
-        #
-
-        def get_last_statistics():
-            ret = statistics.get_last_statistics(
-                self.hass,
-                1,
-                self.statistic_id,
-                convert_units=True,
-                types={"sum"},
-            )
-
-            # ret can be none or {}
-            if not ret:
-                return None
+            secs_per_hour = 60 * 60
 
-            try:
-                return ret[self.statistic_id][0]
+            ts = ceil(hist_state.ts)
+            block = ts // secs_per_hour
+            leftover = ts % secs_per_hour
 
-            except KeyError:
-                # No stats found
-                return None
+            if leftover == 0:
+                block = block - 1
 
-            except IndexError:
-                # What?
-                _LOGGER.error(
-                    f"{self.statatistic_id}: "
-                    + "[bug] found last statistics key but doesn't have any value! "
-                    + f"({ret!r})"
-                )
-                raise
+            return block * secs_per_hour
 
-        latest = await recorder.get_instance(self.hass).async_add_executor_job(
-            get_last_statistics
+        latest = await tm.hass_get_last_statistic(
+            self.hass, self.get_statistic_metadata()
         )
 
@@ -212,7 +165,7 @@ def extract_last_sum(latest) -> float:
 
         ret = []
 
-        for dt, collection_it in itertools.groupby(
+        for hour_block, collection_it in itertools.groupby(
             hist_states, key=hour_block_for_hist_state
         ):
             collection = list(collection_it)
@@ -223,7 +176,7 @@ def extract_last_sum(latest) -> float:
 
             ret.append(
                 StatisticData(
-                    start=dt,
+                    start=dt_util.utc_from_timestamp(hour_block),
                     state=hour_accumulated,
                     # mean=hour_mean,
                     sum=total_accumulated,
@@ -331,7 +284,10 @@ def historical_states(self):
 
             # FIXME: This should be None, fix ha-historical-sensor
            return []
 
-        ret = historical_states_from_period_values(data.periods)
+        ret = [
+            HistoricalState(state=x.value, ts=x.end.timestamp()) for x in data.periods
+        ]
+
         return ret
 
@@ -368,8 +324,7 @@ def historical_states(self):
             # FIXME: This should be None, fix ha-historical-sensor
             return []
 
-        ret = historical_states_from_period_values(data.periods)
-        return ret
+        return [HistoricalState(state=state, ts=ts) for (ts, state) in data]
 
 
 class HistoricalPowerDemand(HistoricalSensorMixin, IDeEntity, SensorEntity):
@@ -390,13 +345,11 @@ def historical_states(self):
             # FIXME: This should be None, fix ha-historical-sensor
             return []
 
-        def demand_at_instant_as_historical_state(item):
-            return HistoricalState(
-                state=item.value / 1000,
-                dt=item.dt.replace(tzinfo=MAINLAND_SPAIN_ZONEINFO),
-            )
+        ret = [
+            HistoricalState(state=x.value / 1000, ts=x.dt.timestamp())
+            for x in data.demands
+        ]
 
-        ret = [demand_at_instant_as_historical_state(x) for x in data.demands]
         return ret
 
 
@@ -428,41 +381,6 @@ async def async_setup_entry(
     async_add_devices(sensors)
 
 
-def historical_states_from_historical_api_data(
-    data: list[dict] | None = None,
-) -> list[HistoricalState]:
-    def _convert_item(item):
-        # FIXME: What about canary islands?
-        dt = item["end"].replace(tzinfo=MAINLAND_SPAIN_ZONEINFO)
-        last_reset = item["start"].replace(tzinfo=MAINLAND_SPAIN_ZONEINFO)
-
-        return HistoricalState(
-            state=item["value"] / 1000,
-            dt=dt,
-            attributes={"last_reset": last_reset},
-        )
-
-    return [_convert_item(item) for item in data or []]
-
-
-def historical_states_from_period_values(
-    period_values: list[PeriodValue],
-) -> list[HistoricalState]:
-    def fn():
-        for item in period_values:
-            # FIXME: What about canary islands?
-            dt = item.end.replace(tzinfo=MAINLAND_SPAIN_ZONEINFO)
-            last_reset = item.start.replace(tzinfo=MAINLAND_SPAIN_ZONEINFO)
-
-            yield HistoricalState(
-                state=item.value / 1000,
-                dt=dt,
-                attributes={"last_reset": last_reset},
-            )
-
-    return list(fn())
-
-
 async def async_get_last_state_safe(
     entity: RestoreEntity, convert_fn: Callable[[Any], Any]
 ) -> Any:
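
Note (not part of the patch): the new hour_block_for_hist_state above replaces
datetime arithmetic with integer math on epoch seconds. Below is a minimal
standalone sketch of the same bucketing rule; the hour_block name and the
sample timestamps are illustrative only, not taken from the patch.

    from math import ceil

    SECS_PER_HOUR = 60 * 60

    def hour_block(ts: float) -> int:
        # Map an epoch timestamp to the epoch second where its hour block
        # starts. A state stamped exactly at XX:00:00 closes the *previous*
        # hour, so exact multiples of 3600 are pushed one block back.
        ts = ceil(ts)
        block, leftover = divmod(ts, SECS_PER_HOUR)
        if leftover == 0:
            block -= 1
        return block * SECS_PER_HOUR

    # 01:00:00 still belongs to the 00:00-01:00 block; one second later
    # opens the next block.
    assert hour_block(3600) == 0
    assert hour_block(3601) == 3600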