From 7e9adc7a7274c12761bd6d0b8c617356a427da04 Mon Sep 17 00:00:00 2001 From: Zach Sanson <47839859+WibblyGhost@users.noreply.github.com> Date: Sun, 30 Oct 2022 21:03:42 +1300 Subject: [PATCH] Extend Unit Tests (#10) * Add more unit tests * Remodel test classes * Restructure package directory * Modify unit test cases * Fix import errors and structure * Update MQTT Classes unit tests * Add decode message tests * Complete MQTT Classes unit tests * Update other unit tests * Complete logger tests * Run formatter over code * Update scripts --- .dockerignore | 12 +- .gitignore | 6 +- classes/py_logger.py | 158 ----- docker-compose.yml | 6 +- pyproject.toml | 16 +- requirements-test.txt | 2 +- setup.py | 21 +- solar-logger-build.ps1 | 24 +- solar-logger.dockerfile | 58 +- {app => src}/__init__.py | 0 {classes => src/app}/__init__.py | 0 {app => src/app}/influx_query.py | 46 +- {app => src/app}/solar_main.py | 33 +- src/classes/__init__.py | 0 {classes => src/classes}/common_classes.py | 37 +- {classes => src/classes}/custom_exceptions.py | 0 {classes => src/classes}/influx_classes.py | 2 +- {classes => src/classes}/mqtt_classes.py | 46 +- {classes => src/classes}/query_classes.py | 0 {config => src/config}/config.ini | 5 +- src/helpers/__init__.py | 0 {classes => src/helpers}/consts.py | 2 +- {classes => src/helpers}/py_functions.py | 8 +- src/helpers/py_logger.py | 203 ++++++ start_logger.py | 8 + start_query.py | 9 + tests/app/test_influx_query.py | 9 +- tests/app/test_solar_main.py | 9 +- tests/classes/test_common_classes.py | 162 +++-- tests/classes/test_influx_classes.py | 198 +++--- tests/classes/test_mqtt_classes.py | 657 +++++++++++++++--- tests/classes/test_py_functions.py | 31 - tests/classes/test_query_classes.py | 264 +++---- tests/config/config.ini | 40 ++ tests/config/consts.py | 162 ++++- tests/helpers/__init__.py | 0 tests/helpers/test_py_functions.py | 87 +++ tests/helpers/test_py_logger.py | 180 +++++ 38 files changed, 1759 insertions(+), 742 deletions(-) delete mode 100644 classes/py_logger.py rename {app => src}/__init__.py (100%) rename {classes => src/app}/__init__.py (100%) rename {app => src/app}/influx_query.py (76%) rename {app => src/app}/solar_main.py (87%) create mode 100644 src/classes/__init__.py rename {classes => src/classes}/common_classes.py (87%) rename {classes => src/classes}/custom_exceptions.py (100%) rename {classes => src/classes}/influx_classes.py (98%) rename {classes => src/classes}/mqtt_classes.py (86%) rename {classes => src/classes}/query_classes.py (100%) rename {config => src/config}/config.ini (84%) create mode 100644 src/helpers/__init__.py rename {classes => src/helpers}/consts.py (90%) rename {classes => src/helpers}/py_functions.py (89%) create mode 100644 src/helpers/py_logger.py create mode 100644 start_logger.py create mode 100644 start_query.py delete mode 100644 tests/classes/test_py_functions.py create mode 100644 tests/config/config.ini create mode 100644 tests/helpers/__init__.py create mode 100644 tests/helpers/test_py_functions.py create mode 100644 tests/helpers/test_py_logger.py diff --git a/.dockerignore b/.dockerignore index 0ce1cfe..c43d012 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,7 +3,7 @@ __pycache__/ .pytest_cache/ .idea/ .vscode/ -venv/ +*.venv/ *.py[cod] # Secret files @@ -20,4 +20,12 @@ htmlcov/ # Docker Mappings docker-solar-logger/ -docker-influxdb/ \ No newline at end of file +docker-influxdb/ + +# Build Files +*.egg-info/ +build +dist + +# Backup Files +backups/ \ No newline at end of file diff --git
a/.gitignore b/.gitignore index 40cfdd4..c43d012 100644 --- a/.gitignore +++ b/.gitignore @@ -3,7 +3,6 @@ __pycache__/ .pytest_cache/ .idea/ .vscode/ -venv/ *.venv/ *.py[cod] @@ -26,4 +25,7 @@ docker-influxdb/ # Build Files *.egg-info/ build -dist \ No newline at end of file +dist + +# Backup Files +backups/ \ No newline at end of file diff --git a/classes/py_logger.py b/classes/py_logger.py deleted file mode 100644 index 4fb36ef..0000000 --- a/classes/py_logger.py +++ /dev/null @@ -1,158 +0,0 @@ -""" -Contains all functions required to setup logging -""" - -import configparser -import logging -import os -from logging import Logger -from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler - -from classes.consts import CONFIG_FILENAME -from classes.custom_exceptions import MissingConfigurationError - - -class LoggingTools: - """ - Class contains all tools required to create loggers - """ - - def __init__(self, config_name: str, logger: Logger) -> None: - """ - Initialization of logging class - """ - self._debug_level = None - self._file_format = None - self._date_format = None - self._is_file_logging = None - self._log_rotation = None - self._file_location = None - self._file_path = None - self._max_file_bytes = None - self._max_file_no = None - self._read_configs(config_name) - - self._create_stdout_logger(logger=logger) - if self._is_file_logging and self._log_rotation == "size_based": - self._create_rotating_file_logger(logger=logger) - elif self._is_file_logging and self._log_rotation == "time_based": - self._create_timed_rotating_file_logger(logger=logger) - - def _read_configs(self, config_name: str) -> None: - """ - Reads config file and parses and stores the result - """ - config_parser = configparser.ConfigParser() - try: - config_parser.read(CONFIG_FILENAME) - debug_dict = { - "DEBUG": logging.DEBUG, - "INFO": logging.INFO, - "WARNING": logging.WARNING, - "ERROR": logging.ERROR, - "CRITICAL": logging.CRITICAL, - } - self._debug_level = debug_dict[ - config_parser.get(config_name, "debug_level") - ] - self._file_format = config_parser.get(config_name, "format") - self._date_format = config_parser.get(config_name, "dateformat") - self._is_file_logging = config_parser.getboolean( - config_name, "file_logging" - ) - - if None in [self._debug_level, self._file_format, self._date_format]: - logging.critical("Failed to read basic logger configs") - raise MissingConfigurationError("Failed to read basic logger configs") - - if self._is_file_logging: - self._log_rotation = config_parser.get(config_name, "log_rotation") - - self._file_location = config_parser.get(config_name, "file_location") - file_name = config_parser.get(config_name, "file_name") - self._file_path = self._file_location + file_name - self._max_file_bytes = int( - config_parser.get(config_name, "max_file_bytes") - ) - self._max_file_no = int(config_parser.get(config_name, "max_file_no")) - - if None in [ - self._file_location, - self._file_path, - self._max_file_bytes, - self._max_file_no, - ]: - logging.critical("Failed to read file logger settings in configs") - raise MissingConfigurationError( - "Failed to read file logger settings in configs" - ) - except Exception as err: - logging.critical("An unexpected exception has occurred") - raise err - - def _create_stdout_logger(self, logger: Logger) -> None: - """ - Creates a standard STDOUT logger - """ - logger.setLevel(self._debug_level) - stream_handler = logging.StreamHandler() - stream_handler.setLevel(self._debug_level) - log_formatter =
logging.Formatter( - fmt=self._file_format, datefmt=self._date_format - ) - stream_handler.setFormatter(log_formatter) - logger.addHandler(stream_handler) - logging.info("Created stdout logger") - - def _create_rotating_file_logger(self, logger: Logger) -> None: - """ - Creates a rotating file logger which limits log files size - and when exceeding that size, creates a new log file - """ - if not os.path.exists(self._file_location): - os.makedirs(self._file_location) - - rotating_handler = RotatingFileHandler( - filename=self._file_path, - maxBytes=self._max_file_bytes, - backupCount=self._max_file_no, - ) - rotating_handler.setLevel(self._debug_level) - log_formatter = logging.Formatter( - fmt=self._file_format, datefmt=self._date_format - ) - rotating_handler.setFormatter(log_formatter) - logger.addHandler(rotating_handler) - logging.info(f"Created rotating file log file at {self._file_path}") - - def _create_timed_rotating_file_logger(self, logger: Logger) -> None: - """ - Creates a rotating file logger which limits log files size - and when exceeding that size, creates a new log file - """ - if not os.path.exists(self._file_location): - os.makedirs(self._file_location) - rotating_time_handler = TimedRotatingFileHandler( - filename=self._file_path, - when="midnight", - backupCount=self._max_file_no, - ) - rotating_time_handler.suffix = "%Y-%m-%d" - rotating_time_handler.setLevel(self._debug_level) - log_formatter = logging.Formatter( - fmt=self._file_format, datefmt=self._date_format - ) - rotating_time_handler.setFormatter(log_formatter) - logger.addHandler(rotating_time_handler) - logging.info(f"Created time rotating file log file at {self._file_path}") - - -def create_logger(config_name: str) -> logging: - """ - Creates a logging instance, can be customized through the config.ini - :param config_name: Section under the config for the configuration to pull data from - :return: Logger for logging - """ - logger = logging.getLogger() - LoggingTools(config_name=config_name, logger=logger) - return logging diff --git a/docker-compose.yml b/docker-compose.yml index cdb2512..ee3a693 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -16,8 +16,8 @@ services: dockerfile: solar-logger.dockerfile target: solar-logger volumes: - - ./docker-solar-logger/output:/app/output:rw - - ./docker-solar-logger/config:/app/config:rw + - ./output:/solarlogger/output:rw + - ./config:/solarlogger/config:rw env_file: - .env @@ -30,7 +30,7 @@ services: dockerfile: solar-logger.dockerfile target: influx-query volumes: - - ./docker-influx-query/output:/app/output:rw + - ./output:/solarlogger/output:rw env_file: - .env diff --git a/pyproject.toml b/pyproject.toml index 8eb1b16..02ee309 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,32 +1,32 @@ [tool.pytest.ini_options] minversion = "6.0" -addopts = "--cov-report=xml --cov-report=term --cov" +addopts = "-ra --cov-report=xml:coverage.xml --cov-report=term --cov-report=html --cov" testpaths = ["tests"] [tool.coverage.run] -source = ["app", "classes"] +source = ["src"] omit = [ - "tests", - "config", - "*/py_logger.py", + "*/tests/*", + "*/config/*", + # "*/py_logger.py", "*/__init__.py", ] [tool.isort] profile = "black" -src_paths = ["classes", "app", "tests"] +src_paths = ["src", "tests"] skip_glob = ["*.venv/*"] skip_gitignore = true -multi_line_output = 5 overwrite_in_place = true [tool.pylint.master] +init-hook='import sys; sys.path.append("src")' +# init-hook="from pylint.config import find_pylintrc; import os, sys; 
sys.path.append(os.path.dirname(find_pylintrc()))" ignore=[ ".pytest_cache", ".venv", ".github", ] -# init-hook="from pylint.config import find_pylintrc; import os, sys; sys.path.append(os.path.dirname(find_pylintrc()))" [tool.pylint.main] # Specify a score threshold under which the program will exit with error. diff --git a/requirements-test.txt b/requirements-test.txt index 5744cc4..0e0c6e4 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -3,9 +3,9 @@ pylint black isort -gitpython faker pytest pytest-coverage +pytest-mock -r requirements.txt \ No newline at end of file diff --git a/setup.py b/setup.py index 69dabb3..012c1a8 100644 --- a/setup.py +++ b/setup.py @@ -7,19 +7,22 @@ import setuptools PACKAGE_NAME = "solar_logger" -PACKAGE_DIR = "." +PACKAGE_DIR = "src" EXCLUDED_PACKAGES = ["*tests*"] -VERSION = "0.0.0" -with open("README.md", "r", encoding="utf-8") as fh: - long_description = fh.read() +with open("README.md", "r", encoding="utf-8") as open_readme_file: + long_description = open_readme_file.read() -with open("__version__.py", "r") as fh: - version = fh.read() - version = version.strip().split(" = ")[1] - version = re.sub("[\"']", "", version) - VERSION = re.sub("[-*]", "_", version) +def get_version(): + with open("src/__version__.py", "r") as open_version_file: + version = open_version_file.read() + version = version.strip().split(" = ")[1] + version = re.sub("[\"']", "", version) + return re.sub("[-*]", "_", version) + + +VERSION = get_version() setuptools.setup( name=PACKAGE_NAME.lower(),
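The rewritten setup.py expects src/__version__.py to hold a single module-level assignment. A hedged sketch of what such a file is assumed to look like, with the normalisation get_version() applies spelled out in comments (the version string below is illustrative, not the project's real version):

```python
# Hypothetical contents of src/__version__.py; the value is illustrative.
__version__ = "0.1.0-rc1"

# get_version() splits the file contents on " = " to take the right-hand side,
# strips quotes with re.sub("[\"']", "", ...), then maps "-" and "*" to "_"
# via re.sub("[-*]", "_", ...), so setuptools would receive "0.1.0_rc1" here.
```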
diff --git a/solar-logger-build.ps1 b/solar-logger-build.ps1 index 41cb31b..3b18454 100644 --- a/solar-logger-build.ps1 +++ b/solar-logger-build.ps1 @@ -2,7 +2,6 @@ # $(pwd) - Expands to working directory on Linux or Mac # ${pwd} - Expands to working directory on Windows IN POWERSHELL -$Development = $FALSE $IsFromDockerHub = $TRUE $CurrentDir = ${pwd} @@ -13,34 +12,33 @@ $RestartMode = "unless-stopped" if (!${IsFromDockerHub} -and !$Development) { # Start by building an image of SolarLogger locally - docker build . -f solar.dockerfile -t solar-logger-local + docker build . -f solar-logger.dockerfile -t solar-logger-local } # Before running the Docker images I would suggest creating the config and output volumes first # Otherwise the config.ini won't get copied across -if (!(Test-Path -Path "${CurrentDir}/docker-solar-logger/output")) { - mkdir -p "${CurrentDir}/docker-solar-logger/output" +if (!(Test-Path -Path "${CurrentDir}/output")) { + mkdir -p "${CurrentDir}/output" } -if (!(Test-Path -Path "${CurrentDir}/docker-solar-logger/config")) { - mkdir -p "${CurrentDir}/docker-solar-logger/config" +if (!(Test-Path -Path "${CurrentDir}/config")) { + mkdir -p "${CurrentDir}/config" } # CONFIG VOLUMES # docker volume create \ -docker volume create --driver local --opt type=none --opt device="${CurrentDir}/docker-solar-logger/config" --opt o=bind SolarLogger-Config +docker volume create --driver local --opt type=none --opt device="${CurrentDir}/config" --opt o=bind SolarLogger-Config # OUTPUT VOLUMES -docker volume create --driver local --opt type=none --opt device="${CurrentDir}/docker-solar-logger/output" --opt o=bind SolarLogger-Output +docker volume create --driver local --opt type=none --opt device="${CurrentDir}/output" --opt o=bind SolarLogger-Output # Run the Docker image with an environment file, output folder and config folder -if ($Development) { - docker build . -f solar.dockerfile -t wibblyghost/solar-logger --target solar-logger -} elseif (${IsFromDockerHub}) { +if (${IsFromDockerHub}) { # If the image is built from Docker hub - docker run -d --name solar-logger --restart="${RestartMode}" --env-file "${EnvFile}" --volume "SolarLogger-Config:/app/config" --volume "SolarLogger-Output:/app/output" wibblyghost/solar-logger:"${VersionTag}" + docker run -d --name solar-logger --restart="${RestartMode}" --env-file "${EnvFile}" --volume "SolarLogger-Config:/solarlogger/config" --volume "SolarLogger-Output:/solarlogger/output" wibblyghost/solar-logger:"${VersionTag}" } else { # If the image is built locally - docker run -d --name solar-logger --restart="$RestartMode" --env-file "${EnvFile}" --volume "SolarLogger-Config:/app/config" --volume "SolarLogger-Output:/app/output" solar-logger-local + docker build . -f solar-logger.dockerfile -t wibblyghost/solar-logger --target solar-logger -t solar-logger-local + docker run -d --name solar-logger --restart="$RestartMode" --env-file "${EnvFile}" --volume "SolarLogger-Config:/solarlogger/config" --volume "SolarLogger-Output:/solarlogger/output" --network host solar-logger-local } diff --git a/solar-logger.dockerfile b/solar-logger.dockerfile index 1e7edeb..06d47a1 100644 --- a/solar-logger.dockerfile +++ b/solar-logger.dockerfile @@ -3,52 +3,56 @@ ARG BASE_DIR="solarlogger" WORKDIR ${BASE_DIR} # Run updates RUN apt-get update && pip install --upgrade pip -ADD requirements.txt ./ -RUN pip install -r ./requirements.txt +ADD requirements.txt requirements.txt +RUN pip install -r requirements.txt # /config -> /solarlogger/config -ADD ./config/config.ini config/config.ini +ADD src/config/config.ini src/config/config.ini FROM builder as solar-logger ARG BASE_DIR +ADD start_logger.py start_logger.py # /app -> /solarlogger/app -ADD ./app/solar_main.py app/solar_main.py +ADD src/app/solar_main.py src/app/solar_main.py # /classes -> /solarlogger/classes -ADD ./classes/common_classes.py classes/common_classes.py -ADD ./classes/consts.py classes/consts.py -ADD ./classes/custom_exceptions.py classes/custom_exceptions.py -ADD ./classes/influx_classes.py classes/influx_classes.py -ADD ./classes/mqtt_classes.py classes/mqtt_classes.py -ADD ./classes/py_functions.py classes/py_functions.py -ADD ./classes/py_logger.py classes/py_logger.py +ADD src/classes/common_classes.py src/classes/common_classes.py +ADD src/classes/custom_exceptions.py src/classes/custom_exceptions.py +ADD src/classes/influx_classes.py src/classes/influx_classes.py +ADD src/classes/mqtt_classes.py src/classes/mqtt_classes.py # /helpers -> /solarlogger/helpers +ADD src/helpers/consts.py src/helpers/consts.py +ADD src/helpers/py_functions.py src/helpers/py_functions.py +ADD src/helpers/py_logger.py src/helpers/py_logger.py # Run instance -CMD [ "python", "app/solar_main.py" ] +CMD [ "python", "start_logger.py" ] FROM builder as influx-query ARG BASE_DIR # Add required modules +ADD start_query.py start_query.py # /app -> /solarlogger/app -ADD ./app/influx_query.py app/influx_query.py +ADD src/app/influx_query.py src/app/influx_query.py # /classes -> /solarlogger/classes -ADD ./classes/common_classes.py classes/common_classes.py -ADD ./classes/consts.py classes/consts.py -ADD ./classes/custom_exceptions.py classes/custom_exceptions.py -ADD ./classes/influx_classes.py classes/influx_classes.py -ADD ./classes/py_functions.py classes/py_functions.py -ADD ./classes/py_logger.py classes/py_logger.py -ADD ./classes/query_classes.py classes/query_classes.py +ADD
src/classes/common_classes.py src/classes/common_classes.py +ADD src/classes/custom_exceptions.py src/classes/custom_exceptions.py +ADD src/classes/influx_classes.py src/classes/influx_classes.py +ADD src/classes/query_classes.py src/classes/query_classes.py +# /helpers -> /solarlogger/helpers +ADD src/helpers/consts.py src/helpers/consts.py +ADD src/helpers/py_functions.py src/helpers/py_functions.py +ADD src/helpers/py_logger.py src/helpers/py_logger.py # Run instance -CMD [ "python", "-i", "app/influx_query.py"] +CMD [ "python", "-i", "start_query.py"] FROM python:3.10.2 as unit-tests ARG BASE_DIR="solar_logger" WORKDIR /solarlogger/ # Run updates RUN apt-get update && pip install --upgrade pip -ADD requirements.txt ./requirements.txt -ADD requirements-test.txt ./requirements-test.txt +ADD requirements.txt requirements.txt +ADD requirements-test.txt requirements-test.txt RUN pip install -r requirements-test.txt -ADD ./app/ app/ -ADD ./classes/ classes/ -ADD ./tests/ tests/ +ADD src/ src/ +ADD tests/ tests/ +ADD pyproject.toml pyproject.toml # Run instance -CMD [ "python", "-m", "pytest", "./tests"] +CMD [ "python", "-m", "pytest"] diff --git a/app/__init__.py b/src/__init__.py similarity index 100% rename from app/__init__.py rename to src/__init__.py diff --git a/classes/__init__.py b/src/app/__init__.py similarity index 100% rename from classes/__init__.py rename to src/app/__init__.py diff --git a/app/influx_query.py b/src/app/influx_query.py similarity index 76% rename from app/influx_query.py rename to src/app/influx_query.py index 7e06aeb..00953c9 100644 --- a/app/influx_query.py +++ b/src/app/influx_query.py @@ -4,15 +4,15 @@ https://docs.influxdata.com/influxdb/v2.0/api-guide/client-libraries/python/#query-data-from-influxdb-with-python """ -from classes.common_classes import SecretStore -from classes.consts import INFLUX_DEBUG_CONFIG_TITLE, INFLUX_QUERY_CONFIG_TITLE -from classes.influx_classes import InfluxConnector -from classes.py_functions import read_query_settings, write_results_to_csv -from classes.py_logger import create_logger -from classes.query_classes import QueryBuilder +from src.classes.common_classes import SecretStore +from src.classes.influx_classes import InfluxConnector +from src.classes.query_classes import QueryBuilder +from src.helpers.consts import INFLUX_DEBUG_CONFIG_TITLE, INFLUX_QUERY_CONFIG_TITLE +from src.helpers.py_functions import read_query_settings, write_results_to_csv +from src.helpers.py_logger import create_logger -class ParseQuery: +class QueryParser: """ Contains all subfunctions which parse the query results """ @@ -77,12 +77,12 @@ def execute_query(query: QueryBuilder) -> None: raise err if query_mode == "csv": - ParseQuery.parse_csv(query_result=query_result) + QueryParser.parse_csv(query_result=query_result) elif query_mode == "flux": - flux = ParseQuery.parse_flux(query_result=query_result) + flux = QueryParser.parse_flux(query_result=query_result) print(flux) else: - ParseQuery.parse_stream(query_result=query_result) + QueryParser.parse_stream(query_result=query_result) def run_example() -> None: @@ -100,6 +100,18 @@ def run_example() -> None: execute_query(query_builder) +logging = create_logger(INFLUX_DEBUG_CONFIG_TITLE) +secret_store = SecretStore(has_mqtt_access=False, has_influx_access=True) +influx_connector = InfluxConnector(secret_store.influx_secrets) +logging.info("Attempting health check for InfluxDB") +try: + influx_connector.health_check() + logging.info("Successfully connected to InfluxDB server") +except Exception as 
error: + logging.critical("Failed to connect to InfluxDB server") + raise error + + def main() -> None: """ Classes runtime which creates a query to an Influx database to view the tables @@ -112,17 +124,3 @@ def main() -> None: "Or run run_default() to run an example piece" ) run_example() - - -if __name__ == "__main__": - logging = create_logger(INFLUX_DEBUG_CONFIG_TITLE) - secret_store = SecretStore(has_mqtt_access=False, has_influx_access=True) - influx_connector = InfluxConnector(secret_store.influx_secrets) - logging.info("Attempting health check for InfluxDB") - try: - influx_connector.health_check() - logging.info("Successfully connected to InfluxDB server") - except Exception as error: - logging.critical("Failed to connect InfluxDB server") - raise error - main()
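Note that the connection bootstrap now sits at module level rather than behind the `__main__` guard, so importing `src.app.influx_query` creates the logger, reads the Influx secrets, and health-checks the server as a side effect of the import. A minimal interactive sketch, assuming the Influx environment variables are already exported:

```python
# Importing the module triggers create_logger(), SecretStore and the
# InfluxDB health check, so INFLUX_URL, INFLUX_ORG, INFLUX_BUCKET and
# INFLUX_TOKEN must be set in the environment before this line runs.
from src.app.influx_query import run_example

# Builds the bundled example query and routes it through execute_query(),
# which parses the result as csv, flux or stream output depending on the
# configured query_mode.
run_example()
```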
""" - secret_store = SecretStore(has_influx_access=True) - influx_connector = InfluxConnector(secret_store=secret_store) - logging.info("Attempting health check for InfluxDB") + try: + secret_store = SecretStore(has_influx_access=True) + influx_connector = InfluxConnector(secret_store=secret_store) + logging.info("Attempting health check for InfluxDB") + except Exception: + logging.exception("Failed to setup environment") + self.thread_events.clear() + return try: influx_connector.health_check() logging.info("InfluxDB health check succeeded") @@ -167,7 +172,9 @@ def run_threaded_mqtt_client(self): self.thread_events.clear() -if __name__ == "__main__": - # THREAD_EVENTS = threading.Event() - TR = ThreadedRunner() - TR.main() +def main(): + """ + Main runtime for solar logger, called from start_logger.py + """ + thread_runner = ThreadedRunner() + thread_runner.start() diff --git a/src/classes/__init__.py b/src/classes/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/classes/common_classes.py b/src/classes/common_classes.py similarity index 87% rename from classes/common_classes.py rename to src/classes/common_classes.py index 99a51c5..ce1273c 100644 --- a/classes/common_classes.py +++ b/src/classes/common_classes.py @@ -9,19 +9,8 @@ from dataclasses import dataclass from datetime import datetime -from classes.consts import MAX_PORT_RANGE -from classes.custom_exceptions import MissingCredentialsError - - -@dataclass -class QueuePackage: - """ - Data class which defines values that are pushed and popped off the global stack - """ - - measurement: str = None - time_field: datetime = None - field: str = None +from src.classes.custom_exceptions import MissingCredentialsError +from src.helpers.consts import MAX_PORT_RANGE class SecretStore: @@ -74,7 +63,10 @@ def _read_env_mqtt(self) -> dict: try: mqtt_port = int(os.environ.get("MQTT_PORT")) if mqtt_port not in range(0, MAX_PORT_RANGE): - raise ValueError( + logging.critical( + f"MQTT port outside maximum port range, 0-{MAX_PORT_RANGE}" + ) + raise MissingCredentialsError( f"MQTT port outside maximum port range, 0-{MAX_PORT_RANGE}" ) self._mqtt_secrets = { @@ -84,8 +76,9 @@ def _read_env_mqtt(self) -> dict: "mqtt_token": os.environ.get("MQTT_TOKEN"), "mqtt_topic": os.environ.get("MQTT_TOPIC"), } + assert None not in self._mqtt_secrets.values() assert "" not in self._mqtt_secrets.values() - except Exception as err: + except (AssertionError, TypeError) as err: logging.critical("Ran into error when reading environment variables") raise MissingCredentialsError( "Ran into error when reading environment variables" @@ -103,9 +96,21 @@ def _read_env_influx(self) -> dict: "influx_bucket": os.environ.get("INFLUX_BUCKET"), "influx_token": os.environ.get("INFLUX_TOKEN"), } + assert None not in self._influx_secrets.values() assert "" not in self._influx_secrets.values() - except Exception as err: + except AssertionError as err: logging.critical("Ran into error when reading environment variables") raise MissingCredentialsError( "Ran into error when reading environment variables" ) from err + + +@dataclass +class QueuePackage: + """ + Data class which defines values that are pushed and popped off the global stack + """ + + measurement: str = None + time_field: datetime = None + field: str = None diff --git a/classes/custom_exceptions.py b/src/classes/custom_exceptions.py similarity index 100% rename from classes/custom_exceptions.py rename to src/classes/custom_exceptions.py diff --git a/classes/influx_classes.py 
diff --git a/classes/custom_exceptions.py b/src/classes/custom_exceptions.py similarity index 100% rename from classes/custom_exceptions.py rename to src/classes/custom_exceptions.py diff --git a/classes/influx_classes.py b/src/classes/influx_classes.py similarity index 98% rename from classes/influx_classes.py rename to src/classes/influx_classes.py index 4483623..26cc065 100644 --- a/classes/influx_classes.py +++ b/src/classes/influx_classes.py @@ -9,7 +9,7 @@ from influxdb_client import InfluxDBClient from influxdb_client.client.write_api import SYNCHRONOUS -from classes.common_classes import QueuePackage, SecretStore +from src.classes.common_classes import QueuePackage, SecretStore class InfluxConnector: diff --git a/classes/mqtt_classes.py b/src/classes/mqtt_classes.py similarity index 86% rename from classes/mqtt_classes.py rename to src/classes/mqtt_classes.py index 568af13..01eb6e8 100644 --- a/classes/mqtt_classes.py +++ b/src/classes/mqtt_classes.py @@ -6,6 +6,7 @@ """ import logging +import ssl import struct import time from dataclasses import dataclass @@ -15,8 +16,8 @@ from paho.mqtt.client import Client, MQTTMessage from pymate.matenet import DCStatusPacket, FXStatusPacket, MXStatusPacket -from classes.common_classes import QueuePackage, SecretStore -from classes.consts import QUEUE_WAIT_TIME, THREADED_QUEUE, TIME_PACKET_SIZE +from src.classes.common_classes import QueuePackage, SecretStore +from src.helpers.consts import QUEUE_WAIT_TIME, THREADED_QUEUE, TIME_PACKET_SIZE class PyMateDecoder: @@ -121,21 +122,37 @@ def __init__( self._mqtt_client = Client() @staticmethod - def _on_subscribe(_client, _userdata, _mid, granted_qos) -> None: + def _on_socket_open(_client, userdata, sock) -> None: + """ + Debug function for logging socket openings from MQTT + """ + logging.debug(f"Socket open debug args, {userdata}, {sock}") + + @staticmethod + def _on_socket_close(_client, userdata, sock) -> None: + """ + Debug function for logging socket closes from MQTT + """ + logging.debug(f"Socket close debug args, {userdata}, {sock}") + + @staticmethod + def _on_subscribe(_client, userdata, mid, granted_qos) -> None: """ Logs when the MQTT client calls on_subscribe """ logging.info("Subscribed to MQTT topic, _on_subscribe") logging.info(f"MQTT topic returns QoS level of {granted_qos}") + logging.debug(f"Subscribe debug args, {userdata}, {mid}, {granted_qos}") @staticmethod - def _on_unsubscribe(_client, _userdata, _mid) -> None: + def _on_unsubscribe(_client, userdata, mid) -> None: """ Logs when MQTT calls on_unsubscribe """ logging.info("Unsubscribed from MQTT topic, _on_unsubscribe") + logging.debug(f"Unsubscribe debug args, {userdata}, {mid}") - def _on_connect(self, _client, _userdata, _flags, return_code) -> None: + def _on_connect(self, _client, userdata, flags, return_code) -> None: """ Logs when MQTT calls on_connect The value of rc indicates success or not @@ -150,20 +167,22 @@ } if return_code == 0: logging.info("Connecting to MQTT broker, _on_connect") - topic = f"{self._mqtt_secrets['mqtt_topic']}/#" + topic = f"{self._mqtt_secrets['mqtt_topic']}" self._mqtt_client.subscribe(topic=topic) else: logging.error( f"Couldn't connect to MQTT broker returned code: {return_code}\n" f"{return_codes[return_code]}" ) + logging.debug(f"Connect debug args, {userdata}, {flags}, {return_code}") @staticmethod - def _on_disconnect(_client, _userdata, _rc) -> None: + def _on_disconnect(_client, userdata, return_code) -> None: """ Logs when MQTT calls on_disconnect """ logging.warning("Disconnected from MQTT broker, _on_disconnect") + logging.debug(f"Disconnect debug args, {userdata}, {return_code}") def _check_status(self, msg: MQTTMessage) ->
None: """ @@ -211,7 +230,7 @@ def _decode_message(self, msg: MQTTMessage) -> None: fx_online = self._status[MqttTopics.fx_status] mx_online = self._status[MqttTopics.mx_status] - if msg.topic == MqttTopics.dc_data and dc_online: + if msg.topic == MqttTopics.dc_data and dc_online == "online": logging.info(f"Received {MqttTopics.dc_name} data packet") logging.debug(f"{MqttTopics.dc_name} payload: {msg.payload}") # NOTE: Due to errors in our packet packing, it introduces a random buffer at the end @@ -228,7 +247,7 @@ measurement=MqttTopics.dc_name, time_field=dc_time, payload=dc_payload ) - if msg.topic == MqttTopics.fx_data and fx_online: + if msg.topic == MqttTopics.fx_data and fx_online == "online": logging.info(f"Received {MqttTopics.fx_name} data packet") logging.debug(f"{MqttTopics.fx_name} payload: {msg.payload}") # NOTE: Due to errors in our packet packing, it introduces a random buffer at the end @@ -245,7 +264,7 @@ measurement=MqttTopics.fx_name, time_field=fx_time, payload=fx_payload ) - if msg.topic == MqttTopics.mx_data and mx_online: + if msg.topic == MqttTopics.mx_data and mx_online == "online": logging.info(f"Received {MqttTopics.mx_name} data packet") logging.debug(f"{MqttTopics.mx_name} payload: {msg.payload}") # NOTE: Due to errors in our packet packing, it introduces a random buffer at the end @@ -291,8 +310,13 @@ def get_mqtt_client(self) -> Client: self._mqtt_client.on_disconnect = self._on_disconnect self._mqtt_client.on_unsubscribe = self._on_unsubscribe self._mqtt_client.on_subscribe = self._on_subscribe + self._mqtt_client.on_socket_open = self._on_socket_open + self._mqtt_client.on_socket_close = self._on_socket_close + + self._mqtt_client.tls_set(cert_reqs=ssl.CERT_NONE) + self._mqtt_client.tls_insecure_set(True) - self._mqtt_client.connect( + _ = self._mqtt_client.connect( host=self._mqtt_secrets["mqtt_host"], port=self._mqtt_secrets["mqtt_port"], )
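After these changes get_mqtt_client() returns a paho Client with the logging callbacks attached, TLS configured (with certificate verification deliberately disabled) and connect() already issued, so a caller only has to service the network loop. A minimal sketch of running the connector standalone, assuming the MQTT environment variables are set; solar_main drives the same client from its own MQTT thread instead:

```python
from src.classes.common_classes import SecretStore
from src.classes.mqtt_classes import MqttConnector

# get_mqtt_client() wires the on_connect/subscribe/disconnect and socket
# callbacks, applies tls_set(cert_reqs=ssl.CERT_NONE) plus
# tls_insecure_set(True), and calls connect() on the configured host/port.
secret_store = SecretStore(has_mqtt_access=True)
mqtt_client = MqttConnector(secret_store=secret_store).get_mqtt_client()

# Block and service the connection; decoded DC/FX/MX packets end up on
# THREADED_QUEUE as QueuePackage items for the Influx writer thread.
mqtt_client.loop_forever()
```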
diff --git a/classes/query_classes.py b/src/classes/query_classes.py similarity index 100% rename from classes/query_classes.py rename to src/classes/query_classes.py diff --git a/config/config.ini b/src/config/config.ini similarity index 84% rename from config/config.ini rename to src/config/config.ini index 39289c6..f95b0d5 100644 --- a/config/config.ini +++ b/src/config/config.ini @@ -6,7 +6,7 @@ file_logging = true log_rotation = size_based file_location = output/ file_name = influx_logs.log -format = %%(asctime)s, %%(name)s, %%(levelname)s, %%(message)s +format = %%(asctime)s, %%(name)s, %%(threadName)s, %%(levelname)s, %%(message)s dateformat = %%d/%%m/%%Y, %%H:%%M:%%S ; Rotating file loggers require the following configs max_file_no = 5 @@ -22,8 +22,9 @@ file_logging = true log_rotation = size_based file_location = output/ file_name = solar_logs.log -format = %%(asctime)s, %%(name)s, %%(levelname)s, %%(message)s +format = %%(asctime)s, %%(name)s, %%(threadName)s, %%(levelname)s, %%(message)s dateformat = %%d/%%m/%%Y, %%H:%%M:%%S +mode = "a" ; Rotating file loggers require the following configs max_file_no = 5 time_cutover = "midnight" diff --git a/src/helpers/__init__.py b/src/helpers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/classes/consts.py b/src/helpers/consts.py similarity index 90% rename from classes/consts.py rename to src/helpers/consts.py index af58599..a459b1d 100644 --- a/classes/consts.py +++ b/src/helpers/consts.py @@ -5,7 +5,7 @@ from queue import Queue # Configs -CONFIG_FILENAME = "config/config.ini" # Py Functions +CONFIG_FILENAME = "src/config/config.ini" # Py Functions INFLUX_QUERY_CONFIG_TITLE = "query_settings" # Influx Query INFLUX_DEBUG_CONFIG_TITLE = "influx_debugger" # Influx Query SOLAR_DEBUG_CONFIG_TITLE = "solar_debugger" # Solar Runtime diff --git a/classes/py_functions.py b/src/helpers/py_functions.py similarity index 89% rename from classes/py_functions.py rename to src/helpers/py_functions.py index 8ca5d9d..786a12d 100644 --- a/classes/py_functions.py +++ b/src/helpers/py_functions.py @@ -2,12 +2,12 @@ Contains all functions that aren't directly correlated to Influx, MQTT, or logging """ -import configparser import csv import logging import os +from configparser import ConfigParser -from classes.consts import CONFIG_FILENAME +from src.helpers.consts import CONFIG_FILENAME def write_results_to_csv(config_name: str, table: dict) -> None: @@ -17,7 +17,7 @@ :param table: Resultant CSV query from the Influx database """ try: - config_parser = configparser.ConfigParser() + config_parser = ConfigParser() config_parser.read(CONFIG_FILENAME) file_location = config_parser.get(config_name, "csv_location") filename = config_parser.get(config_name, "csv_name") @@ -40,6 +40,6 @@ :param config_name: Section under the config for the configuration to pull data from :return: Query variables """ - config_parser = configparser.ConfigParser() + config_parser = ConfigParser() config_parser.read(CONFIG_FILENAME) return config_parser.get(section=config_name, option="query_mode") diff --git a/src/helpers/py_logger.py b/src/helpers/py_logger.py new file mode 100644 index 0000000..682250c --- /dev/null +++ b/src/helpers/py_logger.py @@ -0,0 +1,203 @@ +""" +Contains all functions required to setup logging +""" + +import logging +import os +from configparser import ConfigParser +from logging import Logger, StreamHandler +from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler + +from src.classes.custom_exceptions import MissingConfigurationError +from src.helpers.consts import CONFIG_FILENAME, SOLAR_DEBUG_CONFIG_TITLE + + +class LoggingTools: + """ + Class contains all tools required to create loggers + """ + + def __init__( + self, config_name: str, logger: Logger, config_dir: str = CONFIG_FILENAME + ) -> None: + """ + Initialization of logging class + """ + self._debug_level = None + self._file_format = None + self._date_format = None + self._is_file_logging = None + self._log_rotation = None + self._file_location = None + self._file_path = None + self._max_file_bytes = None + self._max_file_no = None + self._mode = None + self._logger = logger + self._config_dir = config_dir + self._config_name = config_name + self._config_parser = ConfigParser() + + def create_loggers(self): + """ + Creates loggers from the given configs + """ + self._create_stdout_logger(logger=self._logger) + if self._is_file_logging and self._log_rotation == "size_based": + self._create_rotating_file_logger(logger=self._logger) + elif self._is_file_logging and self._log_rotation == "time_based": + self._create_timed_rotating_file_logger(logger=self._logger) + + def _read_basic_config(self) -> None: + try: + debug_dict = { + "DEBUG": logging.DEBUG, + "INFO": logging.INFO, + "WARNING": logging.WARNING, + "ERROR": logging.ERROR, + "CRITICAL": logging.CRITICAL, + } + self._debug_level = debug_dict[ + self._config_parser.get(self._config_name, "debug_level") +
] + self._file_format = self._config_parser.get(self._config_name, "format") + self._date_format = self._config_parser.get(self._config_name, "dateformat") + self._is_file_logging = self._config_parser.getboolean( + self._config_name, "file_logging" + ) + except Exception as err: + logging.critical("Failed to read basic logger configs") + raise MissingConfigurationError( + "Failed to read basic logger configs" + ) from err + + if None in [self._debug_level, self._file_format, self._date_format]: + logging.critical("Failed to read basic logger configs") + raise MissingConfigurationError("Failed to read basic logger configs") + + def _read_extra_configs(self) -> None: + try: + self._log_rotation = self._config_parser.get( + self._config_name, "log_rotation" + ) + + self._file_location = self._config_parser.get( + self._config_name, "file_location" + ) + file_name = self._config_parser.get(self._config_name, "file_name") + self._file_path = self._file_location + file_name + self._max_file_bytes = int( + self._config_parser.get(self._config_name, "max_file_bytes") + ) + self._max_file_no = int( + self._config_parser.get(self._config_name, "max_file_no") + ) + if self._config_name == SOLAR_DEBUG_CONFIG_TITLE: + self._mode = str(self._config_parser.get(self._config_name, "mode")) + except Exception as err: + logging.critical("Failed to read file logger settings in configs") + raise MissingConfigurationError( + "Failed to read file logger settings in configs" + ) from err + + if ( + None + in [ + self._log_rotation, + self._file_location, + self._file_path, + self._max_file_bytes, + self._max_file_no, + ] + or (self._config_name == SOLAR_DEBUG_CONFIG_TITLE and self._mode is None) + or self._mode in ["None", ""] + ): + logging.critical("Failed to read file logger settings in configs") + raise MissingConfigurationError( + "Failed to read file logger settings in configs" + ) + + def read_configs(self) -> None: + """ + Reads the config file, then parses and stores the result + """ + self._config_parser.read(self._config_dir) + self._read_basic_config() + if self._is_file_logging: + self._read_extra_configs() + + def _create_stdout_logger(self, logger: Logger) -> None: + """ + Creates a standard STDOUT logger + """ + logger.setLevel(self._debug_level) + stream_handler = StreamHandler() + stream_handler.setLevel(self._debug_level) + log_formatter = logging.Formatter( + fmt=self._file_format, datefmt=self._date_format + ) + stream_handler.setFormatter(log_formatter) + logger.addHandler(stream_handler) + logging.info("Created stdout logger") + + def _create_rotating_file_logger(self, logger: Logger) -> None: + """ + Creates a rotating file logger which limits log files size + and when exceeding that size, creates a new log file + """ + if not os.path.exists(self._file_location): + os.makedirs(self._file_location) # pragma: no cover + + rotating_handler = RotatingFileHandler( + filename=self._file_path, + maxBytes=self._max_file_bytes, + backupCount=self._max_file_no, + mode=self._mode or "a", # fall back to append mode for sections with no mode config + ) + rotating_handler.setLevel(self._debug_level) + log_formatter = logging.Formatter( + fmt=self._file_format, datefmt=self._date_format + ) + rotating_handler.setFormatter(log_formatter) + logger.addHandler(rotating_handler) + logging.info(f"Created rotating file log file at {self._file_path}") + + def _create_timed_rotating_file_logger(self, logger: Logger) -> None: + """ + Creates a timed rotating file logger which rotates the log file + at midnight and keeps a limited number of backup files + """ + if not
os.path.exists(self._file_location): + os.makedirs(self._file_location) # pragma: no cover + rotating_time_handler = TimedRotatingFileHandler( + filename=self._file_path, + when="midnight", + backupCount=self._max_file_no, + ) + rotating_time_handler.suffix = "%Y-%m-%d" + rotating_time_handler.setLevel(self._debug_level) + log_formatter = logging.Formatter( + fmt=self._file_format, + datefmt=self._date_format, + ) + rotating_time_handler.setFormatter(log_formatter) + logger.addHandler(rotating_time_handler) + logging.info(f"Created time rotating file log file at {self._file_path}") + + +def create_logger( + config_name: str, config_dir: str = CONFIG_FILENAME +) -> Logger: # pragma: no cover + """ + Creates a logging instance, can be customized through the config.ini + :param config_name: Section under the config for the configuration to pull data from + :return: Logger for logging + """ + + logger = logging.getLogger() + logging_tools = LoggingTools( + config_name=config_name, config_dir=config_dir, logger=logger + ) + logging_tools.read_configs() + logging_tools.create_loggers() + return logger diff --git a/start_logger.py b/start_logger.py new file mode 100644 index 0000000..672a8f7 --- /dev/null +++ b/start_logger.py @@ -0,0 +1,8 @@ +#! /usr/bin/python3 +""" +Main run file for Solar Logger +""" +from src.app.solar_main import main + +if __name__ == "__main__": + main() diff --git a/start_query.py b/start_query.py new file mode 100644 index 0000000..65068a7 --- /dev/null +++ b/start_query.py @@ -0,0 +1,9 @@ +#! /usr/bin/python3 +""" +Main run file for Influx Query +""" + +from src.app.influx_query import main + +if __name__ == "__main__": + main()
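start_logger.py and start_query.py exist so the `src.` imports resolve from the repository root; any other entry point can obtain a configured logger the same way through create_logger. A small sketch, assuming the default config path baked into CONFIG_FILENAME:

```python
from src.helpers.consts import SOLAR_DEBUG_CONFIG_TITLE
from src.helpers.py_logger import create_logger

# Reads src/config/config.ini, attaches a stdout handler and, because
# file_logging is enabled for the solar_debugger section, a size-based
# rotating file handler writing to output/solar_logs.log.
logger = create_logger(SOLAR_DEBUG_CONFIG_TITLE)
logger.info("Logger configured from config.ini")
```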
diff --git a/tests/app/test_influx_query.py b/tests/app/test_influx_query.py index 6ee08f6..434270f 100644 --- a/tests/app/test_influx_query.py +++ b/tests/app/test_influx_query.py @@ -2,6 +2,9 @@ import pytest -@pytest.mark.skip("Influx Query testing not implemented yet") -def test_todo(): - raise NotImplementedError +class TestQueryParser: + """Test class for Query Parser""" + + @pytest.mark.skip("Influx Query testing not implemented yet") + def test_todo(self): + raise NotImplementedError diff --git a/tests/app/test_solar_main.py b/tests/app/test_solar_main.py index 563d71c..f933764 100644 --- a/tests/app/test_solar_main.py +++ b/tests/app/test_solar_main.py @@ -2,6 +2,9 @@ import pytest -@pytest.mark.skip("Solar Logger testing not implemented yet") -def test_todo(): - raise NotImplementedError +class TestThreadRunner: + """Test class for Thread Runner""" + + @pytest.mark.skip("Solar Logger testing not implemented yet") + def test_todo(self): + raise NotImplementedError diff --git a/tests/classes/test_common_classes.py b/tests/classes/test_common_classes.py index 42f34cd..4e2c7a1 100644 --- a/tests/classes/test_common_classes.py +++ b/tests/classes/test_common_classes.py @@ -1,87 +1,131 @@ # pylint: disable=missing-function-docstring, missing-module-docstring import logging import os -from sys import platform -from unittest import mock -import pytest -from pytest import LogCaptureFixture +from pytest import LogCaptureFixture, raises +from pytest_mock import MockerFixture + +from src.classes.common_classes import SecretStore +from src.classes.custom_exceptions import MissingCredentialsError +from tests.config.consts import ( + TEST_EMPTY_ENV, + TEST_ENV_FULL, + TEST_INFLUX_ENV, + TEST_MAX_PORT_RANGE, + TEST_MQTT_ENV, +) -from classes.common_classes import SecretStore -from classes.custom_exceptions import MissingCredentialsError -from tests.config.consts import TEST_ENV_FULL, TEST_INFLUX_ENV, TEST_MQTT_ENV +class TestSecretStore: + """Test class for the Secret Store""" -@pytest.mark.skipif( - platform == "linux", - reason="test_passes_secret_store_reads_env currently doesn't work on Linux due to patching os.enviro", -) -def test_passes_secret_store_reads_env(caplog: LogCaptureFixture): - caplog.set_level(logging.INFO) + def test_passes_secret_store_reads_full_env( + self, mocker: MockerFixture, caplog: LogCaptureFixture + ): + caplog.set_level(logging.INFO) + full_env_copy = TEST_ENV_FULL.copy() + full_env_copy["MQTT_PORT"] = int(TEST_ENV_FULL["MQTT_PORT"]) + full_env_copy = {key.lower(): value for key, value in full_env_copy.items()} + mocker.patch.dict(os.environ, TEST_ENV_FULL) - with mock.patch.dict(os.environ, TEST_ENV_FULL): secret_store = SecretStore(has_mqtt_access=True, has_influx_access=True) - joined_secret_store = dict(secret_store.influx_secrets, **secret_store.mqtt_secrets) - test_env_copy = TEST_ENV_FULL.copy() - test_env_copy["mqtt_port"] = int(TEST_ENV_FULL["mqtt_port"]) + joined_secret_store = dict( + secret_store.influx_secrets, **secret_store.mqtt_secrets + ) + + assert "Reading MQTT environment variables" in caplog.text + assert "Reading Influx environment variables" in caplog.text + assert joined_secret_store == full_env_copy + + def test_passes_secret_store_reads_mqtt_env( + self, mocker: MockerFixture, caplog: LogCaptureFixture + ): + caplog.set_level(logging.INFO) + mqtt_env_copy = TEST_MQTT_ENV.copy() + mqtt_env_copy["MQTT_PORT"] = int(TEST_MQTT_ENV["MQTT_PORT"]) + mqtt_env_copy = {key.lower(): value for key, value in mqtt_env_copy.items()} + mocker.patch.dict(os.environ, TEST_MQTT_ENV) - assert "Reading MQTT environment variables" in caplog.text - assert "Reading Influx environment variables" in caplog.text - assert joined_secret_store == test_env_copy + secret_store = SecretStore(has_mqtt_access=True) + assert "Reading MQTT environment variables" in caplog.text + assert "Reading Influx environment variables" not in caplog.text + assert secret_store.mqtt_secrets == mqtt_env_copy -@pytest.mark.skipif( - platform == "linux", - reason="test_passes_secret_store_reads_mqtt_env currently doesn't work on Linux due to patching os.enviro", -) -def test_passes_secret_store_reads_mqtt_env(caplog: LogCaptureFixture): - caplog.set_level(logging.INFO) + def test_passes_secret_store_reads_influx_env( + self, mocker: MockerFixture, caplog: LogCaptureFixture + ): + caplog.set_level(logging.INFO) + influx_env_copy = TEST_INFLUX_ENV.copy() + influx_env_copy = {key.lower(): value for key, value in influx_env_copy.items()} + mocker.patch.dict(os.environ, TEST_INFLUX_ENV) - with mock.patch.dict(os.environ, TEST_ENV_FULL): - secret_store = SecretStore(has_mqtt_access=True) - mqtt_env_copy = TEST_MQTT_ENV.copy() - mqtt_env_copy["mqtt_port"] = int(TEST_MQTT_ENV["mqtt_port"]) + secret_store = SecretStore(has_influx_access=True) - assert "Reading MQTT environment variables" in caplog.text - assert "Reading Influx environment variables" not in caplog.text - assert secret_store.mqtt_secrets == mqtt_env_copy + assert "Reading MQTT environment variables" not in caplog.text + assert "Reading Influx environment variables" in caplog.text + assert secret_store.influx_secrets == influx_env_copy + def test_fails_mqtt_env_contains_bad_port( + self, mocker: MockerFixture, caplog: LogCaptureFixture + ): + caplog.set_level(logging.CRITICAL) + mqtt_env_copy = TEST_MQTT_ENV.copy() + mqtt_env_copy["MQTT_PORT"] = str(TEST_MAX_PORT_RANGE
+ 5) + mocker.patch.dict(os.environ, mqtt_env_copy) -@pytest.mark.skipif( - platform == "linux", - reason="test_passes_secret_store_reads_influx_env currently doesn't work on Linux due to patching os.environ", -) -def test_passes_secret_store_reads_influx_env(caplog: LogCaptureFixture): - caplog.set_level(logging.INFO) + with raises(MissingCredentialsError): + _ = SecretStore(has_mqtt_access=True) - with mock.patch.dict(os.environ, TEST_ENV_FULL): - secret_store = SecretStore(has_influx_access=True) + assert ( + f"MQTT port outside maximum port range, 0-{TEST_MAX_PORT_RANGE}" + in caplog.text + ) + + def test_fails_secret_store_reads_none( + self, mocker: MockerFixture, caplog: LogCaptureFixture + ): + caplog.set_level(logging.INFO) + mocker.patch.dict(os.environ, TEST_ENV_FULL) - assert "Reading MQTT environment variables" not in caplog.text - assert "Reading Influx environment variables" in caplog.text - assert secret_store.influx_secrets == TEST_INFLUX_ENV + _ = SecretStore() + assert "Reading MQTT environment variables" not in caplog.text + assert "Reading Influx environment variables" not in caplog.text -def test_fails_secret_store_reads_none(caplog: LogCaptureFixture): - caplog.set_level(logging.INFO) + def test_fails_secret_store_reads_empty_mqtt_env( + self, mocker: MockerFixture, caplog: LogCaptureFixture + ): + caplog.set_level(logging.CRITICAL) + mocker.patch.dict(os.environ, TEST_EMPTY_ENV) - with mock.patch.dict(os.environ, TEST_ENV_FULL): - _ = SecretStore() + with raises(MissingCredentialsError): + _ = SecretStore(has_mqtt_access=True) + + assert "Ran into error when reading environment variables" in caplog.text - assert "Reading MQTT environment variables" not in caplog.text - assert "Reading Influx environment variables" not in caplog.text + def test_fails_secret_store_reads_empty_influx_env( + self, mocker: MockerFixture, caplog: LogCaptureFixture + ): + caplog.set_level(logging.CRITICAL) + mocker.patch.dict(os.environ, TEST_EMPTY_ENV) + with raises(MissingCredentialsError): + _ = SecretStore(has_influx_access=True) -def test_fails_secret_store_empty_values(caplog: LogCaptureFixture): - caplog.set_level(logging.CRITICAL) + assert "Ran into error when reading environment variables" in caplog.text - mqtt_env_copy = TEST_ENV_FULL.copy() - mqtt_env_copy["mqtt_user"] = "" - mqtt_env_copy["influx_url"] = "" + def test_fails_secret_store_empty_values( + self, mocker: MockerFixture, caplog: LogCaptureFixture + ): + caplog.set_level(logging.CRITICAL) + mqtt_env_copy = TEST_ENV_FULL.copy() + mqtt_env_copy["MQTT_USER"] = "" + mqtt_env_copy["INFLUX_URL"] = "" + mocker.patch.dict(os.environ, mqtt_env_copy) - with mock.patch.dict(os.environ, mqtt_env_copy): - with pytest.raises(MissingCredentialsError) as err: + with raises(MissingCredentialsError) as err: _ = SecretStore(has_mqtt_access=True, has_influx_access=True) - assert "Ran into error when reading environment variables" in caplog.text - assert str(err.value) == "Ran into error when reading environment variables" + assert "Ran into error when reading environment variables" in caplog.text + assert str(err.value) == "Ran into error when reading environment variables" diff --git a/tests/classes/test_influx_classes.py b/tests/classes/test_influx_classes.py index 4a8cdff..e503f2d 100644 --- a/tests/classes/test_influx_classes.py +++ b/tests/classes/test_influx_classes.py @@ -1,118 +1,116 @@ -# pylint: disable=missing-function-docstring, missing-module-docstring, missing-class-docstring - - +# pylint: 
disable=missing-function-docstring, missing-module-docstring import logging -from unittest import mock -import pytest from influxdb_client import QueryApi, WriteApi -from pytest import LogCaptureFixture +from pytest import LogCaptureFixture, mark, raises +from pytest_mock import MockerFixture -from classes.common_classes import QueuePackage -from classes.influx_classes import InfluxConnector +from src.classes.common_classes import QueuePackage +from src.classes.influx_classes import InfluxConnector from tests.config.consts import FAKE, TestSecretStore -def test_passes_connector_init(caplog: LogCaptureFixture): - caplog.set_level(logging.INFO) - - _ = InfluxConnector(secret_store=TestSecretStore) - - assert "Initializing InfluxDB client" in caplog.text - assert "Initializing Influx write api" in caplog.text - assert "Initializing Influx query api" in caplog.text +class TestInfluxConnector: + """Test class for Influx Connector""" + def test_passes_connector_init(self, caplog: LogCaptureFixture): + caplog.set_level(logging.INFO) -def test_passes_health_check(caplog: LogCaptureFixture): - caplog.set_level(logging.INFO) - influx_connector = InfluxConnector(secret_store=TestSecretStore) - - with mock.patch("classes.influx_classes.InfluxDBClient.ready"): - influx_connector.health_check() + _ = InfluxConnector(secret_store=TestSecretStore) + assert "Initializing InfluxDB client" in caplog.text + assert "Initializing Influx write api" in caplog.text + assert "Initializing Influx query api" in caplog.text -def test_fails_health_check_raises_exception(caplog: LogCaptureFixture): - caplog.set_level(logging.ERROR) - influx_connector = InfluxConnector(secret_store=TestSecretStore) + def test_passes_health_check( + self, + mocker: MockerFixture, + caplog: LogCaptureFixture, + ): + caplog.set_level(logging.INFO) + influx_connector = InfluxConnector(secret_store=TestSecretStore) + mocker.patch("src.classes.influx_classes.InfluxDBClient.ready") - with mock.patch( - "classes.influx_classes.InfluxDBClient.ready", side_effect=Exception - ) and pytest.raises(Exception): influx_connector.health_check() + def test_passes_write_points( + self, mocker: MockerFixture, caplog: LogCaptureFixture + ): + write_api = mocker.patch("src.classes.influx_classes.InfluxDBClient.write_api") + write_api.return_value = mocker.MagicMock(WriteApi, return_value=None) + caplog.set_level(logging.DEBUG) + influx_connector = InfluxConnector(secret_store=TestSecretStore) + queue_package = QueuePackage( + measurement=FAKE.pystr(), + time_field=FAKE.date_time(), + field={FAKE.pystr(): FAKE.pyfloat(4)}, + ) -@mock.patch("classes.influx_classes.InfluxDBClient.write_api") -def test_passes_write_points(write_api, caplog: LogCaptureFixture): - write_api.return_value = mock.MagicMock(WriteApi, return_value=None) - caplog.set_level(logging.DEBUG) - influx_connector = InfluxConnector(secret_store=TestSecretStore) - queue_package = QueuePackage( - measurement=FAKE.pystr(), - time_field=FAKE.date_time(), - field={FAKE.pystr(): FAKE.pyfloat(4)}, - ) - - influx_connector.write_points(queue_package=queue_package) - + influx_connector.write_points(queue_package=queue_package) -@pytest.mark.parametrize( - "queue_package, error_message", - [ - [None, "The received queue_packed has malformed data: queue_package empty"], - [ - QueuePackage( - measurement=None, # Bad data - time_field=FAKE.date_time(), - field={FAKE.pystr(): FAKE.pyfloat(4)}, - ), - "The received queue_packed has malformed data: type of measurement not str", - ], + @mark.parametrize( + 
"queue_package, error_message", [ - QueuePackage( - measurement=FAKE.pystr(), - time_field=None, # Bad data - field={FAKE.pystr(): FAKE.pyfloat(4)}, - ), - "The received queue_packed has malformed data: type of time_field not, datetime", + [None, "The received queue_packed has malformed data: queue_package empty"], + [ + QueuePackage( + measurement=None, # Bad data + time_field=FAKE.date_time(), + field={FAKE.pystr(): FAKE.pyfloat(4)}, + ), + "The received queue_packed has malformed data: type of measurement not str", + ], + [ + QueuePackage( + measurement=FAKE.pystr(), + time_field=None, # Bad data + field={FAKE.pystr(): FAKE.pyfloat(4)}, + ), + "The received queue_packed has malformed data: type of time_field not, datetime", + ], + [ + QueuePackage( + measurement=FAKE.pystr(), + time_field=FAKE.date_time(), + field=None, # Bad data + ), + "The received queue_packed has malformed data: type of field not, dict | str", + ], ], - [ - QueuePackage( - measurement=FAKE.pystr(), - time_field=FAKE.date_time(), - field=None, # Bad data - ), - "The received queue_packed has malformed data: type of field not, dict | str", - ], - ], -) -@mock.patch("classes.influx_classes.InfluxDBClient.write_api") -def test_fails_write_points_bad_data( - write_api, queue_package, error_message, caplog: LogCaptureFixture -): - write_api.return_value = mock.MagicMock(WriteApi, return_value=None) - caplog.set_level(logging.DEBUG) - influx_connector = InfluxConnector(secret_store=TestSecretStore) - - with pytest.raises(Exception) as err: - influx_connector.write_points(queue_package=queue_package) - assert str(err.value) == error_message - - -@pytest.mark.parametrize("query_mode", ["csv", "flux", "stream"]) -@mock.patch("classes.influx_classes.InfluxDBClient.query_api") -def test_passes_query_database_modes_return( - query_api, query_mode, caplog: LogCaptureFixture -): - query_api.return_value = mock.MagicMock(QueryApi) - queue_package = FAKE.pystr() - query_api.return_value.query_csv.return_value = queue_package - query_api.return_value.query.return_value = queue_package - query_api.return_value.query_stream.return_value = queue_package - caplog.set_level(logging.DEBUG) - - caplog.set_level(logging.DEBUG) - influx_connector = InfluxConnector(secret_store=TestSecretStore) - result = influx_connector.query_database(query_mode=query_mode, query=queue_package) - - assert result is queue_package - assert "Query to Influx server was successful" in caplog.text + ) + def test_fails_write_points_bad_data( + self, + mocker: MockerFixture, + queue_package: str, + error_message: str, + caplog: LogCaptureFixture, + ): + write_api = mocker.patch("src.classes.influx_classes.InfluxDBClient.write_api") + write_api.return_value = mocker.MagicMock(WriteApi, return_value=None) + caplog.set_level(logging.DEBUG) + influx_connector = InfluxConnector(secret_store=TestSecretStore) + + with raises(Exception) as err: + influx_connector.write_points(queue_package=queue_package) + assert str(err.value) == error_message + + @mark.parametrize("query_mode", ["csv", "flux", "stream"]) + def test_passes_query_database_modes_return( + self, mocker: MockerFixture, query_mode: str, caplog: LogCaptureFixture + ): + query_api = mocker.patch("src.classes.influx_classes.InfluxDBClient.query_api") + query_api.return_value = mocker.MagicMock(QueryApi) + queue_package = FAKE.pystr() + query_api.return_value.query_csv.return_value = queue_package + query_api.return_value.query.return_value = queue_package + query_api.return_value.query_stream.return_value = 
+ caplog.set_level(logging.DEBUG) + + influx_connector = InfluxConnector(secret_store=TestSecretStore) + result = influx_connector.query_database( + query_mode=query_mode, query=queue_package + ) + + assert result is queue_package + assert "Query to Influx server was successful" in caplog.text diff --git a/tests/classes/test_mqtt_classes.py b/tests/classes/test_mqtt_classes.py index 2d99c44..ada4435 100644 --- a/tests/classes/test_mqtt_classes.py +++ b/tests/classes/test_mqtt_classes.py @@ -1,12 +1,28 @@ -# pylint: disable=missing-function-docstring, missing-module-docstring, missing-class-docstring +# pylint: disable=missing-function-docstring, missing-module-docstring, redefined-outer-name, protected-access, duplicate-code +import logging +from datetime import datetime -from unittest import mock - -from paho.mqtt.client import Client +from paho.mqtt.client import Client, MQTTMessage from pymate.value import Value +from pytest import LogCaptureFixture, fixture, mark, raises +from pytest_mock import MockerFixture + +from src.classes.common_classes import QueuePackage +from src.classes.mqtt_classes import MqttConnector, MqttTopics, PyMateDecoder +from src.helpers.consts import THREADED_QUEUE +from tests.config.consts import ( + FAKE, + TEST_MAX_QUEUE_LENGTH, + TestDC, + TestFX, + TestMqttTopics, + TestMX, + TestSecretStore, +) + -from classes.mqtt_classes import MqttConnector, PyMateDecoder -from tests.config.consts import TestSecretStore +class TestError(Exception): + """Custom exception for use in tests""" def dict_to_str(dictionary: dict): @@ -16,112 +32,529 @@ def dict_to_str(dictionary: dict): return result -def test_passes_fx_decoder(): - fx_bytearray = b"\x00\x00\x00\x04t\x00\x04\x00\x02\x01\x12\t\x00" - fx_array = { - "ac_mode": 2, - "aux_on": False, - "battery_voltage": "27.4V", - "buy_current": "0.0A", - "chg_current": "0.0A", - "error_mode": 0, - "input_voltage": "8V", - "inverter_current": "0.0A", - "is_230v": True, - "misc": 9, - "operational_mode": 4, - "output_voltage": "232V", - "sell_current": "0.0A", - "warnings": 0, - } - - pymate_decoder = PyMateDecoder() - decoded_result = pymate_decoder.fx_decoder(fx_bytearray) - str_decoded_result = dict_to_str(decoded_result) - str_fx_array = dict_to_str(fx_array) - - assert isinstance(decoded_result["output_voltage"], Value) - assert str_decoded_result == str_fx_array - - -def test_passes_mx_decoder(): - mx_bytearray = b"\x87\x85\x8b\x00t\x08\x02\x00 \x01\x0f\x02\xa4" - mx_array = { - "amp_hours": "116Ah", - "aux_mode": 8, - "aux_state": False, - "bat_current": "11.7A", - "bat_voltage": "27.1V", - "errors": 0, - "kilowatt_hours": "3.2kWh", - "pv_current": "5A", - "pv_voltage": "67.6V", - "status": 2, - } - - pymate_decoder = PyMateDecoder() - decoded_result = pymate_decoder.mx_decoder(mx_bytearray) - str_decoded_result = dict_to_str(decoded_result) - str_mx_array = dict_to_str(mx_array) - - assert isinstance(decoded_result["amp_hours"], Value) - assert str_decoded_result == str_mx_array - - -def test_passes_dc_decoder(): - dc_bytearray = ( - b"\xff\xe8\x00l\x00\x00\x01\x11d\xff\xf9\x00\x1d\x00\x00\x00!\x00l" - b"\x00\x18\x00T\x00\x1d\x00\x07\x00\x16\x00\x1b\x00\x0e\x00\r\x00J\x00\x1f\x00+" - b"\x00\x0b\x00\x03\x00\t\x00\x0c\x00\x00\x00\x04\x00\x04\xff\xf7\x00\x0c\x00\x00" - b"\xff\xfc\x00\x04\x00\x00c\x00\x00\x00\x02\x15\x00\x00\x00\x00\x00" +@fixture +def mqtt_fixture(): + mqtt_connector = MqttConnector(secret_store=TestSecretStore) + return mqtt_connector + +
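+# Test helper: builds a mocked paho MQTTMessage carrying an ascii-encoded payload +def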
create_mqtt_message(mocker: MockerFixture, topic: str, payload: str) -> MQTTMessage: + mqtt_message = mocker.MagicMock(MQTTMessage) + mqtt_message.topic = topic + mqtt_message.payload = bytes(payload, "ascii") + return mqtt_message + + +def setup_service_status(mqtt_fixture: MqttConnector, status: str) -> None: + status_values = [ + TestMqttTopics.mate_status, + TestMqttTopics.dc_status, + TestMqttTopics.fx_status, + TestMqttTopics.mx_status, + ] + for value in status_values: + mqtt_fixture._status[value] = status + + +class TestPyMateDecoder: + """Test class for PyMate Decoder""" + + def test_passes_detach_time(self): + pymate_decoder = PyMateDecoder() + result = pymate_decoder.detach_time(msg=TestFX.bytearray, padding_at_end=2) + + assert result == (67108864, b"t\x00\x04\x00\x02\x01\x12") + + def test_passes_dc_decoder(self): + pymate_decoder = PyMateDecoder() + decoded_result = pymate_decoder.dc_decoder(TestDC.bytearray) + str_decoded_result = dict_to_str(decoded_result) + str_dc_array = dict_to_str(TestDC.array) + + assert isinstance(decoded_result["bat_current"], Value) + assert str_decoded_result == str_dc_array + + def test_passes_fx_decoder(self): + pymate_decoder = PyMateDecoder() + decoded_result = pymate_decoder.fx_decoder(TestFX.bytearray) + str_decoded_result = dict_to_str(decoded_result) + str_fx_array = dict_to_str(TestFX.array) + + assert isinstance(decoded_result["output_voltage"], Value) + assert str_decoded_result == str_fx_array + + def test_passes_mx_decoder(self): + pymate_decoder = PyMateDecoder() + decoded_result = pymate_decoder.mx_decoder(TestMX.bytearray) + str_decoded_result = dict_to_str(decoded_result) + str_mx_array = dict_to_str(TestMX.array) + + assert isinstance(decoded_result["amp_hours"], Value) + assert str_decoded_result == str_mx_array + + +def test_mqtt_topics_consistent(): + def get_custom_attributes(cls): + return {func: getattr(cls, func) for func in dir(cls) if func[0] != "_"} + + defined_vars = get_custom_attributes(MqttTopics) + test_dict = get_custom_attributes(TestMqttTopics) + assert defined_vars == test_dict + + +class TestMqttConnector: + """Test class for MQTT Connector""" + + def test_logs_on_socket_open( + self, mqtt_fixture: MqttConnector, caplog: LogCaptureFixture + ): + caplog.set_level(logging.DEBUG) + userdata = FAKE.pystr() + sock = FAKE.pystr() + + mqtt_fixture._on_socket_open(_client=FAKE.pystr(), userdata=userdata, sock=sock) + + assert f"Socket open debug args, {userdata}, {sock}" in caplog.text + + def test_logs_on_socket_close( + self, mqtt_fixture: MqttConnector, caplog: LogCaptureFixture + ): + caplog.set_level(logging.DEBUG) + userdata = FAKE.pystr() + sock = FAKE.pystr() + + mqtt_fixture._on_socket_close( + _client=FAKE.pystr(), userdata=userdata, sock=sock + ) + + assert f"Socket close debug args, {userdata}, {sock}" in caplog.text + + def test_logs_on_subscribe( + self, mqtt_fixture: MqttConnector, caplog: LogCaptureFixture + ): + caplog.set_level(logging.DEBUG) + granted_qos = FAKE.pyint() + mid = FAKE.pystr() + userdata = FAKE.pystr() + + mqtt_fixture._on_subscribe( + _client=FAKE.pystr(), userdata=userdata, mid=mid, granted_qos=granted_qos + ) + + assert "Subscribed to MQTT topic" in caplog.text + assert f"MQTT topic returns QoS level of {granted_qos}" in caplog.text + assert f"Subscribe debug args, {userdata}, {mid}, {granted_qos}" in caplog.text + + def test_logs_on_unsubscribe( + self, mqtt_fixture: MqttConnector, caplog: LogCaptureFixture + ): + caplog.set_level(logging.DEBUG) + mid = FAKE.pystr() + userdata = 
FAKE.pystr() + + mqtt_fixture._on_unsubscribe(_client=FAKE.pystr(), userdata=userdata, mid=mid) + + assert "Unsubscribed from MQTT topic" in caplog.text + assert f"Unsubscribe debug args, {userdata}, {mid}" in caplog.text + + def test_on_connect_calls_subscribe( + self, + mocker: MockerFixture, + mqtt_fixture: MqttConnector, + caplog: LogCaptureFixture, + ): + caplog.set_level(logging.INFO) + subscribe = mocker.patch("src.classes.mqtt_classes.Client.subscribe") + userdata = FAKE.pystr() + flags = FAKE.pystr() + return_code = 0 + + mqtt_fixture._on_connect( + _client=FAKE.pystr(), + userdata=userdata, + flags=flags, + return_code=return_code, + ) + + subscribe.assert_called_once_with( + topic=f"{TestSecretStore.mqtt_secrets['mqtt_topic']}" + ) + assert "Connecting to MQTT broker" in caplog.text + + @mark.parametrize("return_code", [1, 2, 3, 4, 5]) + def test_on_connect_fails_with_bad_return_code( + self, + mqtt_fixture: MqttConnector, + return_code: int, + caplog: LogCaptureFixture, + ): + caplog.set_level(logging.DEBUG) + userdata = FAKE.pystr() + flags = FAKE.pystr() + return_codes = { + 0: "Connection successful", + 1: "Connection refused - incorrect protocol version", + 2: "Connection refused - invalid client identifier", + 3: "Connection refused - server unavailable", + 4: "Connection refused - bad username or password", + 5: "Connection refused - not authorized", + } + + mqtt_fixture._on_connect( + _client=FAKE.pystr(), + userdata=userdata, + flags=flags, + return_code=return_code, + ) + + assert ( + f"Couldn't connect to MQTT broker returned code: {return_code}\n" + f"{return_codes[return_code]}" + ) in caplog.text + assert f"Connect debug args, {userdata}, {flags}, {return_code}" in caplog.text + + def test_logs_on_disconnect( + self, mqtt_fixture: MqttConnector, caplog: LogCaptureFixture + ): + caplog.set_level(logging.DEBUG) + userdata = FAKE.pystr() + return_code = FAKE.pyint() + + mqtt_fixture._on_disconnect( + _client=FAKE.pystr(), userdata=userdata, return_code=return_code + ) + + assert "Disconnected from MQTT broker" in caplog.text + assert f"Disconnect debug args, {userdata}, {return_code}" in caplog.text + + @mark.parametrize( + "status", + [ + "online", + "offline", + ], + ) + @mark.parametrize( + "topic", + [ + TestMqttTopics.mate_status, + TestMqttTopics.dc_status, + TestMqttTopics.fx_status, + TestMqttTopics.mx_status, + ], + ) + def test_check_status_goes_offline( + self, + mocker: MockerFixture, + mqtt_fixture: MqttConnector, + topic: str, + status: str, + caplog: LogCaptureFixture, + ): + caplog.set_level(logging.INFO) + # Force everything online and test the change to offline + if status == "online": + setup_service_status(mqtt_fixture=mqtt_fixture, status="offline") + else: + setup_service_status(mqtt_fixture=mqtt_fixture, status="online") + mqtt_message = create_mqtt_message(mocker=mocker, topic=topic, payload=status) + + mqtt_fixture._check_status(msg=mqtt_message) + + if status == "online": + assert f"{topic} is now {status}" in caplog.text + assert f"{topic} has gone {status}" not in caplog.text + else: + assert f"{topic} is now {status}" not in caplog.text + assert f"{topic} has gone {status}" in caplog.text + assert mqtt_fixture._status[topic] == status + + def test_passes_load_queue( + self, mqtt_fixture: MqttConnector, caplog: LogCaptureFixture + ): + caplog.set_level(logging.INFO) + measurement = FAKE.pystr() + time_field = FAKE.date() + payload_key = FAKE.pystr() + payload_value = FAKE.pyfloat() + + mqtt_fixture._load_queue( + measurement=measurement, + 
time_field=time_field, + payload={ + payload_key: str(payload_value), + }, + ) + + assert QueuePackage( + measurement=measurement, + time_field=time_field, + field={payload_key: float(payload_value)}, + ) == THREADED_QUEUE.get(timeout=5) + assert "Pushed items onto queue, queue now has 1 items" in caplog.text + + def test_waits_on_max_queue( + self, + mocker: MockerFixture, + mqtt_fixture: MqttConnector, + caplog: LogCaptureFixture, + ): + caplog.set_level(logging.INFO) + measurement = FAKE.pystr() + time_field = FAKE.date() + payload_key = FAKE.pystr() + payload_value = FAKE.pyfloat() + + test_queue = [] + mocker.patch("src.classes.mqtt_classes.time.sleep", side_effect=TestError) + with raises(TestError): + for _ in range(0, TEST_MAX_QUEUE_LENGTH + 1): + mqtt_fixture._load_queue( + measurement=measurement, + time_field=time_field, + payload={ + payload_key: str(payload_value), + }, + ) + test_queue.append( + QueuePackage( + measurement=measurement, + time_field=time_field, + field={payload_key: float(payload_value)}, + ) + ) + + result_queue = [] + while not THREADED_QUEUE.empty(): + result_queue.append(THREADED_QUEUE.get(timeout=1)) + zipped_queues = zip(test_queue, result_queue) + + for test_item, result_item in zipped_queues: + assert test_item == result_item + assert ( + f"Pushed items onto queue, queue now has {TEST_MAX_QUEUE_LENGTH - 1} items" + in caplog.text + ) + + @mark.parametrize( + "message_type", + [TestMqttTopics.dc_data, TestMqttTopics.fx_data, TestMqttTopics.mx_data], ) - dc_array = { - "bat_ah_today": "13Ah", - "bat_current": "8.4A", - "bat_kwh_today": "0.43kWh", - "bat_net_ah": "0Ah", - "bat_net_kwh": "0.02kWh", - "bat_power": "0.22kW", - "bat_voltage": "27.3V", - "days_since_full": "1.1days", - "flags": 33, - "in_ah_today": "27Ah", - "in_current": "10.8A", - "in_kwh_today": "0.74kWh", - "in_power": "0.29kW", - "min_soc_today": "99%", - "out_ah_today": "14Ah", - "out_current": "2.4A", - "out_kwh_today": "0.31kWh", - "out_power": "0.07kW", - "shunta_ah_today": "-4Ah", - "shunta_current": "-2.4A", - "shunta_kwh_today": "-0.09kWh", - "shunta_power": "-0.07kW", - "shuntb_ah_today": "4Ah", - "shuntb_current": "10.8A", - "shuntb_kwh_today": "0.12kWh", - "shuntb_power": "0.29kW", - "shuntc_ah_today": "0Ah", - "shuntc_current": "0.0A", - "shuntc_kwh_today": "0.00kWh", - "shuntc_power": "0.00kW", - "state_of_charge": "100%", - } - - pymate_decoder = PyMateDecoder() - decoded_result = pymate_decoder.dc_decoder(dc_bytearray) - str_decoded_result = dict_to_str(decoded_result) - str_dc_array = dict_to_str(dc_array) - - assert isinstance(decoded_result["bat_current"], Value) - assert str_decoded_result == str_dc_array - - -def test_passes_get_mqtt_client(): - mqtt_connector = MqttConnector(TestSecretStore) - - with mock.patch("classes.mqtt_classes.Client.connect"): + def test_no_message_decoding_when_offline( + self, + mocker: MockerFixture, + mqtt_fixture: MqttConnector, + message_type: str, + ): + payload = FAKE.pystr() + detach_time = mocker.patch("src.classes.mqtt_classes.PyMateDecoder.detach_time") + detach_time.side_effect = AssertionError + setup_service_status(mqtt_fixture=mqtt_fixture, status="offline") + mqtt_message = create_mqtt_message( + mocker=mocker, topic=message_type, payload=payload + ) + + mqtt_fixture._decode_message(msg=mqtt_message) + + detach_time.assert_not_called() + + def test_passes_decode_message_dc( + self, + mocker: MockerFixture, + mqtt_fixture: MqttConnector, + caplog: LogCaptureFixture, + ): + caplog.set_level(logging.DEBUG)
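+ # A faked epoch time stands in for the timestamp detach_time() splits off the packet + msg_time =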
FAKE.unix_time() + msg_timestamp = datetime.fromtimestamp(msg_time) + payload = FAKE.pystr() + detach_time = mocker.patch("src.classes.mqtt_classes.PyMateDecoder.detach_time") + load_queue = mocker.patch("src.classes.mqtt_classes.MqttConnector._load_queue") + detach_time.return_value = (msg_time, FAKE.pystr()) + dc_decoder = mocker.patch("src.classes.mqtt_classes.PyMateDecoder.dc_decoder") + dc_decoder.return_value = payload + setup_service_status(mqtt_fixture=mqtt_fixture, status="online") + mqtt_message = create_mqtt_message( + mocker=mocker, topic=TestMqttTopics.dc_data, payload=payload + ) + + mqtt_fixture._decode_message(msg=mqtt_message) + + detach_time.assert_called_once() + dc_decoder.assert_called_once() + assert f"Received {TestMqttTopics.dc_name} data packet" in caplog.text + assert f"{TestMqttTopics.dc_name} payload: {payload}" in caplog.text + assert ( + f"Decoded and split {TestMqttTopics.dc_name} payload: {payload} at {msg_timestamp}" + in caplog.text + ) + load_queue.assert_called_with( + measurement=TestMqttTopics.dc_name, + time_field=msg_timestamp, + payload=payload, + ) + + def test_passes_decode_message_fx( + self, + mocker: MockerFixture, + mqtt_fixture: MqttConnector, + caplog: LogCaptureFixture, + ): + caplog.set_level(logging.DEBUG) + + msg_time = FAKE.unix_time() + msg_timestamp = datetime.fromtimestamp(msg_time) + payload = FAKE.pystr() + detach_time = mocker.patch("src.classes.mqtt_classes.PyMateDecoder.detach_time") + load_queue = mocker.patch("src.classes.mqtt_classes.MqttConnector._load_queue") + detach_time.return_value = (msg_time, FAKE.pystr()) + fx_decoder = mocker.patch("src.classes.mqtt_classes.PyMateDecoder.fx_decoder") + fx_decoder.return_value = payload + setup_service_status(mqtt_fixture=mqtt_fixture, status="online") + mqtt_message = create_mqtt_message( + mocker=mocker, topic=TestMqttTopics.fx_data, payload=payload + ) + + mqtt_fixture._decode_message(msg=mqtt_message) + + detach_time.assert_called_once() + fx_decoder.assert_called_once() + assert f"Received {TestMqttTopics.fx_name} data packet" in caplog.text + assert f"{TestMqttTopics.fx_name} payload: {payload}" in caplog.text + assert ( + f"Decoded and split {TestMqttTopics.fx_name} payload: {payload} at {msg_timestamp}" + in caplog.text + ) + load_queue.assert_called_with( + measurement=TestMqttTopics.fx_name, + time_field=msg_timestamp, + payload=payload, + ) + + def test_passes_decode_message_mx( + self, + mocker: MockerFixture, + mqtt_fixture: MqttConnector, + caplog: LogCaptureFixture, + ): + caplog.set_level(logging.DEBUG) + + msg_time = FAKE.unix_time() + msg_timestamp = datetime.fromtimestamp(msg_time) + payload = FAKE.pystr() + detach_time = mocker.patch("src.classes.mqtt_classes.PyMateDecoder.detach_time") + load_queue = mocker.patch("src.classes.mqtt_classes.MqttConnector._load_queue") + detach_time.return_value = (msg_time, FAKE.pystr()) + mx_decoder = mocker.patch("src.classes.mqtt_classes.PyMateDecoder.mx_decoder") + mx_decoder.return_value = payload + setup_service_status(mqtt_fixture=mqtt_fixture, status="online") + mqtt_message = create_mqtt_message( + mocker=mocker, topic=TestMqttTopics.mx_data, payload=payload + ) + + mqtt_fixture._decode_message(msg=mqtt_message) + + detach_time.assert_called_once() + mx_decoder.assert_called_once() + assert f"Received {TestMqttTopics.mx_name} data packet" in caplog.text + assert f"{TestMqttTopics.mx_name} payload: {payload}" in caplog.text + assert ( + f"Decoded and split {TestMqttTopics.mx_name} payload: {payload} at {msg_timestamp}" + in 
caplog.text + ) + load_queue.assert_called_with( + measurement=TestMqttTopics.mx_name, + time_field=msg_timestamp, + payload=payload, + ) + + def test_on_message_calls_decode( + self, + mocker: MockerFixture, + mqtt_fixture: MqttConnector, + caplog: LogCaptureFixture, + ): + caplog.set_level(logging.INFO) + check_status = mocker.patch( + "src.classes.mqtt_classes.MqttConnector._check_status" + ) + decode_messages = mocker.patch( + "src.classes.mqtt_classes.MqttConnector._decode_message" + ) + setup_service_status(mqtt_fixture=mqtt_fixture, status="online") + mqtt_message = create_mqtt_message( + mocker=mocker, topic=TestMqttTopics.dc_name, payload=FAKE.pystr() + ) + + mqtt_fixture._on_message( + _client=FAKE.pystr(), _userdata=FAKE.pystr(), msg=mqtt_message + ) + + check_status.assert_called_once_with(msg=mqtt_message) + decode_messages.assert_called_once_with(msg=mqtt_message) + assert caplog.text == "" + + def test_on_message_warns_when_offline( + self, + mocker: MockerFixture, + mqtt_fixture: MqttConnector, + caplog: LogCaptureFixture, + ): + caplog.set_level(logging.INFO) + check_status = mocker.patch( + "src.classes.mqtt_classes.MqttConnector._check_status" + ) + decode_messages = mocker.patch( + "src.classes.mqtt_classes.MqttConnector._decode_message" + ) + error_message = "Testing error" + decode_messages.side_effect = AssertionError(error_message) + setup_service_status(mqtt_fixture=mqtt_fixture, status="offline") + mqtt_message = create_mqtt_message( + mocker=mocker, topic=TestMqttTopics.dc_name, payload=FAKE.pystr() + ) + + mqtt_fixture._on_message( + _client=FAKE.pystr(), _userdata=FAKE.pystr(), msg=mqtt_message + ) + + check_status.assert_called_once_with(msg=mqtt_message) + decode_messages.assert_not_called() + assert f"{TestMqttTopics.mate_status} is offline" in caplog.text + + def test_on_message_skips_exceptions( + self, + mocker: MockerFixture, + mqtt_fixture: MqttConnector, + caplog: LogCaptureFixture, + ): + caplog.set_level(logging.INFO) + check_status = mocker.patch( + "src.classes.mqtt_classes.MqttConnector._check_status" + ) + decode_messages = mocker.patch( + "src.classes.mqtt_classes.MqttConnector._decode_message" + ) + error_message = "Testing error" + decode_messages.side_effect = AssertionError(error_message) + setup_service_status(mqtt_fixture=mqtt_fixture, status="online") + mqtt_message = create_mqtt_message( + mocker=mocker, topic=TestMqttTopics.dc_name, payload=FAKE.pystr() + ) + + mqtt_fixture._on_message( + _client=FAKE.pystr(), _userdata=FAKE.pystr(), msg=mqtt_message + ) + + check_status.assert_called_once_with(msg=mqtt_message) + decode_messages.assert_called_once_with(msg=mqtt_message) + assert "MQTT on_message raised an exception:" in caplog.text + assert error_message in caplog.text + + def test_passes_get_mqtt_client(self, mocker: MockerFixture): + mqtt_connector = MqttConnector(TestSecretStore) + mocker.patch("src.classes.mqtt_classes.Client.connect") + result = mqtt_connector.get_mqtt_client() - assert isinstance(result, Client) + assert isinstance(result, Client) diff --git a/tests/classes/test_py_functions.py b/tests/classes/test_py_functions.py deleted file mode 100644 index 53874f9..0000000 --- a/tests/classes/test_py_functions.py +++ /dev/null @@ -1,31 +0,0 @@ -# pylint: disable=missing-function-docstring, missing-module-docstring, missing-class-docstring - -import logging -from unittest import mock - -from pytest import LogCaptureFixture - -from classes.py_functions import read_query_settings, write_results_to_csv -from tests.config.consts 
import FAKE - - -@mock.patch("classes.py_functions.os.path.exists") -@mock.patch("classes.py_functions.open", mock.mock_open()) -def test_passes_write_to_csv(exists, caplog: LogCaptureFixture): - exists.return_value = True - caplog.set_level(logging.INFO) - with mock.patch( - "configparser.ConfigParser.read", return_value=FAKE.pystr() - ) and mock.patch("configparser.ConfigParser.get", return_value=FAKE.pystr()): - write_results_to_csv(FAKE.pystr(), FAKE.pydict()) - - assert "Wrote rows into CSV file at:" in caplog.text - - -def test_passes_read_query_settings(): - with mock.patch( - "configparser.ConfigParser.read", return_value=FAKE.pystr() - ) and mock.patch("configparser.ConfigParser.get", return_value=FAKE.pystr()): - result = read_query_settings("query_settings") - - assert result is not None diff --git a/tests/classes/test_query_classes.py b/tests/classes/test_query_classes.py index 5cabc77..94fa97e 100644 --- a/tests/classes/test_query_classes.py +++ b/tests/classes/test_query_classes.py @@ -1,135 +1,141 @@ # pylint: disable=missing-function-docstring, missing-module-docstring - from pytest import CaptureFixture -from classes.query_classes import QueryBuilder +from src.classes.query_classes import QueryBuilder from tests.config.consts import FAKE -def test_passes_help_string(capsys: CaptureFixture): - bucket = FAKE.pystr() - start_range = FAKE.date_between(start_date=-30) - - query_builder = QueryBuilder(bucket=bucket, start_range=start_range) - query_builder.help() - captured = capsys.readouterr() - - assert captured.out != "" - - -def test_passes_basic_query_created(): - bucket = FAKE.pystr() - start_range = FAKE.date_between(start_date=-30) - - query_builder = QueryBuilder(bucket=bucket, start_range=start_range) - - assert ( - str(query_builder) - == f'from(bucket: "{bucket}")\n\t|> range(start: {start_range})' - ) - - -def test_passes_query_with_end_date(): - bucket = FAKE.pystr() - start_range = FAKE.date_between(start_date=-30, end_date=-15) - end_range = FAKE.date_between(start_date=-16) - - query_builder = QueryBuilder( - bucket=bucket, start_range=start_range, end_range=end_range - ) - - assert ( - str(query_builder) == f'from(bucket: "{bucket}")' - f"\n\t|> range(start: {start_range}, stop: {end_range})" - ) - - -def test_passes_query_with_filter(): - bucket = FAKE.pystr() - start_range = FAKE.date_between(start_date=-30) - field = FAKE.pystr() - value = FAKE.pystr() - - query_builder = QueryBuilder(bucket=bucket, start_range=start_range) - query_builder.append_filter(field=field, value=value) - - assert ( - str(query_builder) == f'from(bucket: "{bucket}")' - f"\n\t|> range(start: {start_range})" - f'\n\t|> filter(fn: (r) => r["{field}"] == "{value}")' - ) - - -def test_passes_query_filter_with_join(): - bucket = FAKE.pystr() - start_range = FAKE.date_between(start_date=-30) - field = FAKE.pystr() - value = FAKE.pystr() - joiner = FAKE.random_element(elements=["And", "Or"]) - - query_builder = QueryBuilder(bucket=bucket, start_range=start_range) - query_builder.append_filter(field=field, value=value, joiner=joiner) - - assert ( - str(query_builder) == f'from(bucket: "{bucket}")' - f"\n\t|> range(start: {start_range})" - f'\n\t|> filter(fn: (r) => r["{field}"] == "{value}" {joiner} ' - ) - - -def test_passes_query_filter_with_band(): - bucket = FAKE.pystr() - start_range = FAKE.date_between(start_date=-30) - field_1 = FAKE.pystr() - value_1 = FAKE.pystr() - field_2 = FAKE.pystr() - value_2 = FAKE.pystr() - - query_builder = QueryBuilder(bucket=bucket, 
start_range=start_range) - query_builder.append_filter( - field=field_1, - value=value_1, - ) - query_builder.append_filter(field=field_2, value=value_2, new_band=True) - - assert ( - str(query_builder) == f'from(bucket: "{bucket}")' - f"\n\t|> range(start: {start_range})" - f'\n\t|> filter(fn: (r) => r["{field_1}"] == "{value_1}")' - f'\n\t|> filter(fn: (r) => r["{field_2}"] == "{value_2}")' - ) - - -def test_passes_query_with_aggregate(): - bucket = FAKE.pystr() - start_range = FAKE.date_between(start_date=-30) - collection_window = FAKE.pystr() - aggregate_function = FAKE.pystr() - - query_builder = QueryBuilder(bucket=bucket, start_range=start_range) - query_builder.append_aggregate( - collection_window=collection_window, aggregate_function=aggregate_function - ) - - assert ( - str(query_builder) == f'from(bucket: "{bucket}")' - f"\n\t|> range(start: {start_range})" - f"\n\t|> aggregateWindow(every:" - f" {collection_window}, fn: {aggregate_function}" - ) - - -def test_passes_query_with_sort(): - bucket = FAKE.pystr() - start_range = FAKE.date_between(start_date=-30) - field = FAKE.pystr() - desc = FAKE.pybool() - - query_builder = QueryBuilder(bucket=bucket, start_range=start_range) - query_builder.append_sort(field=field, desc=desc) - - assert ( - str(query_builder) == f'from(bucket: "{bucket}")' - f"\n\t|> range(start: {start_range})" - f'\n\t|> sort(columns: ["{field}"], desc: {desc}' - ) +class TestQueryBuilder: + """Test class for Query Builder""" + + def test_passes_help_string(self, capsys: CaptureFixture): + bucket = FAKE.pystr() + start_range = FAKE.date_between(start_date=-30) + + query_builder = QueryBuilder(bucket=bucket, start_range=start_range) + query_builder.help() + captured = capsys.readouterr() + + assert captured.out != "" + + def test_passes_repr_format(self): + bucket = FAKE.pystr() + start_range = FAKE.date_between(start_date=-30) + + query_builder = QueryBuilder(bucket=bucket, start_range=start_range) + + assert ( + repr(query_builder) + == f"'from(bucket: \"{bucket}\")\\n\\t|> range(start: {start_range})'" + ) + + def test_passes_basic_query_created(self): + bucket = FAKE.pystr() + start_range = FAKE.date_between(start_date=-30) + + query_builder = QueryBuilder(bucket=bucket, start_range=start_range) + + assert ( + str(query_builder) + == f'from(bucket: "{bucket}")\n\t|> range(start: {start_range})' + ) + + def test_passes_query_with_end_date(self): + bucket = FAKE.pystr() + start_range = FAKE.date_between(start_date=-30, end_date=-15) + end_range = FAKE.date_between(start_date=-16) + + query_builder = QueryBuilder( + bucket=bucket, start_range=start_range, end_range=end_range + ) + + assert ( + str(query_builder) == f'from(bucket: "{bucket}")' + f"\n\t|> range(start: {start_range}, stop: {end_range})" + ) + + def test_passes_query_with_filter(self): + bucket = FAKE.pystr() + start_range = FAKE.date_between(start_date=-30) + field = FAKE.pystr() + value = FAKE.pystr() + + query_builder = QueryBuilder(bucket=bucket, start_range=start_range) + query_builder.append_filter(field=field, value=value) + + assert ( + str(query_builder) == f'from(bucket: "{bucket}")' + f"\n\t|> range(start: {start_range})" + f'\n\t|> filter(fn: (r) => r["{field}"] == "{value}")' + ) + + def test_passes_query_filter_with_join(self): + bucket = FAKE.pystr() + start_range = FAKE.date_between(start_date=-30) + field = FAKE.pystr() + value = FAKE.pystr() + joiner = FAKE.random_element(elements=["And", "Or"]) + + query_builder = QueryBuilder(bucket=bucket, start_range=start_range) + 
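# "joiner" leaves the filter clause open so another condition can be chained on +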
query_builder.append_filter(field=field, value=value, joiner=joiner) + + assert ( + str(query_builder) == f'from(bucket: "{bucket}")' + f"\n\t|> range(start: {start_range})" + f'\n\t|> filter(fn: (r) => r["{field}"] == "{value}" {joiner} ' + ) + + def test_passes_query_filter_with_band(self): + bucket = FAKE.pystr() + start_range = FAKE.date_between(start_date=-30) + field_1 = FAKE.pystr() + value_1 = FAKE.pystr() + field_2 = FAKE.pystr() + value_2 = FAKE.pystr() + + query_builder = QueryBuilder(bucket=bucket, start_range=start_range) + query_builder.append_filter( + field=field_1, + value=value_1, + ) + query_builder.append_filter(field=field_2, value=value_2, new_band=True) + + assert ( + str(query_builder) == f'from(bucket: "{bucket}")' + f"\n\t|> range(start: {start_range})" + f'\n\t|> filter(fn: (r) => r["{field_1}"] == "{value_1}")' + f'\n\t|> filter(fn: (r) => r["{field_2}"] == "{value_2}")' + ) + + def test_passes_query_with_aggregate(self): + bucket = FAKE.pystr() + start_range = FAKE.date_between(start_date=-30) + collection_window = FAKE.pystr() + aggregate_function = FAKE.pystr() + + query_builder = QueryBuilder(bucket=bucket, start_range=start_range) + query_builder.append_aggregate( + collection_window=collection_window, aggregate_function=aggregate_function + ) + + assert ( + str(query_builder) == f'from(bucket: "{bucket}")' + f"\n\t|> range(start: {start_range})" + f"\n\t|> aggregateWindow(every:" + f" {collection_window}, fn: {aggregate_function}" + ) + + def test_passes_query_with_sort(self): + bucket = FAKE.pystr() + start_range = FAKE.date_between(start_date=-30) + field = FAKE.pystr() + desc = FAKE.pybool() + + query_builder = QueryBuilder(bucket=bucket, start_range=start_range) + query_builder.append_sort(field=field, desc=desc) + + assert ( + str(query_builder) == f'from(bucket: "{bucket}")' + f"\n\t|> range(start: {start_range})" + f'\n\t|> sort(columns: ["{field}"], desc: {desc}' + ) diff --git a/tests/config/config.ini b/tests/config/config.ini new file mode 100644 index 0000000..f95b0d5 --- /dev/null +++ b/tests/config/config.ini @@ -0,0 +1,40 @@ +[influx_debugger] +; Logging levels: DEBUG, INFO, WARNING, ERROR, CRITICAL +debug_level = INFO +file_logging = true +; Log rotation can be set to time_based or size_based +log_rotation = size_based +file_location = output/ +file_name = influx_logs.log +format = %%(asctime)s, %%(name)s, %%(threadName)s, %%(levelname)s, %%(message)s +dateformat = %%d/%%m/%%Y, %%H:%%M:%%S +; Rotating file loggers require the following configs +max_file_no = 5 +time_cutover = "midnight" +max_file_bytes = 5242880 + + +[solar_debugger] +;Logging levels: DEBUG, INFO, WARNING, ERROR, CRITICAL +debug_level = INFO +file_logging = true +; Log rotation can be set to time_based or size_based +log_rotation = size_based +file_location = output/ +file_name = solar_logs.log +format = %%(asctime)s, %%(name)s, %%(threadName)s, %%(levelname)s, %%(message)s +dateformat = %%d/%%m/%%Y, %%H:%%M:%%S +mode = "a" +; Rotating file loggers require the following configs +max_file_no = 5 +time_cutover = "midnight" +max_file_bytes = 5242880 + + +[query_settings] +; Can be either 'csv, 'flux' or 'stream' +query_mode = flux +; Following three values are only required for CSV's +csv_location = output/ +csv_name = query_result.csv +csv_mode = w \ No newline at end of file diff --git a/tests/config/consts.py b/tests/config/consts.py index 5ef6c08..8e69d8f 100644 --- a/tests/config/consts.py +++ b/tests/config/consts.py @@ -1,9 +1,14 @@ -# pylint: 
disable=missing-function-docstring, missing-module-docstring, missing-class-docstring +# pylint: disable=missing-function-docstring, missing-module-docstring, missing-class-docstring, duplicate-code + +from dataclasses import dataclass from faker import Faker FAKE = Faker() +APP_CONFIG = "src/config/config.ini" +TEST_CONFIG = "tests/config/config.ini" + class TestSecretStore: mqtt_secrets = { @@ -22,19 +27,156 @@ class TestSecretStore: TEST_MQTT_ENV = { - "mqtt_host": FAKE.pystr(), - "mqtt_port": str(FAKE.pyint(6)), - "mqtt_user": FAKE.pystr(), - "mqtt_token": FAKE.pystr(), - "mqtt_topic": FAKE.pystr(), + "MQTT_HOST": FAKE.pystr(), + "MQTT_PORT": str(FAKE.pyint(6)), + "MQTT_USER": FAKE.pystr(), + "MQTT_TOKEN": FAKE.pystr(), + "MQTT_TOPIC": FAKE.pystr(), } TEST_INFLUX_ENV = { - "influx_url": FAKE.url(), - "influx_org": FAKE.pystr(), - "influx_bucket": FAKE.pystr(), - "influx_token": FAKE.pystr(), + "INFLUX_URL": FAKE.url(), + "INFLUX_ORG": FAKE.pystr(), + "INFLUX_BUCKET": FAKE.pystr(), + "INFLUX_TOKEN": FAKE.pystr(), +} + +TEST_EMPTY_ENV = { + "MQTT_HOST": "", + "MQTT_PORT": str(FAKE.pyint(6)), + "MQTT_USER": "", + "MQTT_TOKEN": "", + "MQTT_TOPIC": "", + "INFLUX_URL": "", + "INFLUX_ORG": "", + "INFLUX_BUCKET": "", + "INFLUX_TOKEN": "", } TEST_ENV_FULL = dict(TEST_MQTT_ENV, **TEST_INFLUX_ENV) TEST_MAX_PORT_RANGE = 65535 + + +@dataclass +class TestFX: + bytearray = b"\x00\x00\x00\x04t\x00\x04\x00\x02\x01\x12\t\x00" + array = { + "ac_mode": 2, + "aux_on": False, + "battery_voltage": "27.4V", + "buy_current": "0.0A", + "chg_current": "0.0A", + "error_mode": 0, + "input_voltage": "8V", + "inverter_current": "0.0A", + "is_230v": True, + "misc": 9, + "operational_mode": 4, + "output_voltage": "232V", + "sell_current": "0.0A", + "warnings": 0, + } + + +@dataclass +class TestMX: + bytearray = b"\x87\x85\x8b\x00t\x08\x02\x00 \x01\x0f\x02\xa4" + array = { + "amp_hours": "116Ah", + "aux_mode": 8, + "aux_state": False, + "bat_current": "11.7A", + "bat_voltage": "27.1V", + "errors": 0, + "kilowatt_hours": "3.2kWh", + "pv_current": "5A", + "pv_voltage": "67.6V", + "status": 2, + } + + +@dataclass +class TestDC: + bytearray = ( + b"\xff\xe8\x00l\x00\x00\x01\x11d\xff\xf9\x00\x1d\x00\x00\x00!\x00l" + b"\x00\x18\x00T\x00\x1d\x00\x07\x00\x16\x00\x1b\x00\x0e\x00\r\x00J\x00\x1f\x00+" + b"\x00\x0b\x00\x03\x00\t\x00\x0c\x00\x00\x00\x04\x00\x04\xff\xf7\x00\x0c\x00\x00" + b"\xff\xfc\x00\x04\x00\x00c\x00\x00\x00\x02\x15\x00\x00\x00\x00\x00" + ) + array = { + "bat_ah_today": "13Ah", + "bat_current": "8.4A", + "bat_kwh_today": "0.43kWh", + "bat_net_ah": "0Ah", + "bat_net_kwh": "0.02kWh", + "bat_power": "0.22kW", + "bat_voltage": "27.3V", + "days_since_full": "1.1days", + "flags": 33, + "in_ah_today": "27Ah", + "in_current": "10.8A", + "in_kwh_today": "0.74kWh", + "in_power": "0.29kW", + "min_soc_today": "99%", + "out_ah_today": "14Ah", + "out_current": "2.4A", + "out_kwh_today": "0.31kWh", + "out_power": "0.07kW", + "shunta_ah_today": "-4Ah", + "shunta_current": "-2.4A", + "shunta_kwh_today": "-0.09kWh", + "shunta_power": "-0.07kW", + "shuntb_ah_today": "4Ah", + "shuntb_current": "10.8A", + "shuntb_kwh_today": "0.12kWh", + "shuntb_power": "0.29kW", + "shuntc_ah_today": "0Ah", + "shuntc_current": "0.0A", + "shuntc_kwh_today": "0.00kWh", + "shuntc_power": "0.00kW", + "state_of_charge": "100%", + } + + +@dataclass +class TestMqttTopics: + """ + Object which is a model of all the different MQTT topics + """ + + mate_status = "mate/status" + + dc_name = "dc-1" + dc_status = "mate/dc-1/status" + dc_data = 
"mate/dc-1/dc-status" + dc_raw = "mate/dc-1/stat/raw" + dc_ts = "mate/dc-1/stat/ts" + + fx_name = "fx-1" + fx_status = "mate/fx-1/status" + fx_data = "mate/fx-1/fx-status" + fx_raw = "mate/fx-1/stat/raw" + fx_ts = "mate/fx-1/stat/ts" + + mx_name = "mx-1" + mx_status = "mate/mx-1/status" + mx_data = "mate/mx-1/mx-status" + mx_raw = "mate/mx-1/stat/raw" + mx_ts = "mate/mx-1/stat/ts" + + +# Configs +TEST_CONFIG_FILENAME = "src/config/config.ini" # Py Functions +TEST_INFLUX_QUERY_CONFIG_TITLE = "query_settings" # Influx Query +TEST_INFLUX_DEBUG_CONFIG_TITLE = "influx_debugger" # Influx Query +TEST_SOLAR_DEBUG_CONFIG_TITLE = "solar_debugger" # Solar Runtime + +# Additional Consts +TEST_MAX_PORT_RANGE = 65535 +TEST_TIME_PACKET_SIZE = 4 # Measured in bytes + +# Multi-Threading Processing +# Size of queue, needs to be quite large for the volume of data +TEST_MAX_QUEUE_LENGTH = 150 +# Time to wait when queue is full +TEST_QUEUE_WAIT_TIME = 1 diff --git a/tests/helpers/__init__.py b/tests/helpers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/helpers/test_py_functions.py b/tests/helpers/test_py_functions.py new file mode 100644 index 0000000..48a224f --- /dev/null +++ b/tests/helpers/test_py_functions.py @@ -0,0 +1,87 @@ +# pylint: disable=missing-function-docstring, missing-module-docstring, redefined-outer-name +import logging +from configparser import ConfigParser + +from pytest import LogCaptureFixture, fixture, raises +from pytest_mock import MockerFixture + +from src.helpers.py_functions import read_query_settings, write_results_to_csv +from tests.config.consts import APP_CONFIG, FAKE, TEST_CONFIG + + +@fixture +def config_parser_fixture(mocker: MockerFixture): + file_path = FAKE.pystr() + config_parser = mocker.patch( + "src.helpers.py_functions.ConfigParser", return_value=file_path + ) + new_parser = mocker.MagicMock(ConfigParser) + config_parser.return_value = new_parser + new_parser.get.return_value = file_path + file_exists = mocker.patch("src.helpers.py_functions.os.path.exists") + open_file = mocker.patch("src.helpers.py_functions.open", mocker.mock_open()) + makedirs = mocker.patch("src.helpers.py_functions.os.makedirs") + makedirs.side_effect = mocker.MagicMock() + mocker.patch("configparser.ConfigParser.read", return_value=FAKE.pystr()) + return (file_path, file_exists, open_file, makedirs) + + +def test_writes_to_csv(config_parser_fixture, caplog: LogCaptureFixture): + caplog.set_level(logging.INFO) + (file_path, file_exists, open_file, _makedirs) = config_parser_fixture + + write_results_to_csv(FAKE.pystr(), FAKE.pydict()) + + file_exists.assert_called_once_with(file_path) + open_file.assert_called_once() + assert "Wrote rows into CSV file at:" in caplog.text + assert "Failed to write CSV" not in caplog.text + + +def test_makes_dir_when_not_existent(config_parser_fixture, caplog: LogCaptureFixture): + caplog.set_level(logging.INFO) + (file_path, file_exists, open_file, makedirs) = config_parser_fixture + file_exists.return_value = False + + write_results_to_csv(FAKE.pystr(), FAKE.pydict()) + + file_exists.assert_called_once_with(file_path) + makedirs.assert_called_once_with(file_path) + open_file.assert_called_once() + + assert f"Wrote rows into CSV file at: {file_path}" in caplog.text + assert "Failed to write CSV" not in caplog.text + + +def test_fails_write_to_csv(config_parser_fixture, caplog: LogCaptureFixture): + caplog.set_level(logging.INFO) + (_file_path, _file_exists, open_file, _makedirs) = config_parser_fixture + open_file.side_effect = 
FileNotFoundError + + with raises(FileNotFoundError): + write_results_to_csv(FAKE.pystr(), FAKE.pydict()) + + assert "Failed to write CSV" in caplog.text + + +def test_read_query_settings(mocker: MockerFixture): + mocker.patch("configparser.ConfigParser.read", return_value=FAKE.pystr()) + mocker.patch("configparser.ConfigParser.get", return_value=FAKE.pystr()) + + result = read_query_settings("query_settings") + + assert result is not None + + +def test_config_files_are_consistent(): + app_config = None + test_config = None + + with open(APP_CONFIG) as app_fh: + app_config = app_fh.read() + with open(TEST_CONFIG) as test_fh: + test_config = test_fh.read() + + assert ( + app_config == test_config + ), "Configuration files are different between environments" diff --git a/tests/helpers/test_py_logger.py b/tests/helpers/test_py_logger.py new file mode 100644 index 0000000..a3da3e4 --- /dev/null +++ b/tests/helpers/test_py_logger.py @@ -0,0 +1,180 @@ +# pylint: disable=missing-function-docstring, missing-module-docstring, protected-access, redefined-outer-name +import logging +from logging import Logger, StreamHandler + +import pytest +from pytest import LogCaptureFixture, fixture, mark, raises +from pytest_mock import MockerFixture + +from src.classes.custom_exceptions import MissingConfigurationError +from src.helpers.consts import SOLAR_DEBUG_CONFIG_TITLE +from src.helpers.py_logger import LoggingTools +from tests.config.consts import FAKE, TEST_CONFIG + + +class FakeLogger: + """Storage class for fixture""" + + def __init__(self, logger): + self.fake_logger = logger + self.file_path = TEST_CONFIG + self.log_tools = LoggingTools( + config_name=SOLAR_DEBUG_CONFIG_TITLE, + config_dir=TEST_CONFIG, + logger=self.fake_logger, + ) + self.log_tools._debug_level = logging.DEBUG + self.log_tools._file_format = ( + "%(asctime)s, %(name)s, %(threadName)s, %(levelname)s, %(message)s" + ) + self.log_tools._date_format = "%d/%m/%Y, %H:%M:%S" + self.log_tools._max_file_bytes = FAKE.pyint() + self.log_tools._max_file_no = FAKE.pyint() + self.log_tools._mode = "w" + self.log_tools._file_path = self.file_path + + +@fixture +def log_fixture(): + fake_logger = logging.getLogger("Test Logger") + yield fake_logger + fake_logger.handlers.clear() + + +@fixture +def log_tools_fixture(log_fixture: Logger): + yield FakeLogger(log_fixture) + + +class TestLoggingTools: + """Test class for LoggingTools""" + + def test_pass_read_configs(self, log_tools_fixture: FakeLogger): + log_tools = log_tools_fixture.log_tools + log_tools.read_configs() + + def test_read_basic_configs( + self, mocker: MockerFixture, log_tools_fixture: FakeLogger + ): + log_tools = log_tools_fixture.log_tools + basic_config = mocker.Mock() + log_tools._read_basic_config = basic_config + log_tools.read_configs() + basic_config.assert_called() + + def test_read_extra_configs_with_flag( + self, mocker: MockerFixture, log_tools_fixture: FakeLogger + ): + log_tools = log_tools_fixture.log_tools + extra_config = mocker.Mock() + log_tools._read_extra_configs = extra_config + log_tools.read_configs() + extra_config.assert_called() + + @mark.parametrize( + "side_effect", + [ + # Level, Format, DateFormat, FileLogging + [None, "b", "c", "false"], + ["DEBUG", None, "c", "false"], + ["DEBUG", "b", None, "false"], + ["DEBUG", "b", None, None], + ], + ) + def test_read_basic_configs_raise_custom_exception_on_bad_data( + self, + side_effect: list, + mocker: MockerFixture, + log_tools_fixture: FakeLogger, + caplog: LogCaptureFixture, + ): +
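# A None in the mocked get() sequence mimics a key missing from the ini file +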
mocker.patch("src.helpers.py_logger.ConfigParser.get", side_effect=side_effect) + log_tools = log_tools_fixture.log_tools + log_tools._config_name = SOLAR_DEBUG_CONFIG_TITLE + + with raises(MissingConfigurationError): + log_tools._config_parser.read(TEST_CONFIG) + log_tools._read_basic_config() + + assert "Failed to read basic logger configs" in caplog.messages + + @mark.parametrize( + "side_effect", + [ + # LogRotation, FileLocation, FileName, MaxBytes, MaxFiles, Mode + [None, "b", "c", "5", "5", "w"], + ["a", None, "c", "5", "5", "w"], + ["a", "b", None, "5", "5", "w"], + ["a", "b", "c", None, "5", "w"], + ["a", "b", "c", "5", None, "w"], + ["a", "b", "c", "5", "5", None], + ], + ) + def test_read_extra_configs_raise_custom_exception_on_bad_data( + self, + side_effect: list, + mocker: MockerFixture, + log_tools_fixture: FakeLogger, + caplog: LogCaptureFixture, + ): + mocker.patch("src.helpers.py_logger.ConfigParser.get", side_effect=side_effect) + log_tools = log_tools_fixture.log_tools + log_tools._config_name = SOLAR_DEBUG_CONFIG_TITLE + log_tools._config_parser.read(TEST_CONFIG) + + with pytest.raises(MissingConfigurationError): + log_tools._read_extra_configs() + + assert "Failed to read file logger settings in configs" in caplog.messages + + def test_create_stdout_logger( + self, log_tools_fixture: FakeLogger, caplog: LogCaptureFixture + ): + caplog.set_level(logging.INFO) + log_tools = log_tools_fixture.log_tools + fake_logger: Logger = log_tools_fixture.fake_logger + + log_tools.create_loggers() + + assert isinstance(fake_logger.handlers[0], StreamHandler) + assert "Created stdout logger" in caplog.messages + + def test_create_rotating_file_logger_size( + self, + mocker: MockerFixture, + log_tools_fixture: FakeLogger, + caplog: LogCaptureFixture, + ): + caplog.set_level(logging.INFO) + mocker.patch("src.helpers.py_logger.os.path.exists", return_value=True) + mock_handler = mocker.patch("src.helpers.py_logger.RotatingFileHandler") + log_tools: LoggingTools = log_tools_fixture.log_tools + fake_logger: Logger = log_tools_fixture.fake_logger + file_path: str = log_tools_fixture.file_path + log_tools._is_file_logging = True + log_tools._log_rotation = "size_based" + + log_tools.create_loggers() + + assert fake_logger.handlers[1] == mock_handler() + assert f"Created rotating file log file at {file_path}" in caplog.messages + + def test_create_rotating_file_logger_timed( + self, + mocker: MockerFixture, + log_tools_fixture: FakeLogger, + caplog: LogCaptureFixture, + ): + caplog.set_level(logging.INFO) + mocker.patch("src.helpers.py_logger.os.path.exists", return_value=True) + mock_handler = mocker.patch("src.helpers.py_logger.TimedRotatingFileHandler") + log_tools: LoggingTools = log_tools_fixture.log_tools + fake_logger: Logger = log_tools_fixture.fake_logger + file_path: str = log_tools_fixture.file_path + log_tools._is_file_logging = True + log_tools._log_rotation = "time_based" + + log_tools.create_loggers() + + assert fake_logger.handlers[1] == mock_handler() + assert f"Created time rotating file log file at {file_path}" in caplog.messages