Commit

Added missing files needed to work in this repository from a clean computer
Erling Paulsen committed Feb 17, 2023
1 parent 471ae05 commit 1ab54f5
Showing 12 changed files with 472 additions and 0 deletions.
1 change: 1 addition & 0 deletions .gitattributes
@@ -0,0 +1 @@
* text=auto eol=lf
13 changes: 13 additions & 0 deletions .gitignore
@@ -4,3 +4,16 @@ pythonenv*
.coverage
venv
.venv

# Ignore files that Home Assistant writes into the repo when running under the debugger
.storage/
blueprints/
home-assistant*
.HA_VERSION
*.yaml
!configuration.yaml
setup.cfg


# File generated by the test script
sensor-data.txt
18 changes: 18 additions & 0 deletions .vscode/launch.json
@@ -0,0 +1,18 @@
{
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python: Launch HomeAssistant",
"type": "python",
"request": "launch",
"program": "/usr/local/python/bin/hass",
"args": [
"-c",
"${workspaceFolder}",
"--debug"
],
"justMyCode": true
}
]
}
53 changes: 53 additions & 0 deletions configuration.yaml
@@ -0,0 +1,53 @@

# Loads default set of integrations. Do not remove.
default_config:

logger:
default: info
logs:
custom_components.energytariff: debug

homeassistant:
allowlist_external_dirs:
- "/workspaces/grid-cap-watcher"

sensor:
- platform: file
name: "power_usage"
unit_of_measurement: "W"
file_path: /workspaces/grid-cap-watcher/sensor-data.txt

- platform: energytariff
entity_id: "sensor.power_usage"
max_power: 15900
target_energy: 10
levels:
- name: "Trinn 1: 0-2 kWh"
threshold: 2
price: 135
- name: "Trinn 2: 2-5 kWh"
threshold: 5
price: 170
- name: "Trinn 3: 5-10 kWh"
threshold: 10
price: 290
- name: "Trinn 4: 10-15 kWh"
threshold: 15
price: 600
- name: "Trinn 5: 15-20 kWh"
threshold: 20
price: 800



# Load frontend themes from the themes folder
frontend:
themes: !include_dir_merge_named themes

# Text to speech
tts:
- platform: google_translate

#automation: !include automations.yaml
#script: !include scripts.yaml
#scene: !include scenes.yaml
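
The levels list above defines an ordered set of grid-capacity steps: each entry has an upper threshold in kWh and a price for that step. As an illustration only (not part of this commit, and not necessarily how the energytariff integration implements it), a simple ordered-threshold lookup over those values could look like the sketch below; the Level class and find_level function are hypothetical names, and the currency of the prices is assumed from the configuration.

from dataclasses import dataclass


@dataclass
class Level:
    """One grid-capacity step, mirroring an entry under `levels` above."""

    name: str
    threshold: float  # upper bound for energy, in kWh
    price: float      # price for this step (currency assumed from the config)


LEVELS = [
    Level("Trinn 1: 0-2 kWh", 2, 135),
    Level("Trinn 2: 2-5 kWh", 5, 170),
    Level("Trinn 3: 5-10 kWh", 10, 290),
    Level("Trinn 4: 10-15 kWh", 15, 600),
    Level("Trinn 5: 15-20 kWh", 20, 800),
]


def find_level(energy_kwh: float) -> Level:
    """Return the first level whose threshold covers the given energy."""
    for level in LEVELS:
        if energy_kwh <= level.threshold:
            return level
    return LEVELS[-1]  # above the highest threshold: report the top step


print(find_level(3.4).name)  # "Trinn 2: 2-5 kWh"
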
9 changes: 9 additions & 0 deletions rand.sh
@@ -0,0 +1,9 @@
#!/bin/bash

# Writes a random value to a text file every 15 seconds. The same file is configured in configuration.yaml as a power sensor
while true
do
rand=$(( ( RANDOM % 9000 ) + 1000 ))
echo $rand > sensor-data.txt
sleep 15
done
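
rand.sh feeds the file-based power_usage sensor defined in configuration.yaml; Home Assistant's file sensor platform uses the last line of the file as the sensor state. A minimal sketch (not part of the commit) of reading that value back, using the same path as in configuration.yaml, is shown below; read_power_watts is a hypothetical helper.

from pathlib import Path

# Same path as file_path in configuration.yaml (and allowlist_external_dirs).
SENSOR_FILE = Path("/workspaces/grid-cap-watcher/sensor-data.txt")


def read_power_watts(path: Path = SENSOR_FILE) -> float:
    """Return the last non-empty line of the file as a power value in watts."""
    lines = [line.strip() for line in path.read_text().splitlines() if line.strip()]
    if not lines:
        raise ValueError(f"{path} is empty")
    return float(lines[-1])


if __name__ == "__main__":
    print(f"Simulated power: {read_power_watts():.0f} W")
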
1 change: 1 addition & 0 deletions tests/__init__.py
@@ -0,0 +1 @@
"""Tests for grid-cap-watcher integration."""
41 changes: 41 additions & 0 deletions tests/conftest.py
@@ -0,0 +1,41 @@
"""Global fixtures for grid-cap-watcher integration."""
from unittest.mock import patch

import pytest

pytest_plugins = "pytest_homeassistant_custom_component"


# This fixture is used to prevent HomeAssistant from attempting to create and dismiss persistent
# notifications. These calls would fail without this fixture since the persistent_notification
# integration is never loaded during a test.
@pytest.fixture(name="skip_notifications", autouse=True)
def skip_notifications_fixture():
"""Skip notification calls."""
with patch("homeassistant.components.persistent_notification.async_create"), patch(
"homeassistant.components.persistent_notification.async_dismiss"
):
yield


# This fixture, when used, will result in calls to async_get_data to return None. To have the call
# return a value, we would add the `return_value=<VALUE_TO_RETURN>` parameter to the patch call.
@pytest.fixture(name="bypass_get_data")
def bypass_get_data_fixture():
"""Skip calls to get data from API."""
with patch(
"custom_components.grid_energy_level.GridCapWatcherApiClient.async_get_data"
):
yield


# In this fixture, we force calls to async_get_data to raise an Exception. This is useful
# for testing how exceptions are handled.
@pytest.fixture(name="error_on_get_data")
def error_get_data_fixture():
"""Simulate error when retrieving data from API."""
with patch(
"custom_components.grid_energy_level.GridCapWatcherApiClient.async_get_data",
side_effect=Exception,
):
yield
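
As an illustration of how these fixtures are consumed (not part of this commit), a hypothetical test could request bypass_get_data by name to set up a mocked config entry without any real I/O. The imports of async_setup_entry, DOMAIN, and MOCK_CONFIG are assumptions based on the other test files in this commit.

from custom_components.grid_energy_level import async_setup_entry
from custom_components.grid_energy_level.const import DOMAIN
from pytest_homeassistant_custom_component.common import MockConfigEntry

from .const import MOCK_CONFIG


async def test_setup_entry(hass, bypass_get_data):
    """Set up a mocked config entry; bypass_get_data patches async_get_data."""
    entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG, entry_id="test")
    entry.add_to_hass(hass)
    # No real API call happens here because async_get_data is patched out.
    assert await async_setup_entry(hass, entry)
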
9 changes: 9 additions & 0 deletions tests/const.py
@@ -0,0 +1,9 @@
"""Constants for grid-cap-watcher tests."""
# from custom_components.grid_energy_level.const import (
# CONF_PASSWORD,
# )
# from custom_components.grid_energy_level.const import (
# CONF_USERNAME,
# )

# MOCK_CONFIG = {CONF_USERNAME: "test_username", CONF_PASSWORD: "test_password"}
87 changes: 87 additions & 0 deletions tests/test_api.py
@@ -0,0 +1,87 @@
"""Tests for grid-cap-watcher api."""
import asyncio

import aiohttp
from custom_components.grid_energy_level.api import (
GridCapWatcherApiClient,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession


async def test_api(hass, aioclient_mock, caplog):
"""Test API calls."""

# To test the api submodule, we first create an instance of our API client
api = GridCapWatcherApiClient("test", "test", async_get_clientsession(hass))

# Use aioclient_mock which is provided by `pytest_homeassistant_custom_component`
# to mock responses to aiohttp requests. In this case we are telling the mock to
# return {"test": "test"} when a `GET` call is made to the specified URL. We then
# call `async_get_data` which will make that `GET` request.
aioclient_mock.get(
"https://jsonplaceholder.typicode.com/posts/1", json={"test": "test"}
)
assert await api.async_get_data() == {"test": "test"}

# We do the same for `async_set_title`. Note the difference in the mock call
# between the previous step and this one. We use `patch` here instead of `get`
# because we know that `async_set_title` calls `api_wrapper` with `patch` as the
# first parameter
aioclient_mock.patch("https://jsonplaceholder.typicode.com/posts/1")
assert await api.async_set_title("test") is None

# To get 100% coverage, we also need to exercise the code in `api_wrapper`
# that isn't already called by `async_get_data` and `async_set_title`. Because the
# only logic that lives inside `api_wrapper` that is not being handled by a third
# party library (aiohttp) is the exception handling, we also want to simulate
# raising the exceptions to ensure that the function handles them as expected.
# The caplog fixture gives access to log messages in tests. This is particularly
# useful when testing exception handling, since often the only observable effect
# of handling an exception is a log statement.
caplog.clear()
aioclient_mock.put(
"https://jsonplaceholder.typicode.com/posts/1", exc=asyncio.TimeoutError
)
assert (
await api.api_wrapper("put", "https://jsonplaceholder.typicode.com/posts/1")
is None
)
assert (
len(caplog.record_tuples) == 1
and "Timeout error fetching information from" in caplog.record_tuples[0][2]
)

caplog.clear()
aioclient_mock.post(
"https://jsonplaceholder.typicode.com/posts/1", exc=aiohttp.ClientError
)
assert (
await api.api_wrapper("post", "https://jsonplaceholder.typicode.com/posts/1")
is None
)
assert (
len(caplog.record_tuples) == 1
and "Error fetching information from" in caplog.record_tuples[0][2]
)

caplog.clear()
aioclient_mock.post("https://jsonplaceholder.typicode.com/posts/2", exc=Exception)
assert (
await api.api_wrapper("post", "https://jsonplaceholder.typicode.com/posts/2")
is None
)
assert (
len(caplog.record_tuples) == 1
and "Something really wrong happened!" in caplog.record_tuples[0][2]
)

caplog.clear()
aioclient_mock.post("https://jsonplaceholder.typicode.com/posts/3", exc=TypeError)
assert (
await api.api_wrapper("post", "https://jsonplaceholder.typicode.com/posts/3")
is None
)
assert (
len(caplog.record_tuples) == 1
and "Error parsing information from" in caplog.record_tuples[0][2]
)
120 changes: 120 additions & 0 deletions tests/test_config_flow.py
@@ -0,0 +1,120 @@
"""Test grid-cap-watcher config flow."""
from unittest.mock import patch

import pytest
from custom_components.grid_energy_level.const import (
BINARY_SENSOR,
)
from custom_components.grid_energy_level.const import (
DOMAIN,
)
from custom_components.grid_energy_level.const import (
PLATFORMS,
)
from custom_components.grid_energy_level.const import (
SENSOR,
)
from custom_components.grid_energy_level.const import (
SWITCH,
)
from homeassistant import config_entries
from homeassistant import data_entry_flow
from pytest_homeassistant_custom_component.common import MockConfigEntry

from .const import MOCK_CONFIG


# This fixture bypasses the actual setup of the integration
# since we only want to test the config flow. We test the
# actual functionality of the integration in other test modules.
@pytest.fixture(autouse=True)
def bypass_setup_fixture():
"""Prevent setup."""
with patch(
"custom_components.grid_energy_level.async_setup",
return_value=True,
), patch(
"custom_components.grid_energy_level.async_setup_entry",
return_value=True,
):
yield


# Here we simulate a successful config flow from the backend.
# Note that we use the `bypass_get_data` fixture here because
# we want the config flow validation to succeed during the test.
async def test_successful_config_flow(hass, bypass_get_data):
"""Test a successful config flow."""
# Initialize a config flow
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

# Check that the config flow shows the user form as the first step
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"

# If a user were to enter `test_username` for username and `test_password`
# for password, it would result in this function call
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_CONFIG
)

# Check that the config flow is complete and a new entry is created with
# the input data
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "test_username"
assert result["data"] == MOCK_CONFIG
assert result["result"]


# In this case, we want to simulate a failure during the config flow.
# We use the `error_on_get_data` mock instead of `bypass_get_data`
# (note the function parameters) to raise an Exception during
# validation of the input config.
async def test_failed_config_flow(hass, error_on_get_data):
"""Test a failed config flow due to credential validation failure."""

result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"

result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_CONFIG
)

assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "auth"}


# Our config flow also has an options flow, so we must test it as well.
async def test_options_flow(hass):
"""Test an options flow."""
# Create a new MockConfigEntry and add to HASS (we're bypassing config
# flow entirely)
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG, entry_id="test")
entry.add_to_hass(hass)

# Initialize an options flow
await hass.config_entries.async_setup(entry.entry_id)
result = await hass.config_entries.options.async_init(entry.entry_id)

# Verify that the first options step is a user form
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"

# Enter some fake data into the form
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={platform: platform != SENSOR for platform in PLATFORMS},
)

# Verify that the flow finishes
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "test_username"

# Verify that the options were updated
assert entry.options == {BINARY_SENSOR: True, SENSOR: False, SWITCH: True}