Fake data Issue #354 (#360) (#369)
* Fake data Issue #354 (#360)

* Fake data Issue #354

* Fake data Issue #354

* Fake data Issue #354

* Changed relative imports and removed pre-commit from requirements.txt

* Using more flexible python3 version to attempt to fix pre-commit.ci build issue

* Written new Unit Test for fake forecast with specified GSP ID

* Written unit tests for all endpoints/routes and modified gsp.py to support new tests

* Removed duplicated Test Cases

* 1st test case for fake environment

* Fixed accidental activation of fake environment in gsp.py module

* Written and tested remaining test cases for is_fake

* Moved isinstance check to before the is_fake condition

* Added 2 Tests to test_national.py and cleaned up some logic in test_gsp test cases

* Modified gsp.py and national.py modules and accompanying test cases to address feedback

* Fixed incorrect for-loop iteration over a list (should be a single ForecastSQL object) by switching to the forecasts object in the test_national.py test cases

* Modified test cases to use NationalForecastValue, ForecastValue, and the ManyForecasts as the return objects

* Modified test cases to use pytest.fixture() to yield values from db_session

* Possible fix for test_read_latest_all_gsp_normalized() and test_read_latest_all_gsp()

* 1st experiment for test_read_truth_national_gsp() and test_read_forecast_values_gsp()

* 1st experiment with make_fake_gsp_yields()

* 2nd experiment with make_fake_gsp_yields() - modified test_gsp routes

* 3rd experiment with make_fake_gsp_yields() - modified List Comprehension

* 4th experiment with make_fake_gsp_yields() - hard coded _gsp_id_

* Removed yield and fixture

* Experiment: Create a separate tests/fake/test_gsp_fake.py test case module

* split national tests

* roll back changes in tests

* fix national test

* rename

* is_fake to is_fake()

* add to readme, don't use any caching in tests

* roll back

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* tests run locally

---------

Co-authored-by: Vikram Pande <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
3 people authored Dec 20, 2024
1 parent 7531e9e commit 97bbca1
Showing 8 changed files with 194 additions and 8 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -1,5 +1,5 @@
default_language_version:
python: python3.9
python: python3

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
1 change: 1 addition & 0 deletions README.md
@@ -88,6 +88,7 @@ Deployment of this service is now done through terraform cloud.
- `FORECAST_ERROR_HOURS` - using route `/v0/system/GBstatus/check_last_forecast_run` we can check if a forecast has
been made in the last `FORECAST_ERROR_HOURS` hours
- `ADJUST_MW_LIMIT` - the maximum the api is allowed to adjust the national forecast by
- `FAKE` - Set to `1` (or any non-zero integer) to serve fake data rather than connecting to a database

## Routes to SQL tables

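As a quick illustration of the new `FAKE` setting documented above, the sketch below shows how the flag is read; it mirrors the `is_fake()` helper added in `src/gsp.py` and `src/national.py`, and setting the variable inline is purely for demonstration.

```python
import os

# Demonstration only: in deployment the variable would be set in the
# environment (e.g. docker-compose), not from Python code.
os.environ["FAKE"] = "1"


def is_fake():
    """Mirror of the helper added in src/gsp.py and src/national.py."""
    return int(os.environ.get("FAKE", 0))


# With FAKE=1 the API routes populate the session with fake data
# instead of querying the real database.
assert is_fake() == 1
```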
4 changes: 3 additions & 1 deletion src/cache.py
@@ -37,6 +37,8 @@ def remove_old_cache(
logger.debug(f"Removing {key} from cache, ({value})")
keys_to_remove.append(key)

logger.debug(f"Removing {len(keys_to_remove)} keys from cache")

for key in keys_to_remove:
try:
last_updated.pop(key)
@@ -129,7 +131,7 @@ def wrapper(*args, **kwargs): # noqa
return response[route_variables]

# use cache
logger.debug("Using cache route")
logger.debug(f"Using cache route, cache made at {last_updated[route_variables]}")
return response[route_variables]

return wrapper
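For context on the `remove_old_cache` change above, here is a simplified, self-contained sketch of the eviction pattern: stale keys are collected, the count is logged, and the entries are popped from both dictionaries. The five-minute threshold and plain `logging` logger are assumptions for illustration, not the module's actual configuration.

```python
import logging
from datetime import datetime, timedelta

logger = logging.getLogger(__name__)


def remove_old_cache(
    last_updated: dict,
    response: dict,
    max_age: timedelta = timedelta(minutes=5),  # assumed threshold
) -> None:
    """Drop cached responses whose last-update time is older than max_age."""
    now = datetime.utcnow()
    keys_to_remove = [
        key for key, updated in last_updated.items() if now - updated > max_age
    ]

    logger.debug(f"Removing {len(keys_to_remove)} keys from cache")

    for key in keys_to_remove:
        # pop with a default so a key evicted elsewhere does not raise
        last_updated.pop(key, None)
        response.pop(key, None)
```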
46 changes: 40 additions & 6 deletions src/gsp.py
@@ -4,9 +4,11 @@
from typing import List, Optional, Union

import structlog
from dotenv import load_dotenv
from fastapi import APIRouter, Depends, Request, Security, status
from fastapi.responses import Response
from fastapi_auth0 import Auth0User
from nowcasting_datamodel.fake import make_fake_forecast, make_fake_forecasts, make_fake_gsp_yields
from nowcasting_datamodel.models import Forecast, ForecastValue, ManyForecasts
from sqlalchemy.orm.session import Session

@@ -32,6 +34,7 @@

logger = structlog.stdlib.get_logger()
adjust_limit = float(os.getenv("ADJUST_MW_LIMIT", 0.0))
load_dotenv()


router = APIRouter(
@@ -40,7 +43,12 @@
NationalYield = GSPYield


# corresponds to route /v0/solar/GB/gsp/forecast/all
def is_fake():
"""Start FAKE environment"""
return int(os.environ.get("FAKE", 0))


# corresponds to route /v0/solar/GB/gsp/forecast/all/
@router.get(
"/forecast/all/",
response_model=Union[ManyForecasts, List[OneDatetimeManyForecastValues]],
@@ -80,11 +88,17 @@ def get_all_available_forecasts(
- **end_datetime_utc**: optional end datetime for the query. e.g '2023-08-12 14:00:00+00:00'
"""

logger.info(f"Get forecasts for all gsps. The option is {historic=} for user {user}")

if gsp_ids is not None:
if isinstance(gsp_ids, str):
gsp_ids = [int(gsp_id) for gsp_id in gsp_ids.split(",")]

if is_fake():
if gsp_ids is None:
gsp_ids = [int(gsp_id) for gsp_id in range(1, GSP_TOTAL)]

make_fake_forecasts(gsp_ids=gsp_ids, session=session)

logger.info(f"Get forecasts for all gsps. The option is {historic=} for user {user}")

start_datetime_utc = format_datetime(start_datetime_utc)
end_datetime_utc = format_datetime(end_datetime_utc)
creation_limit_utc = format_datetime(creation_limit_utc)
@@ -137,6 +151,10 @@ def get_forecasts_for_a_specific_gsp_old_route(
user: Auth0User = Security(get_user()),
) -> Union[Forecast, List[ForecastValue]]:
"""Redirects old API route to new route /v0/solar/GB/gsp/{gsp_id}/forecast"""

if is_fake():
make_fake_forecast(gsp_id=gsp_id, session=session)

return get_forecasts_for_a_specific_gsp(
request=request,
gsp_id=gsp_id,
@@ -185,6 +203,8 @@ def get_forecasts_for_a_specific_gsp(
- **creation_utc_limit**: optional, only return forecasts made before this datetime.
returns the latest forecast made 60 minutes before the target time)
"""
if is_fake():
make_fake_forecast(gsp_id=gsp_id, session=session)

logger.info(f"Get forecasts for gsp id {gsp_id} forecast of forecast with only values.")
logger.info(f"This is for user {user}")
@@ -251,11 +271,18 @@ def get_truths_for_all_gsps(
- **start_datetime_utc**: optional start datetime for the query.
- **end_datetime_utc**: optional end datetime for the query.
"""
logger.info(f"Get PV Live estimates values for all gsp id and regime {regime} for user {user}")

if gsp_ids is not None:
if isinstance(gsp_ids, str):
gsp_ids = [int(gsp_id) for gsp_id in gsp_ids.split(",")]

if is_fake():
if gsp_ids is None:
gsp_ids = [int(gsp_id) for gsp_id in range(1, GSP_TOTAL)]

make_fake_gsp_yields(gsp_ids=gsp_ids, session=session)

logger.info(f"Get PV Live estimates values for all gsp id and regime {regime} for user {user}")

start_datetime_utc = format_datetime(start_datetime_utc)
end_datetime_utc = format_datetime(end_datetime_utc)

@@ -286,6 +313,10 @@ def get_truths_for_a_specific_gsp_old_route(
user: Auth0User = Security(get_user()),
) -> List[GSPYield]:
"""Redirects old API route to new route /v0/solar/GB/gsp/{gsp_id}/pvlive"""

if is_fake():
make_fake_gsp_yields(gsp_ids=[gsp_id], session=session)

return get_truths_for_a_specific_gsp(
request=request,
gsp_id=gsp_id,
@@ -331,6 +362,9 @@ def get_truths_for_a_specific_gsp(
If not set, defaults to N_HISTORY_DAYS env var, which if not set defaults to yesterday.
"""

if is_fake():
make_fake_forecast(gsp_id=gsp_id, session=session)

logger.info(
f"Get PV Live estimates values for gsp id {gsp_id} " f"and regime {regime} for user {user}"
)
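The `src/gsp.py` changes above all follow the same gating pattern: normalise `gsp_ids`, and if `is_fake()` is truthy, seed the session with fake rows before the normal query logic runs. A condensed sketch of that pattern (FastAPI decorators and dependencies omitted; the `GSP_TOTAL` value shown is a placeholder, the real constant lives in the module):

```python
import os

from nowcasting_datamodel.fake import make_fake_forecasts

GSP_TOTAL = 318  # placeholder; the real constant is defined/imported in gsp.py


def is_fake():
    """Return a truthy value when the FAKE environment variable is set."""
    return int(os.environ.get("FAKE", 0))


def get_all_available_forecasts(session, gsp_ids=None):
    """Condensed sketch of the guard used by the routes above."""
    # Accept a comma-separated string of GSP ids, as the real route does.
    if gsp_ids is not None and isinstance(gsp_ids, str):
        gsp_ids = [int(gsp_id) for gsp_id in gsp_ids.split(",")]

    if is_fake():
        if gsp_ids is None:
            gsp_ids = list(range(1, GSP_TOTAL))
        # Seed the (test) session with fake forecasts instead of real data.
        make_fake_forecasts(gsp_ids=gsp_ids, session=session)

    # ... the existing query/formatting logic continues unchanged ...
```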
12 changes: 12 additions & 0 deletions src/national.py
@@ -10,6 +10,7 @@
from elexonpy.api_client import ApiClient
from fastapi import APIRouter, Depends, HTTPException, Query, Request, Security
from fastapi_auth0 import Auth0User
from nowcasting_datamodel.fake import make_fake_forecast, make_fake_gsp_yields
from nowcasting_datamodel.read.read import get_latest_forecast_for_gsps
from sqlalchemy.orm.session import Session

@@ -43,6 +44,11 @@
elexon_forecast_api = GenerationForecastApi(api_client)


def is_fake():
"""Start FAKE environment"""
return int(os.environ.get("FAKE", 0))


@router.get(
"/forecast",
response_model=Union[NationalForecast, List[NationalForecastValue]],
@@ -88,6 +94,9 @@ def get_national_forecast(
"""
logger.debug("Get national forecasts")

if is_fake():
make_fake_forecast(gsp_id=0, session=session)

start_datetime_utc = format_datetime(start_datetime_utc)
end_datetime_utc = format_datetime(end_datetime_utc)
creation_limit_utc = format_datetime(creation_limit_utc)
@@ -204,6 +213,9 @@ def get_national_pvlive(
"""
logger.info(f"Get national PV Live estimates values " f"for regime {regime} for {user}")

if is_fake():
make_fake_gsp_yields(gsp_ids=[0], session=session)

return get_truth_values_for_a_specific_gsp_from_database(
session=session, gsp_id=0, regime=regime
)
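One detail worth noting for the guards in `src/national.py` and `src/gsp.py`: the helper must be called as `is_fake()`, not referenced bare as `is_fake`, because a function object is always truthy and would enable the fake branch even with `FAKE=0` (this is what the "is_fake to is_fake()" commit above addresses). A tiny self-contained illustration:

```python
import os


def is_fake():
    """Return a truthy value when the FAKE environment variable is set."""
    return int(os.environ.get("FAKE", 0))


os.environ["FAKE"] = "0"

# A bare function reference is always truthy, so `if is_fake:` would take
# the fake branch even though fake mode is disabled...
assert bool(is_fake) is True

# ...whereas calling the helper evaluates the environment variable:
assert bool(is_fake()) is False
```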
88 changes: 88 additions & 0 deletions src/tests/fake/test_gsp_fake.py
@@ -0,0 +1,88 @@
from nowcasting_datamodel.models import ForecastValue, LocationWithGSPYields, ManyForecasts

from gsp import GSP_TOTAL, is_fake


def test_is_fake_specific_gsp(monkeypatch, api_client, gsp_id=1):
"""### Test FAKE environment specific _gsp_id_ routes are populating
with fake data.
#### Parameters
- **gsp_id**: Please set to any non-zero integer that is <= GSP_TOTAL
"""

monkeypatch.setenv("FAKE", "1")
assert is_fake() == 1

# Specific _gsp_id_ route/endpoint for successful connection
response = api_client.get(f"/v0/solar/GB/gsp/{gsp_id}/forecast")
assert response.status_code == 200

forecast_value = [ForecastValue(**f) for f in response.json()]
assert forecast_value is not None

# Disable is_fake environment
monkeypatch.setenv("FAKE", "0")


def test_is_fake_get_truths_for_a_specific_gsp(monkeypatch, api_client, gsp_id=1):
"""### Test FAKE environment specific _gsp_id_ routes are populating
with fake data.
#### Parameters
- **gsp_id**: Please set to any non-zero integer that is <= GSP_TOTAL
"""

monkeypatch.setenv("FAKE", "1")
assert is_fake() == 1

# Specific _gsp_id_ route/endpoint for successful connection
response = api_client.get(f"/v0/solar/GB/gsp/{gsp_id}/pvlive")
assert response.status_code == 200

forecast_value = [ForecastValue(**f) for f in response.json()]
assert forecast_value is not None

# Disable is_fake environment
monkeypatch.setenv("FAKE", "0")


def test_is_fake_all_available_forecasts(monkeypatch, api_client):
"""Test FAKE environment for all GSPs are populating
with fake data.
"""

monkeypatch.setenv("FAKE", "1")
assert is_fake() == 1

# Connect to DB endpoint
response = api_client.get("/v0/solar/GB/gsp/forecast/all/")
assert response.status_code == 200

all_forecasts = ManyForecasts(**response.json())
assert all_forecasts is not None

# Disable is_fake environment
monkeypatch.setenv("FAKE", "0")


def test_is_fake_get_truths_for_all_gsps(
monkeypatch, api_client, gsp_ids=list(range(1, GSP_TOTAL))
):
"""Test FAKE environment for all GSPs for yesterday and today
are populating with fake data.
"""

monkeypatch.setenv("FAKE", "1")
assert is_fake() == 1

# Connect to DB endpoint
gsp_ids_str = ", ".join(map(str, gsp_ids))
response = api_client.get(f"/v0/solar/GB/gsp/pvlive/all?gsp_ids={gsp_ids_str}")
assert response.status_code == 200

all_forecasts = [LocationWithGSPYields(**f) for f in response.json()]
assert all_forecasts is not None

# Disable is_fake environment
monkeypatch.setenv("FAKE", "0")
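
The tests above enable `FAKE` and then reset it manually at the end of each case. A possible alternative (not part of this change, just a sketch) is a small fixture; `monkeypatch` undoes `setenv` automatically on teardown, so the explicit reset becomes unnecessary:

```python
import pytest

from nowcasting_datamodel.models import ManyForecasts


@pytest.fixture()
def fake_mode(monkeypatch):
    """Hypothetical fixture: enable FAKE for the duration of a single test."""
    monkeypatch.setenv("FAKE", "1")
    yield
    # No manual reset needed: monkeypatch restores the environment on teardown.


def test_all_available_forecasts_with_fixture(fake_mode, api_client):
    """Sketch of how an existing test could consume the fixture."""
    response = api_client.get("/v0/solar/GB/gsp/forecast/all/")
    assert response.status_code == 200
    assert ManyForecasts(**response.json()) is not None
```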
47 changes: 47 additions & 0 deletions src/tests/fake/test_national_fake.py
@@ -0,0 +1,47 @@
""" Test for main app """

from freezegun import freeze_time

from national import is_fake
from pydantic_models import NationalForecastValue, NationalYield


def test_is_fake_national_all_available_forecasts(monkeypatch, api_client):
"""Test FAKE environment for all GSPs are populating
with fake data.
"""

monkeypatch.setenv("FAKE", "1")
assert is_fake() == 1
# Connect to DB endpoint
response = api_client.get("/v0/solar/GB/national/forecast")
assert response.status_code == 200

national_forecast_values = [NationalForecastValue(**f) for f in response.json()]
assert national_forecast_values is not None

# Disable is_fake environment
monkeypatch.setenv("FAKE", "0")


# The freeze time is needed so the cache doesn't interact with the tests in test_national.py
# Ideally we would not have this
@freeze_time("2021-12-01")
def test_is_fake_national_get_truths_for_all_gsps(monkeypatch, api_client):
"""Test FAKE environment for all GSPs for yesterday and today
are populating with fake data.
"""

monkeypatch.setenv("FAKE", "1")
assert is_fake() == 1
# Connect to DB endpoint
response = api_client.get("/v0/solar/GB/national/pvlive/")
assert response.status_code == 200

print(response.json())

national_forecast_values = [NationalYield(**f) for f in response.json()]
assert national_forecast_values is not None

# Disable is_fake environment
monkeypatch.setenv("FAKE", "0")
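
On the `freeze_time("2021-12-01")` decorator above: freezegun pins the clock inside the decorated test, so any time-based cache bookkeeping sees a constant, past timestamp rather than the real run time, which is how the test avoids interacting with the cached responses from `test_national.py`. A minimal illustration of the pinning behaviour:

```python
from datetime import datetime

from freezegun import freeze_time


@freeze_time("2021-12-01")
def show_frozen_clock():
    # Inside the decorated scope every call to utcnow() returns the frozen
    # instant, so timestamps recorded here never match real wall-clock time.
    assert datetime.utcnow() == datetime(2021, 12, 1)


show_frozen_clock()
```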
2 changes: 2 additions & 0 deletions test-docker-compose.yml
@@ -21,6 +21,8 @@ services:
- DB_URL_PV=postgresql://postgres:postgres@postgres_db:5432/postgres
- GIT_PYTHON_REFRESH=quiet
- LOG_LEVEL=DEBUG
- DELETE_CACHE_TIME_SECONDS=0
- CACHE_TIME_SECONDS=0
command: >
bash -c "pytest --cov=./src
&& coverage report -m
