diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index d20d3b4..430dabf 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,5 +1,5 @@
 default_language_version:
-  python: python3.9
+  python: python3
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
diff --git a/README.md b/README.md
index dc54313..f78b0b3 100644
--- a/README.md
+++ b/README.md
@@ -88,6 +88,7 @@
 Deployment of this service is now done through terraform cloud.
 
 - `FORECAST_ERROR_HOURS` - using route `/v0/system/GBstatus/check_last_forecast_run` we can check if a forecast has been made in the last `FORECAST_ERROR_HOURS` hours
 - `ADJUST_MW_LIMIT` - the maximum the api is allowed to adjust the national forecast by
+- `FAKE` - This allows fake data to be used, rather than connecting to a database
 
 ## Routes to SQL tables
diff --git a/src/cache.py b/src/cache.py
index a019096..a516ea9 100644
--- a/src/cache.py
+++ b/src/cache.py
@@ -37,6 +37,8 @@ def remove_old_cache(
             logger.debug(f"Removing {key} from cache, ({value})")
             keys_to_remove.append(key)
 
+    logger.debug(f"Removing {len(keys_to_remove)} keys from cache")
+
     for key in keys_to_remove:
         try:
             last_updated.pop(key)
@@ -129,7 +131,7 @@ def wrapper(*args, **kwargs):  # noqa
                 return response[route_variables]
 
             # use cache
-            logger.debug("Using cache route")
+            logger.debug(f"Using cache route, cache made at {last_updated[route_variables]}")
             return response[route_variables]
 
     return wrapper
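For context on the two `src/cache.py` hunks above, here is a minimal, self-contained sketch of the pruning pattern the new log lines sit inside. The function name and shape mirror `remove_old_cache`, but the TTL default and the exact dict layouts are assumptions for illustration, not the module's exact API:

```python
from datetime import datetime, timedelta, timezone


def remove_old_cache(
    last_updated: dict, response: dict, remove_cache_time_seconds: float = 120
) -> None:
    """Drop cached responses older than the allowed age (illustrative sketch only)."""
    now = datetime.now(tz=timezone.utc)
    keys_to_remove = []
    for key, value in last_updated.items():
        if now - value > timedelta(seconds=remove_cache_time_seconds):
            print(f"Removing {key} from cache, ({value})")
            keys_to_remove.append(key)

    print(f"Removing {len(keys_to_remove)} keys from cache")

    for key in keys_to_remove:
        # pop with a default so a key missing from either dict never raises
        last_updated.pop(key, None)
        response.pop(key, None)
```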
diff --git a/src/gsp.py b/src/gsp.py
index 4178e8c..073e0ff 100644
--- a/src/gsp.py
+++ b/src/gsp.py
@@ -4,9 +4,11 @@
 from typing import List, Optional, Union
 
 import structlog
+from dotenv import load_dotenv
 from fastapi import APIRouter, Depends, Request, Security, status
 from fastapi.responses import Response
 from fastapi_auth0 import Auth0User
+from nowcasting_datamodel.fake import make_fake_forecast, make_fake_forecasts, make_fake_gsp_yields
 from nowcasting_datamodel.models import Forecast, ForecastValue, ManyForecasts
 from sqlalchemy.orm.session import Session
 
@@ -32,6 +34,7 @@
 logger = structlog.stdlib.get_logger()
 
 adjust_limit = float(os.getenv("ADJUST_MW_LIMIT", 0.0))
+load_dotenv()
 
 
 router = APIRouter(
@@ -40,7 +43,12 @@
 NationalYield = GSPYield
 
 
-# corresponds to route /v0/solar/GB/gsp/forecast/all
+def is_fake():
+    """Check whether the FAKE environment variable is set (fake data mode)"""
+    return int(os.environ.get("FAKE", 0))
+
+
+# corresponds to route /v0/solar/GB/gsp/forecast/all/
 @router.get(
     "/forecast/all/",
     response_model=Union[ManyForecasts, List[OneDatetimeManyForecastValues]],
@@ -80,11 +88,17 @@ def get_all_available_forecasts(
     - **end_datetime_utc**: optional end datetime for the query. e.g '2023-08-12 14:00:00+00:00'
     """
-    logger.info(f"Get forecasts for all gsps. The option is {historic=} for user {user}")
-
-    if gsp_ids is not None:
+    if isinstance(gsp_ids, str):
         gsp_ids = [int(gsp_id) for gsp_id in gsp_ids.split(",")]
 
+    if is_fake():
+        if gsp_ids is None:
+            gsp_ids = [int(gsp_id) for gsp_id in range(1, GSP_TOTAL)]
+
+        make_fake_forecasts(gsp_ids=gsp_ids, session=session)
+
+    logger.info(f"Get forecasts for all gsps. The option is {historic=} for user {user}")
+
     start_datetime_utc = format_datetime(start_datetime_utc)
     end_datetime_utc = format_datetime(end_datetime_utc)
     creation_limit_utc = format_datetime(creation_limit_utc)
@@ -137,6 +151,10 @@ def get_forecasts_for_a_specific_gsp_old_route(
     user: Auth0User = Security(get_user()),
 ) -> Union[Forecast, List[ForecastValue]]:
     """Redirects old API route to new route /v0/solar/GB/gsp/{gsp_id}/forecast"""
+
+    if is_fake():
+        make_fake_forecast(gsp_id=gsp_id, session=session)
+
     return get_forecasts_for_a_specific_gsp(
         request=request,
         gsp_id=gsp_id,
@@ -185,6 +203,8 @@ def get_forecasts_for_a_specific_gsp(
     - **creation_utc_limit**: optional, only return forecasts made before this datetime.
     returns the latest forecast made 60 minutes before the target time)
     """
+    if is_fake():
+        make_fake_forecast(gsp_id=gsp_id, session=session)
 
     logger.info(f"Get forecasts for gsp id {gsp_id} forecast of forecast with only values.")
     logger.info(f"This is for user {user}")
@@ -251,11 +271,18 @@ def get_truths_for_all_gsps(
     - **start_datetime_utc**: optional start datetime for the query.
     - **end_datetime_utc**: optional end datetime for the query.
     """
-    logger.info(f"Get PV Live estimates values for all gsp id and regime {regime} for user {user}")
-
-    if gsp_ids is not None:
+    if isinstance(gsp_ids, str):
         gsp_ids = [int(gsp_id) for gsp_id in gsp_ids.split(",")]
 
+    if is_fake():
+        if gsp_ids is None:
+            gsp_ids = [int(gsp_id) for gsp_id in range(1, GSP_TOTAL)]
+
+        make_fake_gsp_yields(gsp_ids=gsp_ids, session=session)
+
+    logger.info(f"Get PV Live estimates values for all gsp id and regime {regime} for user {user}")
+
     start_datetime_utc = format_datetime(start_datetime_utc)
     end_datetime_utc = format_datetime(end_datetime_utc)
 
@@ -286,6 +313,10 @@ def get_truths_for_a_specific_gsp_old_route(
     user: Auth0User = Security(get_user()),
 ) -> List[GSPYield]:
     """Redirects old API route to new route /v0/solar/GB/gsp/{gsp_id}/pvlive"""
+
+    if is_fake():
+        make_fake_gsp_yields(gsp_ids=[gsp_id], session=session)
+
     return get_truths_for_a_specific_gsp(
         request=request,
         gsp_id=gsp_id,
@@ -331,6 +362,9 @@ def get_truths_for_a_specific_gsp(
     If not set, defaults to N_HISTORY_DAYS env var,
     which if not set defaults to yesterday.
     """
+    if is_fake():
+        make_fake_gsp_yields(gsp_ids=[gsp_id], session=session)
+
     logger.info(
         f"Get PV Live estimates values for gsp id {gsp_id} " f"and regime {regime} for user {user}"
     )
diff --git a/src/national.py b/src/national.py
index ce9135f..ef03d28 100644
--- a/src/national.py
+++ b/src/national.py
@@ -10,6 +10,7 @@
 from elexonpy.api_client import ApiClient
 from fastapi import APIRouter, Depends, HTTPException, Query, Request, Security
 from fastapi_auth0 import Auth0User
+from nowcasting_datamodel.fake import make_fake_forecast, make_fake_gsp_yields
 from nowcasting_datamodel.read.read import get_latest_forecast_for_gsps
 from sqlalchemy.orm.session import Session
 
@@ -43,6 +44,11 @@
 elexon_forecast_api = GenerationForecastApi(api_client)
 
 
+def is_fake():
+    """Check whether the FAKE environment variable is set (fake data mode)"""
+    return int(os.environ.get("FAKE", 0))
+
+
 @router.get(
     "/forecast",
     response_model=Union[NationalForecast, List[NationalForecastValue]],
@@ -88,6 +94,9 @@ def get_national_forecast(
     """
     logger.debug("Get national forecasts")
 
+    if is_fake():
+        make_fake_forecast(gsp_id=0, session=session)
+
     start_datetime_utc = format_datetime(start_datetime_utc)
     end_datetime_utc = format_datetime(end_datetime_utc)
     creation_limit_utc = format_datetime(creation_limit_utc)
@@ -204,6 +213,9 @@ def get_national_pvlive(
     """
     logger.info(f"Get national PV Live estimates values " f"for regime {regime} for {user}")
 
+    if is_fake():
+        make_fake_gsp_yields(gsp_ids=[0], session=session)
+
     return get_truth_values_for_a_specific_gsp_from_database(
         session=session, gsp_id=0, regime=regime
     )
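The pattern added in both modules is the same: each route checks `is_fake()` and, when it is truthy, seeds fake rows via the `nowcasting_datamodel.fake` helpers before the normal query logic runs. Below is a minimal sketch of exercising one of these routes with FastAPI's `TestClient`; the `main` import path is an assumption (the real test suite wires this up through the `api_client` fixture and a test database), not part of this diff:

```python
import os

from fastapi.testclient import TestClient

os.environ["FAKE"] = "1"  # is_fake() re-reads this on every request

from main import app  # assumed location of the FastAPI app object

client = TestClient(app)

# With FAKE=1 the route seeds fake forecasts via make_fake_forecasts(...)
# before querying, so a 200 response with forecast values is expected.
response = client.get("/v0/solar/GB/gsp/forecast/all/")
assert response.status_code == 200
```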
""" + if is_fake(): + make_fake_forecast(gsp_id=gsp_id, session=session) + logger.info( f"Get PV Live estimates values for gsp id {gsp_id} " f"and regime {regime} for user {user}" ) diff --git a/src/national.py b/src/national.py index ce9135f..ef03d28 100644 --- a/src/national.py +++ b/src/national.py @@ -10,6 +10,7 @@ from elexonpy.api_client import ApiClient from fastapi import APIRouter, Depends, HTTPException, Query, Request, Security from fastapi_auth0 import Auth0User +from nowcasting_datamodel.fake import make_fake_forecast, make_fake_gsp_yields from nowcasting_datamodel.read.read import get_latest_forecast_for_gsps from sqlalchemy.orm.session import Session @@ -43,6 +44,11 @@ elexon_forecast_api = GenerationForecastApi(api_client) +def is_fake(): + """Start FAKE environment""" + return int(os.environ.get("FAKE", 0)) + + @router.get( "/forecast", response_model=Union[NationalForecast, List[NationalForecastValue]], @@ -88,6 +94,9 @@ def get_national_forecast( """ logger.debug("Get national forecasts") + if is_fake: + make_fake_forecast(gsp_id=0, session=session) + start_datetime_utc = format_datetime(start_datetime_utc) end_datetime_utc = format_datetime(end_datetime_utc) creation_limit_utc = format_datetime(creation_limit_utc) @@ -204,6 +213,9 @@ def get_national_pvlive( """ logger.info(f"Get national PV Live estimates values " f"for regime {regime} for {user}") + if is_fake(): + make_fake_gsp_yields(gsp_ids=[0], session=session) + return get_truth_values_for_a_specific_gsp_from_database( session=session, gsp_id=0, regime=regime ) diff --git a/src/tests/fake/test_gsp_fake.py b/src/tests/fake/test_gsp_fake.py new file mode 100644 index 0000000..79780bd --- /dev/null +++ b/src/tests/fake/test_gsp_fake.py @@ -0,0 +1,88 @@ +from nowcasting_datamodel.models import ForecastValue, LocationWithGSPYields, ManyForecasts + +from gsp import GSP_TOTAL, is_fake + + +def test_is_fake_specific_gsp(monkeypatch, api_client, gsp_id=1): + """### Test FAKE environment specific _gsp_id_ routes are populating + with fake data. + + #### Parameters + - **gsp_id**: Please set to any non-zero integer that is <= GSP_TOTAL + """ + + monkeypatch.setenv("FAKE", "1") + assert is_fake() == 1 + + # Specific _gsp_id_ route/endpoint for successful connection + response = api_client.get(f"/v0/solar/GB/gsp/{gsp_id}/forecast") + assert response.status_code == 200 + + forecast_value = [ForecastValue(**f) for f in response.json()] + assert forecast_value is not None + + # Disable is_fake environment + monkeypatch.setenv("FAKE", "0") + + +def test_is_fake_get_truths_for_a_specific_gsp(monkeypatch, api_client, gsp_id=1): + """### Test FAKE environment specific _gsp_id_ routes are populating + with fake data. + + #### Parameters + - **gsp_id**: Please set to any non-zero integer that is <= GSP_TOTAL + """ + + monkeypatch.setenv("FAKE", "1") + assert is_fake() == 1 + + # Specific _gsp_id_ route/endpoint for successful connection + response = api_client.get(f"/v0/solar/GB/gsp/{gsp_id}/pvlive") + assert response.status_code == 200 + + forecast_value = [ForecastValue(**f) for f in response.json()] + assert forecast_value is not None + + # Disable is_fake environment + monkeypatch.setenv("FAKE", "0") + + +def test_is_fake_all_available_forecasts(monkeypatch, api_client): + """Test FAKE environment for all GSPs are populating + with fake data. 
diff --git a/test-docker-compose.yml b/test-docker-compose.yml
index 1870d6a..6838d11 100644
--- a/test-docker-compose.yml
+++ b/test-docker-compose.yml
@@ -21,6 +21,8 @@ services:
       - DB_URL_PV=postgresql://postgres:postgres@postgres_db:5432/postgres
       - GIT_PYTHON_REFRESH=quiet
       - LOG_LEVEL=DEBUG
+      - DELETE_CACHE_TIME_SECONDS=0
+      - CACHE_TIME_SECONDS=0
     command: >
       bash -c "pytest --cov=./src && coverage report -m
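A note on the two new test-compose variables: setting both cache windows to zero presumably makes every cached entry expire immediately, so the new FAKE tests never read a stale response cached by an earlier test. The snippet below only illustrates that zero-TTL effect; it is not the actual logic in `src/cache.py`:

```python
from datetime import datetime, timedelta, timezone

CACHE_TIME_SECONDS = 0  # as set in test-docker-compose.yml

cached_at = datetime.now(tz=timezone.utc)
# With a zero TTL an entry is stale as soon as it is written, so the next
# request recomputes its response instead of reusing the cached one.
is_stale = datetime.now(tz=timezone.utc) - cached_at >= timedelta(seconds=CACHE_TIME_SECONDS)
assert is_stale
```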