feat(caching): Now allows saving and loading data to local system
- Allows saving and caching data en masse from the API
- Reloads the data from the system on startup if specified
- Adds a few tests using `pytest`
FallenDeity committed Dec 20, 2023
1 parent 9ed3f66 commit fc029cc
Showing 38 changed files with 444 additions and 79 deletions.
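In practice the new behaviour matches the guide change further down in this diff: try to load a cache from disk, and fall back to pulling everything from the API and saving it for next time. A minimal sketch, assuming the client exposes a `cache_endpoints` flag on construction and that `./cache` is just an example directory:

```python
import asyncio

from pokelance import PokeLance


async def main() -> None:
    client = PokeLance(cache_endpoints=True)  # assumption: flag enables endpoint caching
    try:
        # Reuse a previously saved cache if one exists on disk.
        await client.berry.cache.berry_flavor.load(path="./cache")
        client.logger.info("Loaded berry flavors from disk.")
    except FileNotFoundError:
        client.logger.info("Loading berry flavors from API...")
        await client.berry.cache.berry_flavor.wait_until_ready()
        await client.berry.cache.berry_flavor.load_all()
        await client.berry.cache.berry_flavor.save(path="./cache")
    client.logger.info(f"Loaded {len(client.berry.cache.berry_flavor)} berry flavors.")
    await client.close()


asyncio.run(main())
```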
10 changes: 10 additions & 0 deletions .pre-commit-config.yaml
@@ -55,3 +55,13 @@ repos:
language: python
stages:
- commit

- repo: local
hooks:
- id: pytest
name: pytest
pass_filenames: false
entry: poetry run pytest
language: system
stages:
- commit
2 changes: 1 addition & 1 deletion docs/guide/advanced_cache.md
@@ -16,7 +16,7 @@ async def main():
except FileNotFoundError:
client.logger.info("Loading berry flavors from API...")
await client.berry.cache.berry_flavor.wait_until_ready()
await client.berry.cache.berry_flavor.load_all(client=client.http)
await client.berry.cache.berry_flavor.load_all()
await client.berry.cache.berry_flavor.save()
client.logger.info(f"Loaded {len(client.berry.cache.berry_flavor)} berry flavors.")
await client.close()
2 changes: 1 addition & 1 deletion mkdocs.yml
@@ -148,7 +148,7 @@ markdown_extensions:
alternate_style: true

extra:
version: 0.2.3
version: 0.2.4
generator: false
social:
- icon: fontawesome/brands/github
2 changes: 1 addition & 1 deletion mkdocs_R.yml
@@ -135,7 +135,7 @@ markdown_extensions:
alternate_style: true

extra:
version: 0.2.3
version: 0.2.4
generator: false
social:
- icon: fontawesome/brands/github
2 changes: 1 addition & 1 deletion pokelance/__init__.py
@@ -1,4 +1,4 @@
__version__ = "0.2.3"
__version__ = "0.2.4"
__author__ = "FallenDeity"

from .client import PokeLance
40 changes: 24 additions & 16 deletions pokelance/cache/cache.py
@@ -8,8 +8,8 @@
import attrs

if t.TYPE_CHECKING:
from pokelance import models # noqa: F401
from pokelance.http import HttpClient, Route # noqa: F401
from pokelance import PokeLance, models # noqa: F401
from pokelance.http import Route # noqa: F401
from pokelance.models import BaseModel # noqa: F401


@@ -106,6 +106,8 @@ class BaseCache(t.MutableMapping[_KT, _VT]):
The endpoints that are cached.
_endpoints_cached: bool
Whether or not the endpoints are cached.
_client: pokelance.PokeLance
The client that this cache is for.
Examples
--------
@@ -130,6 +132,8 @@ class BaseCache(t.MutableMapping[_KT, _VT]):
>>> asyncio.run(main())
"""

_client: "PokeLance"

def __init__(self, max_size: int = 100) -> None:
self._max_size = max_size
self._cache: t.Dict[_KT, _VT] = {}
@@ -201,8 +205,9 @@ def load_documents(self, data: t.List[t.Dict[str, str]]) -> None:
self._endpoints_cached = True

async def wait_until_ready(self) -> None:
"""Wait until the cache is ready."""
while not self._endpoints_cached:
"""Wait until the all the endpoints are cached."""
await self._client.http.connect()
while not self._endpoints_cached and self._client.cache_endpoints:
await asyncio.sleep(0.5)

async def save(self, path: str = ".") -> None:
@@ -214,9 +219,14 @@ async def save(self, path: str = ".") -> None:
The path to save the cache to.
"""
pathlib.Path(path).mkdir(parents=True, exist_ok=True)
dummy: t.Dict[str, t.Dict[str, t.Any]] = {k.endpoint: attrs.asdict(v) for k, v in self.items()}
dummy: t.Dict[str, t.Dict[str, t.Any]] = {k.endpoint: v.raw for k, v in self.items()}
async with aiofiles.open(pathlib.Path(f"{path}/{self.__class__.__name__}.json"), "w") as f:
await f.write(json.dumps(dummy, indent=4, ensure_ascii=False))
await f.write("{\n")
for n, (k, v) in enumerate(dummy.items()):
await f.write("\n".join([4 * " " + i for i in f'"{k}": {json.dumps(v, indent=4)}'.split("\n")]))
if n != len(dummy) - 1:
await f.write(",\n")
await f.write("\n}")

async def load(self, path: str = ".") -> None:
"""Load the cache from a file.
@@ -235,24 +245,22 @@ async def load(self, path: str = ".") -> None:
route = route_model(endpoint=endpoint)
self.setdefault(route, model.from_payload(info))

async def load_all(self, client: "HttpClient") -> None:
"""Load all documents into the cache.
Parameters
----------
client: HttpClient
The client to use to load the documents.
async def load_all(self) -> None:
"""
Load all documents/data from the API into the cache. (Endpoints must be cached first.)
"""
if not self._endpoints_cached:
raise RuntimeError("The endpoints have not been cached yet.")
client._client.logger.info(f"Loading {self.__class__.__name__}...")
self._client.logger.info(f"Loading {self.__class__.__name__}...")
route_model = importlib.import_module("pokelance.http").__dict__["Route"]
value_type = str(self.__orig_bases__[0].__args__[1]).split(".")[-1] # type: ignore
model: "models.BaseModel" = importlib.import_module("pokelance.models").__dict__[value_type]
self._max_size = len(self._endpoints)
for endpoint in self._endpoints.values():
route = route_model(endpoint=f"/{endpoint.url.strip('/').split('/')[-2]}/{str(endpoint)}")
self.setdefault(route, model.from_payload(await client.request(route)))
client._client.logger.info(f"Loaded {self.__class__.__name__}.")
data = self.get(route, None)
self.setdefault(route, data if data else model.from_payload(await self._client.http.request(route)))
self._client.logger.info(f"Loaded {self.__class__.__name__}.")

@property
def endpoints(self) -> t.Dict[str, Endpoint]:
37 changes: 36 additions & 1 deletion pokelance/cache/cache_manager.py
@@ -54,6 +54,10 @@
TypeCache,
)

if t.TYPE_CHECKING:
from pokelance import PokeLance


__all__: t.Tuple[str, ...] = (
"Cache",
"Base",
@@ -83,6 +87,19 @@ class Base:

max_size: int

def set_client(self, client: "PokeLance") -> None:
"""Set the client for the cache.
Parameters
----------
client: pokelance.PokeLance
The client to set.
"""
obj: attrs.Attribute[BaseCache[t.Any, t.Any]]
for obj in self.__attrs_attrs__: # type: ignore
if isinstance(obj.default, BaseCache) and obj.default is not None:
obj.default._client = client

def set_size(self, max_size: int = 100) -> None:
"""Set the maximum cache size.
@@ -374,6 +391,8 @@ class Cache:
Attributes
----------
client: PokeLance
The pokelance client.
max_size: int
The maximum cache size.
berry: Berry
@@ -398,6 +417,7 @@ class Cache:
The pokemon cache.
"""

client: "PokeLance"
max_size: int = 100
berry: Berry = attrs.field(default=Berry(max_size=max_size))
contest: Contest = attrs.field(default=Contest(max_size=max_size))
@@ -413,8 +433,23 @@
def __attrs_post_init__(self) -> None:
obj: attrs.Attribute[Base]
for obj in self.__attrs_attrs__: # type: ignore
if isinstance(obj.default, Base) and obj.default and obj.default.max_size != self.max_size:
if isinstance(obj.default, Base) and obj.default:
obj.default.set_size(self.max_size)
obj.default.set_client(self.client)

def set_size(self, max_size: int = 100) -> None:
"""Set the maximum cache size.
Parameters
----------
max_size: int
The maximum cache size.
"""
self.max_size = max_size
obj: attrs.Attribute[Base]
for obj in self.__attrs_attrs__: # type: ignore
if isinstance(obj.default, Base) and obj.default is not None:
obj.default.set_size(max_size)

def load_documents(self, category: str, _type: str, data: t.List[t.Dict[str, str]]) -> None:
"""Loads the endpoint data into the cache.
13 changes: 13 additions & 0 deletions pokelance/client.py
@@ -1,3 +1,4 @@
import asyncio
import typing as t
from functools import lru_cache
from pathlib import Path
@@ -250,6 +251,8 @@ async def from_url(self, url: str) -> BaseType:
------
ValueError
If the url is invalid.
ResourceNotFound
If the data is not found.
"""
if params := ExtensionEnum.validate_url(url):
return await self.getch_data(params.extension, params.category, params.value)
@@ -271,6 +274,16 @@ async def get_image_async(self, url: str) -> bytes:
"""
return await self._http.load_image(url)

async def wait_until_ready(self) -> None:
"""
Waits until the http client caches all the endpoints.
"""
await self._http.connect()
self.logger.info("Waiting until ready...")
while self._http._tasks_queue and self.cache_endpoints:
await asyncio.sleep(0.5)
self.logger.info("Ready!")

@property
def ext_tasks(self) -> t.List[t.Tuple[t.Callable[[], t.Coroutine[t.Any, t.Any, None]], str]]:
"""
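A short usage sketch for the new helper (assuming a default-constructed client and an `asyncio.run()` entry point; `from_url` is the existing method shown above):

```python
import asyncio

from pokelance import PokeLance


async def main() -> None:
    client = PokeLance()
    await client.wait_until_ready()  # blocks until every endpoint list has been cached
    berry = await client.from_url("https://pokeapi.co/api/v2/berry/1/")
    client.logger.info(f"Fetched {berry.name}.")
    await client.close()


asyncio.run(main())
```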
4 changes: 4 additions & 0 deletions pokelance/constants.py
@@ -61,6 +61,10 @@ class ShowdownEnum(BaseEnum):
FRONT_SHINY = PATH + "shiny/{}.gif"
BACK_DEFAULT = PATH + "back/{}.gif"
BACK_SHINY = PATH + "/back/shiny/{}.gif"
FRONT_FEMALE = PATH + "female/{}.gif"
FRONT_SHINY_FEMALE = PATH + "shiny/female/{}.gif"
BACK_FEMALE = PATH + "back/female/{}.gif"
BACK_SHINY_FEMALE = PATH + "back/shiny/female/{}.gif"

def __str__(self) -> str:
return self.value
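The new members follow the existing pattern: each value is a format string whose single placeholder is the pokemon's numeric id, and `__str__` exposes that string directly. A small example, assuming `PATH` points at the same showdown sprite base URL used by the other members:

```python
from pokelance.constants import ShowdownEnum

# __str__ returns the underlying format string, so a sprite URL is one .format() away.
url = str(ShowdownEnum.FRONT_SHINY_FEMALE).format(25)  # e.g. ".../shiny/female/25.gif"
```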
16 changes: 10 additions & 6 deletions pokelance/http/__init__.py
@@ -77,7 +77,7 @@ def __init__(
self._client = client
self.session = session
self._is_ready = False
self._cache = Cache(max_size=cache_size)
self._cache = Cache(max_size=cache_size, client=self._client)
self._tasks_queue: t.List[asyncio.Task[None]] = []

async def _load_ext(self, coroutine: t.Callable[[], t.Coroutine[t.Any, t.Any, None]], message: str) -> None:
Expand Down Expand Up @@ -118,7 +118,8 @@ async def close(self) -> None:

async def connect(self) -> None:
"""Connects the HTTP client."""
self.session = self.session or aiohttp.ClientSession()
if self.session is None:
self.session = self.session or aiohttp.ClientSession()
if not self._is_ready:
if self._client.cache_endpoints:
await self._schedule_tasks()
@@ -142,8 +143,7 @@ async def request(self, route: Route) -> t.Any:
pokelance.exceptions.HTTPException
An error occurred while making the request.
"""
if self.session is None or not self._is_ready:
await self.connect()
await self.connect()
if self.session is not None:
async with self.session.request(route.method, route.url, params=route.payload) as response:
if 300 > response.status >= 200:
@@ -167,9 +167,13 @@ async def load_image(self, url: str) -> bytes:
-------
bytes
The image.
Raises
------
pokelance.exceptions.ImageNotFound
The image was not found.
"""
if self.session is None or not self._is_ready:
await self.connect()
await self.connect()
_image_formats = ("png", "jpg", "jpeg", "gif", "webp", "svg")
if self.session is not None:
async with self.session.get(url) as response:
13 changes: 12 additions & 1 deletion pokelance/logger.py
@@ -1,6 +1,7 @@
import datetime
import enum
import logging
import os
import pathlib
import typing as t

@@ -24,6 +25,13 @@ class LogLevelColors(enum.Enum):
FLAIR = "\033[95m"


class RelativePathFilter(logging.Filter):
def filter(self, record: logging.LogRecord) -> bool:
"""Filter the log record."""
record.pathname = record.pathname.replace(os.getcwd(), "~")
return True


class Formatter(logging.Formatter):
"""Format the log record."""

@@ -105,17 +113,20 @@ class Logger(logging.Logger):
def __init__(self, *, name: str, level: int = logging.DEBUG, file_logging: bool = False) -> None:
super().__init__(name, level)
self._handler = logging.StreamHandler()
self._handler.addFilter(RelativePathFilter())
self._handler.setFormatter(Formatter())
self.addHandler(self._handler)
if file_logging:
self._file_handler = FileHandler(ext=name)
self._file_handler.addFilter(RelativePathFilter())
self.addHandler(self._file_handler)
logging.addLevelName(FLAIR, "FLAIR")

def set_formatter(self, formatter: logging.Formatter) -> None:
"""Set the formatter."""
self._handler.setFormatter(formatter)
self._file_handler.setFormatter(formatter)
if self._file_handler is not None:
self._file_handler.setFormatter(formatter)

def flair(self, message: str, *args: t.Any, **kwargs: t.Any) -> None:
"""Record a flair log."""
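For context, the filter added above is standard `logging` machinery: a `logging.Filter` may mutate the record before it reaches the formatter. A standalone sketch of the same idea outside the library:

```python
import logging
import os


class RelativePathFilter(logging.Filter):
    def filter(self, record: logging.LogRecord) -> bool:
        # Rewrite absolute paths so log lines stay readable across machines.
        record.pathname = record.pathname.replace(os.getcwd(), "~")
        return True


handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(pathname)s:%(lineno)d | %(message)s"))
handler.addFilter(RelativePathFilter())
logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.warning("hello")  # prints something like "~/example.py:18 | hello"
```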
4 changes: 3 additions & 1 deletion pokelance/models/_base.py
@@ -7,6 +7,8 @@
class BaseModel(attrs.AttrsInstance):
"""Base model for all models"""

raw: t.Dict[str, t.Any] = attrs.field(factory=dict, repr=False, eq=False, order=False)

def to_dict(self) -> t.Dict[str, t.Any]:
"""Convert the model to a dict
@@ -31,4 +33,4 @@ def from_payload(cls, payload: t.Dict[str, t.Any]) -> "BaseModel":
BaseModel
The model created from the payload.
"""
return cls()
return cls(raw=payload)
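The stored `raw` payload is what makes the new save/load round trip possible: `save()` writes it to disk and `load()` later feeds it back through `from_payload()`. A small sketch with a hypothetical, truncated payload (assumes the omitted fields fall back to their `.get()` defaults, as the concrete models below do):

```python
from pokelance.models import Berry

payload = {"id": 1, "name": "cheri", "growth_time": 3}
berry = Berry.from_payload(payload)  # parsed fields as before...
assert berry.raw == payload          # ...plus the untouched payload, kept for persistence
```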
3 changes: 3 additions & 0 deletions pokelance/models/abstract/berry.py
@@ -64,6 +64,7 @@ class Berry(BaseModel):
@classmethod
def from_payload(cls, data: t.Dict[str, t.Any]) -> "Berry":
return cls(
raw=data,
id=data.get("id", 0),
name=data.get("name", ""),
growth_time=data.get("growth_time", 0),
@@ -103,6 +104,7 @@ class BerryFirmness(BaseModel):
@classmethod
def from_payload(cls, data: t.Dict[str, t.Any]) -> "BerryFirmness":
return cls(
raw=data,
id=data.get("id", 0),
name=data.get("name", ""),
berries=[NamedResource.from_payload(berry) for berry in data.get("berries", [])],
@@ -137,6 +139,7 @@ class BerryFlavor(BaseModel):
@classmethod
def from_payload(cls, data: t.Dict[str, t.Any]) -> "BerryFlavor":
return cls(
raw=data,
id=data.get("id", 0),
name=data.get("name", ""),
berries=[FlavorBerryMap.from_payload(berry) for berry in data.get("berries", [])],