Skip to content

Commit

Permalink
remove unused imports and fix docstrings
Browse files Browse the repository at this point in the history
  • Loading branch information
Thomas Maschler committed Jun 6, 2020
1 parent 8d53a4a commit 22a2ed0
Show file tree
Hide file tree
Showing 37 changed files with 101 additions and 566 deletions.
2 changes: 1 addition & 1 deletion .isort.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@
line_length = 88
multi_line_output = 3
include_trailing_comma = True
known_third_party = alembic,asyncpg,boto3,click,create_table,docker,fastapi,fiona,geoalchemy2,gino,gino_starlette,load,moto,pendulum,post_processing,psycopg2,pydantic,pytest,rasterio,requests,sqlalchemy,sqlalchemy_utils,starlette
known_third_party = alembic,asyncpg,boto3,click,docker,fastapi,fiona,geoalchemy2,gino,gino_starlette,moto,pendulum,psycopg2,pydantic,pytest,rasterio,requests,sqlalchemy,sqlalchemy_utils,starlette
6 changes: 2 additions & 4 deletions app/application.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,7 @@


class ContextualGino(Gino):
"""
Overide the Gino Metadata object to allow to dynamically change the binds
"""
"""Override the Gino Metadata object to allow to dynamically change the binds."""

@property
def bind(self):
Expand Down Expand Up @@ -61,7 +59,7 @@ def __init__(self, method):
self.method = method

async def __aenter__(self):
""" initialize objects """
"""initialize objects"""
try:
e = CURRENT_ENGINE.get()
except LookupError:
Expand Down
5 changes: 2 additions & 3 deletions app/crud/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,8 @@
async def update_data(
row: db.Model, input_data: Union[BaseModel, Dict[str, Any]] # type: ignore
) -> db.Model: # type: ignore
"""
Merge updated metadata filed with existing fields
"""
"""Merge updated metadata filed with existing fields."""

if isinstance(input_data, BaseModel):
input_data = input_data.dict(skip_defaults=True)

Expand Down
5 changes: 2 additions & 3 deletions app/crud/datasets.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,8 @@


async def get_datasets() -> List[ORMDataset]:
"""
Get list of all datasets
"""
"""Get list of all datasets."""

rows = await db.all(all_datasets)
return rows

Expand Down
5 changes: 2 additions & 3 deletions app/crud/versions.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,8 @@ async def get_version(dataset: str, version: str) -> ORMVersion:


async def get_latest_version(dataset) -> str:
"""
Fetch latest version number
"""
"""Fetch latest version number."""

latest: Optional[str] = await ORMVersion.select("version").where(
ORMVersion.dataset == dataset
).where(ORMVersion.is_latest).gino.scalar()
Expand Down
4 changes: 4 additions & 0 deletions app/middleware.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,11 @@

async def set_db_mode(request: Request, call_next):
"""
This middleware replaces the db engine depending on the request type.
Read requests use the read only pool.
Write requests use the write pool.
"""

async with ContextEngine(request.method):
Expand All @@ -19,7 +21,9 @@ async def set_db_mode(request: Request, call_next):

async def redirect_latest(request: Request, call_next):
"""
Redirect all GET requests using latest version to actual version number.
"""

if request.method == "GET" and "latest" in request.url.path:
Expand Down
1 change: 0 additions & 1 deletion app/models/orm/base.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from datetime import datetime

import geoalchemy2
from geoalchemy2 import Geometry
from sqlalchemy.dialects.postgresql import ARRAY, JSONB, TEXT, UUID
from sqlalchemy_utils import EmailType, generic_repr
Expand Down
12 changes: 8 additions & 4 deletions app/models/orm/migrations/env.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
""" env.py
Alembic ENV module
Alembic ENV module
isort:skip_file
"""

Expand Down Expand Up @@ -46,15 +46,17 @@ def exclude_tables_from_config(config_):
exclude_tables = exclude_tables_from_config(config.get_section("alembic:exclude"))


def include_object(obj, name, type_, reflected, compare_to):
    """Alembic ``include_object`` hook: decide whether *obj* is autogenerated.

    Tables listed in the ``alembic:exclude`` config section (collected in the
    module-level ``exclude_tables`` set) are skipped; everything else is
    included. Parameter renamed from ``object`` to ``obj`` to avoid shadowing
    the builtin.
    """
    # Only table objects can be excluded; short-circuit keeps non-table
    # checks from touching exclude_tables at all.
    return not (type_ == "table" and name in exclude_tables)


def run_migrations_offline():
"""Run migrations in 'offline' mode.
"""
Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
Expand All @@ -77,7 +79,9 @@ def run_migrations_offline():


def run_migrations_online():
"""Run migrations in 'online' mode.
"""
Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
Expand Down
4 changes: 2 additions & 2 deletions app/models/pydantic/creation_options.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
from datetime import date
from enum import Enum
from typing import Any, Dict, List, Optional, Tuple, Union
from typing import List, Optional, Union

from pydantic import BaseModel, Field
from pydantic.types import PositiveInt, constr
from pydantic.types import PositiveInt

COLUMN_REGEX = r"^[a-z][a-zA-Z0-9_-]{2,}$"
PARTITION_SUFFIX_REGEX = r"^[a-z0-9_-]{3,}$"
Expand Down
2 changes: 1 addition & 1 deletion app/models/pydantic/database.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from typing import Any, Optional, Union

from pydantic import BaseModel, Field, Schema, fields, validator
from pydantic import BaseModel, Field, fields, validator
from sqlalchemy.engine.url import URL
from starlette.datastructures import Secret

Expand Down
2 changes: 1 addition & 1 deletion app/models/pydantic/jobs.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Any, Dict, List, Optional
from typing import Dict, List, Optional

from pydantic import BaseModel

Expand Down
2 changes: 1 addition & 1 deletion app/models/pydantic/metadata.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from datetime import date
from typing import Any, Dict, List, Optional, Union
from typing import Any, Dict, List, Optional

from pydantic import BaseModel, Field

Expand Down
9 changes: 4 additions & 5 deletions app/models/pydantic/versions.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
from enum import Enum
from typing import Any, Dict, List, Optional, Tuple, Union
from typing import List, Optional, Tuple

from pydantic import BaseModel, Field
from pydantic import BaseModel

from .base import Base
from .change_log import ChangeLog
from .creation_options import CreationOptions, VectorSourceCreationOptions
from .metadata import FieldMetadata, VersionMetadata
from .creation_options import CreationOptions
from .metadata import VersionMetadata
from .sources import SourceType


Expand Down
20 changes: 2 additions & 18 deletions app/routes/__init__.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,8 @@
from typing import Tuple

import requests
from fastapi import Depends, Form, HTTPException, Path
from fastapi import Depends, HTTPException, Path
from fastapi.logger import logger
from fastapi.security import OAuth2PasswordBearer

from app.crud.versions import get_latest_version

DATASET_REGEX = r"^[a-z][a-z0-9_-]{2,}$"
VERSION_REGEX = r"^v\d{1,8}\.?\d{1,3}\.?\d{1,3}$|^latest$"
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/token")
Expand Down Expand Up @@ -35,20 +31,8 @@ async def version_dependency(
return version


# async def version_dependency_form(
# version: str = Form(..., title="Dataset version", regex=VERSION_REGEX)
# ):
#
# if version == "latest":
# version = await get_latest_version
#
# return version


async def is_admin(token: str = Depends(oauth2_scheme)) -> bool:
"""
Calls GFW API to authorize user
"""
"""Calls GFW API to authorize user."""

headers = {"Authorization": f"Bearer {token}"}
url = "https://production-api.globalforestwatch.org/auth/check-logged"
Expand Down
67 changes: 11 additions & 56 deletions app/routes/assets.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,8 @@ async def get_assets(
version: str = Depends(version_dependency),
asset_type: Optional[AssetType] = Query(None, title="Filter by Asset Type"),
):
"""
Get all assets for a given dataset version
"""
"""Get all assets for a given dataset version."""

rows: List[ORMAsset] = await assets.get_assets(dataset, version)

# Filter rows by asset type
Expand All @@ -65,9 +64,7 @@ async def get_asset(
version: str = Depends(version_dependency),
asset_id: UUID = Path(...),
):
"""
Get a specific asset
"""
"""Get a specific asset."""
row: ORMAsset = await assets.get_asset(asset_id)

if row.dataset != dataset and row.version != version:
Expand All @@ -88,9 +85,7 @@ async def get_asset(
async def get_assets_root(
*, asset_type: Optional[AssetType] = Query(None, title="Filter by Asset Type")
):
"""
Get all assets
"""
"""Get all assets."""
if asset_type:
rows: List[ORMAsset] = await assets.get_assets_by_type(asset_type)
else:
Expand All @@ -106,9 +101,7 @@ async def get_assets_root(
response_model=Asset,
)
async def get_asset_root(*, asset_id: UUID = Path(...)):
"""
Get a specific asset
"""
"""Get a specific asset."""
row: ORMAsset = await assets.get_asset(asset_id)
return row

Expand All @@ -129,10 +122,12 @@ async def add_new_asset(
response: Response,
):
"""
Add a new asset to a dataset version. Managed assets will be generated by the API itself.
In that case, the Asset URI is read only and will be set automatically.
If the asset is not managed, you need to specify an Asset URI to link to.
"""
# row: ORMAsset = ...
# response.headers["Location"] = f"/{dataset}/{version}/asset/{row.asset_id}"
Expand All @@ -154,8 +149,10 @@ async def delete_asset(
is_authorized: bool = Depends(is_admin),
):
"""
Delete selected asset.
For managed assets, all resources will be deleted. For non-managed assets, only the link will be deleted.
"""
pass

Expand All @@ -169,61 +166,19 @@ async def asset_history(
request: ChangeLog,
is_authorized: bool = Depends(is_admin),
):
"""
Log changes for given asset
"""
"""Log changes for given asset."""

row = await assets.update_asset(asset_id, change_log=[request.dict()])

return await _asset_response(row)


async def _asset_response(data: ORMAsset) -> Dict[str, Any]:
"""
Serialize ORM response
"""
"""Serialize ORM response."""
response = Asset.from_orm(data).dict(by_alias=True)
return response


async def _create_database_table():
    """Create a database table asset (not yet implemented).

    Supported input types:
      * table
      * vector

    Planned steps:
      * create table and upload data into database
      * inherit from geostore (vector / polygon only)

    Planned creation options:
      * indices (which fields get an index, what kind of index?)
      * partitioning (how to partition table, on what field?)
      * cluster (how to cluster table, based on which index?)
      * force field types (don't let loader guess field types,
        but provide list of field types instead)

    Planned custom metadata:
      * fields (field name, field alias, field description, field type,
        is filter, is feature info)
    """
    raise NotImplementedError


async def _create_dynamic_vector_tile_cache():
    """Create a dynamic vector tile cache asset (not yet implemented).

    Supported input types:
      * vector

    Planned steps:
      * wait until database table is created
      * create dynamic vector tile asset entry to enable service

    Planned creation options:
      * default symbology / legend

    Planned custom metadata:
      * default symbology / legend
    """
    raise NotImplementedError


async def _create_static_vector_tile_cache():
# supported input types
# - vector
Expand Down
Loading

0 comments on commit 22a2ed0

Please sign in to comment.