Commit 72dab26 ("lint")
RyEggGit committed Jan 11, 2024
1 parent da9cfca
Showing 4 changed files with 21 additions and 11 deletions.
backend/database/core.py (24 changes: 17 additions & 7 deletions)
@@ -48,12 +48,13 @@ def get(cls, id: Any, abort_if_null: bool = True):
 
     """https://stackoverflow.com/questions/18147435/how-to-exclude-specific-fields-on-serialization-with-jsonpickle"""
 
-    def __getstate__(self, stringify_dates: list[str] = []):
+    def __getstate__(self):
         """
         Get the state of the object for pickling.
         Args:
-            stringify_dates (bool): Whether to convert datetime objects to strings.
+            stringify_dates (bool): Whether to convert datetime objects to
+                strings.
         Returns:
             dict: The state of the object.
@@ -70,12 +71,15 @@ def __setstate__(self, state: dict[str, Any]):
         Set the state of the object using the provided dictionary.
         Args:
-            state (dict[str, Any]): The dictionary containing the state of the object.
+            state (dict[str, Any]): The dictionary containing the state of the
+                object.
         """
         self.__dict__.update(state)
 
 
-QUERIES_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "queries"))
+QUERIES_DIR = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), "queries")
+)
 
 
 def execute_query(filename: str) -> Optional[pd.DataFrame]:
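
The two hunks above touch the pickle-protocol hooks that the Stack Overflow link refers to: jsonpickle (like pickle) calls `__getstate__` to obtain the dict it serializes and `__setstate__` to restore it, so returning a filtered copy of `__dict__` is how specific fields get excluded. A minimal sketch of that pattern; the excluded field name below is an assumption for illustration, not taken from core.py:

```python
from typing import Any


class SerializableMixin:
    def __getstate__(self) -> dict[str, Any]:
        # Copy the instance attributes and drop anything that should not be
        # serialized (e.g. SQLAlchemy's internal instrumentation attribute).
        state = self.__dict__.copy()
        state.pop("_sa_instance_state", None)  # assumed excluded field
        return state

    def __setstate__(self, state: dict[str, Any]) -> None:
        # Restore the instance from a previously captured state dict.
        self.__dict__.update(state)
```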
@@ -127,13 +131,17 @@ def db_cli(ctx: click.Context):
 @pass_psql_admin_connection
 @click.pass_context
 @dev_only
-def create_database(ctx: click.Context, conn: connection, overwrite: bool = False):
+def create_database(
+    ctx: click.Context, conn: connection, overwrite: bool = False
+):
     """Create the database from nothing."""
     database = current_app.config["POSTGRES_DB"]
     cursor = conn.cursor()
 
     if overwrite:
-        cursor.execute(f"SELECT bool_or(datname = '{database}') FROM pg_database;")
+        cursor.execute(
+            f"SELECT bool_or(datname = '{database}') FROM pg_database;"
+        )
         exists = cursor.fetchall()[0][0]
         if exists:
             ctx.invoke(delete_database)
@@ -194,7 +202,9 @@ def delete_database(conn: connection, test_db: bool):
     )
     confirmation = click.prompt("Database name")
     if database != confirmation:
-        click.echo("The input does not match. " "The database will not be deleted.")
+        click.echo(
+            "The input does not match. " "The database will not be deleted."
+        )
         return None
 
     try:
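One detail in the `click.echo` hunk above: `"The input does not match. " "The database will not be deleted."` uses Python's implicit concatenation of adjacent string literals, a common way to split a long message across wrapped lines without a `+`. A small illustration:

```python
# Adjacent string literals are joined at compile time into a single string.
message = (
    "The input does not match. "
    "The database will not be deleted."
)
assert message == "The input does not match. The database will not be deleted."
```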
backend/scraper/run_scrape.py (4 changes: 2 additions & 2 deletions)
@@ -24,7 +24,8 @@ def add_to_database(
         None
     """
     logger = logging.Logger("scrape")
-    if cache.get_json(uid, table):
+    model_dict = model.__getstate__()
+    if cache.get_json(uid, table) == model_dict:
        logger.info(f"{table} {uid} already in cache")
        return
 
@@ -49,7 +50,6 @@ def add_to_database(
     model.create()
 
     # add the model to the cache
-    model_dict = model.__getstate__()
     try:
         cache.set_json(uid, model_dict, table)
     except Exception as e:
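Taken together, the two run_scrape.py hunks change the skip condition from "any cached entry exists" to "the cached JSON equals the model's current state", so a record whose scraped data changed is written again instead of being skipped. A minimal sketch of that pattern with a stand-in in-memory cache (the scraper's real cache client is not shown here):

```python
from typing import Any, Optional


class InMemoryCache:
    """Stand-in for the scraper's cache client, for illustration only."""

    def __init__(self) -> None:
        self._store: dict[tuple[str, str], dict[str, Any]] = {}

    def get_json(self, uid: str, table: str) -> Optional[dict[str, Any]]:
        return self._store.get((table, uid))

    def set_json(self, uid: str, value: dict[str, Any], table: str) -> None:
        self._store[(table, uid)] = value


def already_cached(
    cache: InMemoryCache, uid: str, table: str, model_dict: dict[str, Any]
) -> bool:
    # Presence alone is not enough: skip only when the cached state is
    # identical to the freshly scraped state.
    return cache.get_json(uid, table) == model_dict
```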
backend/scraper/websites/FiftyA/FiftyAIncidentParser.py (2 changes: 1 addition & 1 deletion)
@@ -207,5 +207,5 @@ def parse_complaint(
         incident.victims = victim  # type: ignore
         incident.use_of_force = force  # type: ignore
         incident.case_id = int(self.complaint_number(complaint_link))
-        incident.perpetrators = self._get_officers(soup)  # type: ignore
+        incident.perpetrators = self._get_officers(soup)  # type: ignore # noqa: E501
         return incident
backend/tests/test_queries.py (2 changes: 1 addition & 1 deletion)
@@ -88,4 +88,4 @@ def test_incident_exists_with_case_id(db_session: Session):
 
 def test_incident_exists_with_invalid_case_id(db_session: Session):
     # Test that a non-existing incident returns False
-    assert not incident_exists(db_session, Incident(**{"case_id": "654321"}))
+    assert not incident_exists(db_session, Incident(**{"case_id": "654321"}))
