Use Ruff (#94)

* Replace linting tools with ruff

* Ruff auto-fixes

* Fix lint errors in app.py and DataDocMetadata.py

* Use 3.11 in workflow

* Fix lint errors in DatasetParser.py

* Add import annotations where needed

* Fix lint errors in ModelBackwardsCompatibility.py

* Fix lint errors in StorageAdapter.py

* Remove unused code

* Fix tests

* Fix lint errors in callbacks package

* Fix lint errors in fields package

* Fix lint errors in components package

* Fix lint errors in tests package

* Tidy final lint errors

* Rename to datadoc_metadata.py

* Rename to dataset_parser.py

* Rename to model_backwards_compatibility.py

* Rename to storage_adapter.py

* Rename to alerts.py

* Fix tests

* Rename to builders.py

* Rename to control_bars.py

* Rename to dataset_tab.py

* Rename to variables_tab.py

* Rename to display_base.py

* Rename to display_dataset.py

* Rename to display_variables.py

* Add VSCode config to gitignore

* Actually rename files
mmwinther authored Aug 16, 2023
1 parent f046668 commit 01f593c
Showing 51 changed files with 1,430 additions and 1,329 deletions.
9 changes: 3 additions & 6 deletions .github/workflows/unit-tests.yml
@@ -20,7 +20,7 @@ jobs:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v1
         with:
-          python-version: "3.10"
+          python-version: "3.11"
       - uses: Gr1N/setup-poetry@v8
       - uses: actions/cache@v2
         with:
@@ -31,12 +31,9 @@
       - name: Install dependencies
        run: |
          poetry install --all-extras
-      - name: Lint with flake8
+      - name: Commit hooks
        run: |
-          # stop the build if there are Python syntax errors or undefined names
-          poetry run pflake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
-          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
-          poetry run pflake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+          poetry run pre-commit run --all-files
       - name: Run unit tests
        run: |
          set -o pipefail; poetry run pytest -v --cache-clear --junitxml=pytest.xml --cov-report=term-missing --cov=datadoc | tee pytest-coverage.txt

6 changes: 3 additions & 3 deletions .gitignore
@@ -1,12 +1,12 @@
# The file created from the example parquet file
klargjorte_data/person_data_v1__DOC.json

# This file is changed every time we run it, avoid that being committed
DataDoc.ipynb

# Jetbrains IDE config
.idea/

# VSCode config
.vscode/

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
40 changes: 14 additions & 26 deletions .pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.0.1
+    rev: v4.4.0
     hooks:
       - id: check-ast
       - id: check-added-large-files
@@ -13,30 +13,18 @@ repos:
       - id: end-of-file-fixer
       - id: trailing-whitespace
       - id: mixed-line-ending
-  - repo: local
+  - repo: https://github.com/psf/black
+    rev: 23.7.0
     hooks:
-      - id: autoflake
-        name: autoflake
-        entry: poetry run autoflake -r -i --remove-all-unused-imports --remove-unused-variables
-        language: system
-        types: [python]
-      - id: isort
-        name: isort
-        entry: poetry run isort
-        language: system
-        types: [python]
       - id: black
-        name: black
-        entry: poetry run black
-        language: system
-        types: [python]
-      - id: pyupgrade
-        name: pyupgrade
-        entry: poetry run pyupgrade --py37-plus
-        language: system
-        types: [python]
-      - id: flake8
-        name: flake8
-        entry: poetry run pflake8
-        language: system
-        types: [python]
+        # It is recommended to specify the latest version of Python
+        # supported by your project here, or alternatively use
+        # pre-commit's default_language_version, see
+        # https://pre-commit.com/#top_level-default_language_version
+        language_version: python3.11
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.0.283
+    hooks:
+      - id: ruff
+        args: [ --fix, --exit-non-zero-on-fix ]

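For orientation: the five removed local hooks map onto rule families that Ruff now covers in a single pass (pyflakes/pycodestyle for the old pflake8 step, F401 unused-import removal for autoflake, the `I` rules for isort, the `UP` rules for pyupgrade), while Black stays on as a dedicated formatting hook. Below is a small hypothetical module, not taken from this repository, annotated with the kinds of changes `ruff --fix` applies.

```python
"""Hypothetical example (not from this repository) of the fixes `ruff --fix` takes over."""
from __future__ import annotations  # mirrors the "Add import annotations where needed" commit

import logging
from pathlib import Path  # Ruff's `I` rules keep this import block grouped and sorted (isort's old job)

# An unused `import os` at this point would simply be deleted by the F401 fix (autoflake's old job).

logger = logging.getLogger(__name__)


def dataset_name(path: str | None = None) -> str:
    """Return the stem of a dataset path; an `Optional[str]` annotation is rewritten to `str | None` by UP007."""
    if path is None:
        return "no dataset"
    return Path(path).stem
```

With this configuration, the same checks run locally via `poetry run pre-commit run --all-files`, matching the new "Commit hooks" step in the workflow above.
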
2 changes: 1 addition & 1 deletion .vscode/settings.json
@@ -4,7 +4,7 @@
         "--max-line-length=88",
         "--ignore=E402,F841,F401,E302,E305,W503,E501"
     ],
-    "python.linting.enabled": true,
+    "python.linting.enabled": false,
     "python.linting.pylintEnabled": false,
     "python.linting.mypyEnabled": false,
     "python.languageServer": "Pylance",

2 changes: 1 addition & 1 deletion SECURITY.md
@@ -1,6 +1,6 @@
 # Security Policy

-SSB takes the security of our software products and services seriously, which 
+SSB takes the security of our software products and services seriously, which
 includes all source code repositories managed through our GitHub organization.

 We believe that responsible disclosure of security vulnerabilities helps us ensure

4 changes: 3 additions & 1 deletion datadoc/__init__.py
@@ -1 +1,3 @@
-from datadoc.app import main # noqa
+"""Datadoc: Document datasets in Statistics Norway."""
+
+from datadoc.app import main

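With the `# noqa` gone, the re-export of `main` has to satisfy Ruff's unused-import check (F401) some other way: typically a per-file ignore in the Ruff configuration (not visible in this diff) or an explicit public API declaration. The sketch below shows the latter pattern as an illustration only; it is not necessarily what this commit does.

```python
"""Datadoc: Document datasets in Statistics Norway."""

from datadoc.app import main

# Listing the name in __all__ marks it as a deliberate re-export,
# so F401 no longer needs a `# noqa` escape hatch.
__all__ = ["main"]
```
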
47 changes: 28 additions & 19 deletions datadoc/app.py
@@ -1,29 +1,35 @@
+"""Top-level entrypoint, configuration and layout for the datadoc app.
+Members of this module should not be imported into any sub-modules, this will cause circular imports.
+"""
+from __future__ import annotations
+
 import logging
 import os
+from pathlib import Path

 import dash_bootstrap_components as dbc
 from dash import Dash
 from datadoc_model.Enums import SupportedLanguages
 from flask_healthz import healthz

-import datadoc.state as state
-from datadoc.backend.DataDocMetadata import DataDocMetadata
+from datadoc import state
+from datadoc.backend.datadoc_metadata import DataDocMetadata
 from datadoc.frontend.callbacks.register_callbacks import register_callbacks
-from datadoc.frontend.components.Alerts import (
+from datadoc.frontend.components.alerts import (
     dataset_validation_error,
     opened_dataset_error,
     opened_dataset_success,
     saved_metadata_success,
     variables_validation_error,
 )
-from datadoc.frontend.components.DatasetTab import get_dataset_tab
-from datadoc.frontend.components.HeaderBars import (
-    get_controls_bar,
-    get_language_dropdown,
+from datadoc.frontend.components.control_bars import (
+    build_controls_bar,
+    build_language_dropdown,
     header,
     progress_bar,
 )
-from datadoc.frontend.components.VariablesTab import get_variables_tab
+from datadoc.frontend.components.dataset_tab import build_dataset_tab
+from datadoc.frontend.components.variables_tab import build_variables_tab
 from datadoc.utils import get_app_version, pick_random_port, running_in_notebook

 logger = logging.getLogger(__name__)
@@ -32,18 +38,19 @@


 def build_app() -> Dash:
+    """Instantiate the Dash app object, define the layout, register callbacks."""
     app = Dash(
         name=NAME,
         title=NAME,
-        assets_folder=f"{os.path.dirname(__file__)}/assets",
+        assets_folder=f"{Path(__file__).parent}/assets",
     )

     app.layout = dbc.Container(
         style={"padding": "4px"},
         children=[
             header,
             progress_bar,
-            get_controls_bar(),
+            build_controls_bar(),
             variables_validation_error,
             dataset_validation_error,
             opened_dataset_error,
@@ -56,13 +63,13 @@ def build_app() -> Dash:
                         id="tabs",
                         class_name="ssb-tabs",
                         children=[
-                            get_dataset_tab(),
-                            get_variables_tab(),
+                            build_dataset_tab(),
+                            build_variables_tab(),
                         ],
                     ),
                 ],
             ),
-            get_language_dropdown(),
+            build_language_dropdown(),
         ],
     )

@@ -71,9 +78,10 @@ def build_app() -> Dash:
     return app


-def get_app(dataset_path: str = None) -> Dash:
+def get_app(dataset_path: str | None = None) -> Dash:
+    """Centralize all the ugliness around initializing the app."""
     logging.basicConfig(level=logging.INFO, force=True)
-    logger.info(f"Datadoc version v{get_app_version()}")
+    logger.info("Datadoc version v%s", get_app_version())
     state.current_metadata_language = SupportedLanguages.NORSK_BOKMÅL
     state.metadata = DataDocMetadata(dataset_path)
     app = build_app()
@@ -88,15 +96,16 @@ def get_app(dataset_path: str = None) -> Dash:
     return app


-def main(dataset_path: str = None):
+def main(dataset_path: str | None = None) -> None:
+    """Entrypoint when running as a script."""
     logging.basicConfig(level=logging.DEBUG, force=True)
-    logger.info(f"Starting app with {dataset_path = }")
+    logger.info("Starting app with dataset_path = %s", dataset_path)
     app = get_app(dataset_path)
     if running_in_notebook():
         logger.info("Running in notebook")
         port = pick_random_port()
         app.run(jupyter_height=1000, port=port)
-        logger.info(f"Server running on port {port}")
+        logger.info("Server running on port %s", port)
     else:
         # Assume running in server mode is better (largely for development purposes)
         logging.basicConfig(level=logging.DEBUG, force=True)

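The manual fixes repeated through this file (f-strings in logging calls replaced with lazy `%`-style arguments, `os.path.dirname(__file__)` replaced with `pathlib`, and implicit-Optional `str = None` defaults rewritten as `str | None`) line up with Ruff's flake8-logging-format (G004), flake8-use-pathlib (PTH) and implicit-Optional checks. Below is a compact sketch of the resulting pattern with made-up names; it is not the app's real layout code.

```python
"""Sketch of the fix patterns above, using hypothetical names (not the real app module)."""
from __future__ import annotations

import logging
from pathlib import Path

logger = logging.getLogger(__name__)

ASSETS_FOLDER = f"{Path(__file__).parent}/assets"  # pathlib instead of os.path.dirname(__file__)


def start_app(dataset_path: str | None = None) -> None:
    """Log the chosen dataset; `str | None` makes the optional default explicit."""
    # Passing the value as a lazy `%s` argument defers string formatting until the
    # record is actually emitted, which is why log calls move away from f-strings here.
    logger.info("Starting app with dataset_path = %s", dataset_path)


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO, force=True)
    start_app("data/example_dataset.parquet")  # hypothetical path, for illustration only
```
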
2 changes: 1 addition & 1 deletion datadoc/assets/bootstrap.min.css

Large diffs are not rendered by default.
