From 7aa4e892d2a6f1e7fe75d062f908f70d006b4841 Mon Sep 17 00:00:00 2001
From: Harris Tzovanakis
Date: Thu, 25 Jul 2024 15:31:42 +0200
Subject: [PATCH] WIP Combine all (#55)

global: change the project structure

* It merges Airflow and Django into a monorepo; this introduces breaking changes.

---
 .envs/docker/.django | 23 --
 .envs/local/.postgres | 7 -
 .../{ => backoffice}/integration-tests.yml | 0
 .github/workflows/{ => backoffice}/test.yml | 4 +-
 .github/workflows/lint.yml | 16 +-
 .../pull-request-main-backoffice.yml | 18 +
 ...in.yml => pull-request-main-workflows.yml} | 5 +-
 .github/workflows/push-main-backoffice.yml | 34 ++
 ...{push-main.yml => push-main-workflows.yml} | 4 +-
 .github/workflows/workflows/test.yml | 57 +++
 .pre-commit-config.yaml | 30 +-
 .python-version | 1 -
 CONTRIBUTORS.txt | 1 -
 Makefile | 28 ++
 {.envs => backoffice/.envs}/local/.django | 5 +-
 .../.envs/local}/.postgres | 4 +-
 .../django => backoffice}/Dockerfile | 0
 backoffice/__init__.py | 2 -
 backoffice/backoffice/__init__.py | 5 +
 backoffice/{ => backoffice}/conftest.py | 4 +-
 .../{ => backoffice}/contrib/__init__.py | 0
 .../contrib/sites/__init__.py | 0
 .../contrib/sites/migrations/0001_initial.py | 1 -
 .../migrations/0002_alter_domain_unique.py | 1 -
 .../0003_set_site_domain_and_name.py | 2 +-
 .../0004_alter_options_ordering_domain.py | 1 -
 .../contrib/sites/migrations/__init__.py | 0
 .../{ => backoffice}/fixtures/groups.json | 0
 .../{ => backoffice}/management/__init__.py | 0
 .../{ => backoffice}/management/apps.py | 0
 .../{ => backoffice}/management/groups.py | 0
 .../management/management/__init__.py | 0
 .../management/commands/__init__.py | 0
 .../management/commands/create_groups.py | 8 +-
 .../management/migrations/__init__.py | 0
 .../management/permissions.py | 0
 .../management/tests/test_commands.py | 0
 .../management/tests/test_permissions.py | 4 +-
 .../{ => backoffice}/static/css/project.css | 0
 .../{ => backoffice}/static/fonts/.gitkeep | 0
 .../static/images/favicons/favicon.ico | Bin
 .../{ => backoffice}/static/js/project.js | 0
 .../{ => backoffice}/templates/403.html | 0
 .../{ => backoffice}/templates/403_csrf.html | 0
 .../{ => backoffice}/templates/404.html | 0
 .../{ => backoffice}/templates/500.html | 0
 .../templates/account/account_inactive.html | 0
 .../templates/account/base.html | 0
 .../templates/account/email.html | 0
 .../templates/account/email_confirm.html | 0
 .../templates/account/login.html | 0
 .../templates/account/logout.html | 0
 .../templates/account/password_change.html | 0
 .../templates/account/password_reset.html | 0
 .../account/password_reset_done.html | 0
 .../account/password_reset_from_key.html | 0
 .../account/password_reset_from_key_done.html | 0
 .../templates/account/password_set.html | 0
 .../templates/account/signup.html | 0
 .../templates/account/signup_closed.html | 0
 .../templates/account/verification_sent.html | 0
 .../account/verified_email_required.html | 0
 .../{ => backoffice}/templates/base.html | 4 +-
 .../templates/pages/about.html | 2 -
 .../templates/pages/home.html | 2 -
 .../templates/users/user_detail.html | 14 +-
 .../templates/users/user_form.html | 8 +-
 backoffice/{ => backoffice}/users/__init__.py | 0
 backoffice/{ => backoffice}/users/adapters.py | 11 +-
 backoffice/{ => backoffice}/users/admin.py | 0
 .../{ => backoffice}/users/api/serializers.py | 0
 .../{ => backoffice}/users/api/views.py | 3 +-
 backoffice/{ => backoffice}/users/apps.py | 1 +
 .../users/context_processors.py | 0
 .../backoffice/users/fixtures/tokens.json | 10 +
 .../backoffice/users/fixtures/users.json | 38 ++
 backoffice/{ => backoffice}/users/forms.py | 0
 backoffice/{ => backoffice}/users/managers.py | 0
 .../users/migrations/0001_initial.py | 27 +-
 .../users/migrations/__init__.py | 0
 backoffice/{ => backoffice}/users/models.py | 0
 backoffice/{ => backoffice}/users/tasks.py | 3 +-
 .../{ => backoffice}/users/tests/__init__.py | 0
 .../{ => backoffice}/users/tests/factories.py | 0
 .../users/tests/test_admin.py | 0
 .../users/tests/test_drf_urls.py | 4 +-
 .../users/tests/test_drf_views.py | 2 +-
 .../users/tests/test_forms.py | 1 +
 .../users/tests/test_managers.py | 4 +-
 .../users/tests/test_models.py | 0
 .../users/tests/test_swagger.py | 2 +-
 .../users/tests/test_tasks.py | 0
 .../{ => backoffice}/users/tests/test_urls.py | 0
 .../users/tests/test_views.py | 0
 backoffice/{ => backoffice}/users/urls.py | 6 +-
 backoffice/{ => backoffice}/users/views.py | 4 +-
 backoffice/{ => backoffice}/utils/__init__.py | 0
 .../{ => backoffice}/utils/pagination.py | 0
 backoffice/{ => backoffice}/utils/storages.py | 0
 .../{ => backoffice}/workflows/__init__.py | 0
 .../{ => backoffice}/workflows/admin.py | 33 +-
 .../workflows/airflow_utils.py | 21 +-
 .../workflows/api/__init__.py | 0
 .../workflows/api/serializers.py | 3 +-
 .../{ => backoffice}/workflows/api/views.py | 51 ++-
 backoffice/{ => backoffice}/workflows/apps.py | 0
 .../{ => backoffice}/workflows/constants.py | 0
 .../{ => backoffice}/workflows/documents.py | 0
 .../workflows/migrations/0001_initial.py | 15 +-
 ...orkflow_remove_workflowmeta_id_and_more.py | 13 +-
 .../migrations/0003_workflowticket.py | 17 +-
 .../migrations/0004_workflow_workflow_type.py | 0
 ...icket_ticket_type_alter_workflow_status.py | 0
 ...rkflow__created_at_workflow__updated_at.py | 6 +-
 ..._workflow_core_alter_workflow_is_update.py | 0
 ...low_status_alter_workflow_workflow_type.py | 0
 .../workflows/migrations/__init__.py | 0
 .../{ => backoffice}/workflows/models.py | 10 +-
 .../workflows/tests/test_views.py | 77 +++-
 {config => backoffice/config}/__init__.py | 2 +-
 {config => backoffice/config}/api_router.py | 17 +-
 {config => backoffice/config}/asgi.py | 1 +
 {config => backoffice/config}/celery_app.py | 0
 .../config}/search_router.py | 5 +-
 .../config}/settings/__init__.py | 0
 .../config}/settings/base.py | 25 +-
 .../config}/settings/local.py | 2 +-
 .../config}/settings/production.py | 13 +-
 .../config}/settings/test.py | 3 +-
 {config => backoffice/config}/urls.py | 4 +-
 {config => backoffice/config}/websocket.py | 5 +-
 {config => backoffice/config}/wsgi.py | 1 +
 {locale => backoffice/locale}/README.md | 0
 .../locale}/en_US/LC_MESSAGES/django.po | 0
 .../locale}/fr_FR/LC_MESSAGES/django.po | 0
 .../locale}/pt_BR/LC_MESSAGES/django.po | 0
 manage.py => backoffice/manage.py | 1 +
 .../merge_production_dotenvs_in_dotenv.py | 0
 poetry.lock => backoffice/poetry.lock | 12 +-
 pyproject.toml => backoffice/pyproject.toml | 1 +
 ...test_merge_production_dotenvs_in_dotenv.py | 1 -
 compose/local/django/Dockerfile | 30 --
 compose/local/django/celery/beat/start | 8 -
 compose/local/django/celery/flower/start | 8 -
 compose/local/django/celery/worker/start | 7 -
 compose/local/django/start | 9 -
 compose/production/django/entrypoint | 49 ---
 compose/production/django/start | 10 -
 compose/production/nginx/Dockerfile | 2 -
 compose/production/nginx/default.conf | 7 -
 compose/production/postgres/Dockerfile | 6 -
 .../maintenance/_sourced/constants.sh | 5 -
 .../maintenance/_sourced/countdown.sh | 12 -
 .../postgres/maintenance/_sourced/messages.sh | 41 ---
 .../postgres/maintenance/_sourced/yes_no.sh | 16 -
 .../production/postgres/maintenance/backup | 38 --
 .../production/postgres/maintenance/backups | 22 --
 .../production/postgres/maintenance/restore | 55 ---
 docker-compose.yaml | 344 ++++++++++++++++++
 local.yml | 113 ------
 production.yml | 69 ----
 requirements/base.txt | 24 --
 requirements/local.txt | 34 --
 requirements/production.txt | 13 -
 ruff.toml | 39 ++
 workflows/Dockerfile | 9 +
 workflows/dags/__init__.py | 0
 workflows/dags/author/__init__.py | 0
 workflows/dags/author/author_create/README.md | 0
 .../dags/author/author_create/__init__.py | 0
 .../author_create/author_create_approved.py | 190 ++++++++++
 .../author_create/author_create_init.py | 95 +++++
 .../author_create/author_create_rejected.py | 71 ++++
 workflows/dags/author/author_update/README.md | 0
 .../dags/author/author_update/__init__.py | 0
 .../author/author_update/author_update.py | 134 +++++++
 workflows/dags/happy_flow_dag.py | 66 ++++
 workflows/dags/process_until_breakpoint.py | 73 ++++
 workflows/logs/scheduler/latest | 1 +
 workflows/plugins/__init__.py | 0
 workflows/plugins/hooks/__init__.py | 0
 .../plugins/hooks/backoffice/__init__.py | 0
 workflows/plugins/hooks/backoffice/base.py | 60 +++
 .../backoffice/workflow_management_hook.py | 58 +++
 .../workflow_ticket_management_hook.py | 50 +++
 .../plugins/hooks/inspirehep/__init__.py | 0
 .../hooks/inspirehep/inspire_http_hook.py | 65 ++++
 .../inspire_http_record_management_hook.py | 47 +++
 workflows/plugins/hooks/tenacity_config.py | 9 +
 workflows/plugins/include/__init__.py | 0
 workflows/plugins/include/inspire/__init__.py | 0
 .../inspire/affiliations_normalization.py | 10 +
 workflows/plugins/include/inspire/approval.py | 70 ++++
 workflows/plugins/include/utils/__init__.py | 0
 workflows/plugins/include/utils/s3_client.py | 14 +
 .../include/utils/set_workflow_status.py | 46 +++
 workflows/plugins/operators/__init__.py | 0
 .../operators/short_circuit_operator.py | 55 +++
 workflows/requirements-test.txt | 3 +
 workflows/requirements.txt | 1 +
 .../scripts/connections/connections.json | 32 ++
 workflows/scripts/setup_s3.sh | 5 +
 workflows/scripts/setup_tables_inspire_db.sh | 12 +
 workflows/scripts/variables/variables.json | 4 +
 workflows/tests/test_example.py | 2 +
 205 files changed, 2100 insertions(+), 793 deletions(-)
 delete mode 100644 .envs/docker/.django
 delete mode 100644 .envs/local/.postgres
 rename .github/workflows/{ => backoffice}/integration-tests.yml (100%)
 rename .github/workflows/{ => backoffice}/test.yml (88%)
 create mode 100644 .github/workflows/pull-request-main-backoffice.yml
 rename .github/workflows/{pull-request-main.yml => pull-request-main-workflows.yml} (75%)
 create mode 100644 .github/workflows/push-main-backoffice.yml
 rename .github/workflows/{push-main.yml => push-main-workflows.yml} (88%)
 create mode 100644 .github/workflows/workflows/test.yml
 delete mode 100644 .python-version
 delete mode 100644 CONTRIBUTORS.txt
 create mode 100644 Makefile
 rename {.envs => backoffice/.envs}/local/.django (81%)
 rename {.envs/docker => backoffice/.envs/local}/.postgres (72%)
 rename {compose/production/django => backoffice}/Dockerfile (100%)
 delete mode 100644 backoffice/__init__.py
 create mode 100644 backoffice/backoffice/__init__.py
 rename backoffice/{ => backoffice}/conftest.py (80%)
 rename backoffice/{ => backoffice}/contrib/__init__.py (100%)
 rename backoffice/{ => backoffice}/contrib/sites/__init__.py (100%)
 rename backoffice/{ => backoffice}/contrib/sites/migrations/0001_initial.py (99%)
 rename backoffice/{ => backoffice}/contrib/sites/migrations/0002_alter_domain_unique.py (99%)
 rename backoffice/{ => backoffice}/contrib/sites/migrations/0003_set_site_domain_and_name.py (100%)
 rename backoffice/{ => backoffice}/contrib/sites/migrations/0004_alter_options_ordering_domain.py (99%)
 rename backoffice/{ => backoffice}/contrib/sites/migrations/__init__.py (100%)
 rename backoffice/{ => backoffice}/fixtures/groups.json (100%)
 rename backoffice/{ => backoffice}/management/__init__.py (100%)
 rename backoffice/{ => backoffice}/management/apps.py (100%)
 rename backoffice/{ => backoffice}/management/groups.py (100%)
 rename backoffice/{ => backoffice}/management/management/__init__.py (100%)
 rename backoffice/{ => backoffice}/management/management/commands/__init__.py (100%)
 rename backoffice/{ => backoffice}/management/management/commands/create_groups.py (80%)
 rename backoffice/{ => backoffice}/management/migrations/__init__.py (100%)
 rename backoffice/{ => backoffice}/management/permissions.py (100%)
 rename backoffice/{ => backoffice}/management/tests/test_commands.py (100%)
 rename backoffice/{ => backoffice}/management/tests/test_permissions.py (92%)
 rename backoffice/{ => backoffice}/static/css/project.css (100%)
 rename backoffice/{ => backoffice}/static/fonts/.gitkeep (100%)
 rename backoffice/{ => backoffice}/static/images/favicons/favicon.ico (100%)
 rename backoffice/{ => backoffice}/static/js/project.js (100%)
 rename backoffice/{ => backoffice}/templates/403.html (100%)
 rename backoffice/{ => backoffice}/templates/403_csrf.html (100%)
 rename backoffice/{ => backoffice}/templates/404.html (100%)
 rename backoffice/{ => backoffice}/templates/500.html (100%)
 rename backoffice/{ => backoffice}/templates/account/account_inactive.html (100%)
 rename backoffice/{ => backoffice}/templates/account/base.html (100%)
 rename backoffice/{ => backoffice}/templates/account/email.html (100%)
 rename backoffice/{ => backoffice}/templates/account/email_confirm.html (100%)
 rename backoffice/{ => backoffice}/templates/account/login.html (100%)
 rename backoffice/{ => backoffice}/templates/account/logout.html (100%)
 rename backoffice/{ => backoffice}/templates/account/password_change.html (100%)
 rename backoffice/{ => backoffice}/templates/account/password_reset.html (100%)
 rename backoffice/{ => backoffice}/templates/account/password_reset_done.html (100%)
 rename backoffice/{ => backoffice}/templates/account/password_reset_from_key.html (100%)
 rename backoffice/{ => backoffice}/templates/account/password_reset_from_key_done.html (100%)
 rename backoffice/{ => backoffice}/templates/account/password_set.html (100%)
 rename backoffice/{ => backoffice}/templates/account/signup.html (100%)
 rename backoffice/{ => backoffice}/templates/account/signup_closed.html (100%)
 rename backoffice/{ => backoffice}/templates/account/verification_sent.html (100%)
 rename backoffice/{ => backoffice}/templates/account/verified_email_required.html (100%)
 rename backoffice/{ => backoffice}/templates/base.html (99%)
 rename backoffice/{ => backoffice}/templates/pages/about.html (92%)
 rename backoffice/{ => backoffice}/templates/pages/home.html (92%)
 rename backoffice/{ => backoffice}/templates/users/user_detail.html (94%)
 rename backoffice/{ => backoffice}/templates/users/user_form.html (97%)
 rename backoffice/{ => backoffice}/users/__init__.py (100%)
 rename backoffice/{ => backoffice}/users/adapters.py (82%)
 rename backoffice/{ => backoffice}/users/admin.py (100%)
 rename backoffice/{ => backoffice}/users/api/serializers.py (100%)
 rename backoffice/{ => backoffice}/users/api/views.py (94%)
 rename backoffice/{ => backoffice}/users/apps.py (93%)
 rename backoffice/{ => backoffice}/users/context_processors.py (100%)
 create mode 100644 backoffice/backoffice/users/fixtures/tokens.json
 create mode 100644 backoffice/backoffice/users/fixtures/users.json
 rename backoffice/{ => backoffice}/users/forms.py (100%)
 rename backoffice/{ => backoffice}/users/managers.py (100%)
 rename backoffice/{ => backoffice}/users/migrations/0001_initial.py (81%)
 rename backoffice/{ => backoffice}/users/migrations/__init__.py (100%)
 rename backoffice/{ => backoffice}/users/models.py (100%)
 rename backoffice/{ => backoffice}/users/tasks.py (99%)
 rename backoffice/{ => backoffice}/users/tests/__init__.py (100%)
 rename backoffice/{ => backoffice}/users/tests/factories.py (100%)
 rename backoffice/{ => backoffice}/users/tests/test_admin.py (100%)
 rename backoffice/{ => backoffice}/users/tests/test_drf_urls.py (81%)
 rename backoffice/{ => backoffice}/users/tests/test_drf_views.py (97%)
 rename backoffice/{ => backoffice}/users/tests/test_forms.py (99%)
 rename backoffice/{ => backoffice}/users/tests/test_managers.py (96%)
 rename backoffice/{ => backoffice}/users/tests/test_models.py (100%)
 rename backoffice/{ => backoffice}/users/tests/test_swagger.py (95%)
 rename backoffice/{ => backoffice}/users/tests/test_tasks.py (100%)
 rename backoffice/{ => backoffice}/users/tests/test_urls.py (100%)
 rename backoffice/{ => backoffice}/users/tests/test_views.py (100%)
 rename backoffice/{ => backoffice}/users/urls.py (70%)
 rename backoffice/{ => backoffice}/users/views.py (89%)
 rename backoffice/{ => backoffice}/utils/__init__.py (100%)
 rename backoffice/{ => backoffice}/utils/pagination.py (100%)
 rename backoffice/{ => backoffice}/utils/storages.py (100%)
 rename backoffice/{ => backoffice}/workflows/__init__.py (100%)
 rename backoffice/{ => backoffice}/workflows/admin.py (75%)
 rename backoffice/{ => backoffice}/workflows/airflow_utils.py (62%)
 rename backoffice/{ => backoffice}/workflows/api/__init__.py (100%)
 rename backoffice/{ => backoffice}/workflows/api/serializers.py (93%)
 rename backoffice/{ => backoffice}/workflows/api/views.py (74%)
 rename backoffice/{ => backoffice}/workflows/apps.py (100%)
 rename backoffice/{ => backoffice}/workflows/constants.py (100%)
 rename backoffice/{ => backoffice}/workflows/documents.py (100%)
 rename backoffice/{ => backoffice}/workflows/migrations/0001_initial.py (86%)
 rename backoffice/{ => backoffice}/workflows/migrations/0002_workflow_remove_workflowmeta_id_and_more.py (82%)
 rename backoffice/{ => backoffice}/workflows/migrations/0003_workflowticket.py (53%)
 rename backoffice/{ => backoffice}/workflows/migrations/0004_workflow_workflow_type.py (100%)
 rename backoffice/{ => backoffice}/workflows/migrations/0005_workflowticket_ticket_type_alter_workflow_status.py (100%)
 rename backoffice/{ => backoffice}/workflows/migrations/0006_workflow__created_at_workflow__updated_at.py (82%)
 rename backoffice/{ => backoffice}/workflows/migrations/0007_alter_workflow_core_alter_workflow_is_update.py (100%)
 rename backoffice/{ => backoffice}/workflows/migrations/0008_alter_workflow_status_alter_workflow_workflow_type.py (100%)
 rename backoffice/{ => backoffice}/workflows/migrations/__init__.py (100%)
 rename backoffice/{ => backoffice}/workflows/models.py (77%)
 rename backoffice/{ => backoffice}/workflows/tests/test_views.py (80%)
 rename {config => backoffice/config}/__init__.py (73%)
 rename {config => backoffice/config}/api_router.py (56%)
 rename {config => backoffice/config}/asgi.py (99%)
 rename {config => backoffice/config}/celery_app.py (100%)
 rename {config => backoffice/config}/search_router.py (78%)
 rename {config => backoffice/config}/settings/__init__.py (100%)
 rename {config => backoffice/config}/settings/base.py (96%)
 rename {config => backoffice/config}/settings/local.py (98%)
 rename {config => backoffice/config}/settings/production.py (96%)
 rename {config => backoffice/config}/settings/test.py (97%)
 rename {config => backoffice/config}/urls.py (96%)
 rename {config => backoffice/config}/websocket.py (63%)
 rename {config => backoffice/config}/wsgi.py (99%)
 rename {locale => backoffice/locale}/README.md (100%)
 rename {locale => backoffice/locale}/en_US/LC_MESSAGES/django.po (100%)
 rename {locale => backoffice/locale}/fr_FR/LC_MESSAGES/django.po (100%)
 rename {locale => backoffice/locale}/pt_BR/LC_MESSAGES/django.po (100%)
 rename manage.py => backoffice/manage.py (98%)
 rename merge_production_dotenvs_in_dotenv.py => backoffice/merge_production_dotenvs_in_dotenv.py (100%)
 rename poetry.lock => backoffice/poetry.lock (99%)
 rename pyproject.toml => backoffice/pyproject.toml (99%)
 rename {tests => backoffice/tests}/test_merge_production_dotenvs_in_dotenv.py (99%)
 delete mode 100644 compose/local/django/Dockerfile
 delete mode 100644 compose/local/django/celery/beat/start
 delete mode 100644 compose/local/django/celery/flower/start
 delete mode 100644 compose/local/django/celery/worker/start
 delete mode 100644 compose/local/django/start
 delete mode 100644 compose/production/django/entrypoint
 delete mode 100644 compose/production/django/start
 delete mode 100644 compose/production/nginx/Dockerfile
 delete mode 100644 compose/production/nginx/default.conf
 delete mode 100644 compose/production/postgres/Dockerfile
 delete mode 100644 compose/production/postgres/maintenance/_sourced/constants.sh
 delete mode 100644 compose/production/postgres/maintenance/_sourced/countdown.sh
 delete mode 100644 compose/production/postgres/maintenance/_sourced/messages.sh
 delete mode 100644 compose/production/postgres/maintenance/_sourced/yes_no.sh
 delete mode 100644 compose/production/postgres/maintenance/backup
 delete mode 100644 compose/production/postgres/maintenance/backups
 delete mode 100644 compose/production/postgres/maintenance/restore
 create mode 100644 docker-compose.yaml
 delete mode 100644 local.yml
 delete mode 100644 production.yml
 delete mode 100644 requirements/base.txt
 delete mode 100644 requirements/local.txt
 delete mode 100644 requirements/production.txt
 create mode 100644 ruff.toml
 create mode 100644 workflows/Dockerfile
 create mode 100644 workflows/dags/__init__.py
 create mode 100644 workflows/dags/author/__init__.py
 create mode 100644 workflows/dags/author/author_create/README.md
 create mode 100644 workflows/dags/author/author_create/__init__.py
 create mode 100644 workflows/dags/author/author_create/author_create_approved.py
 create mode 100644 workflows/dags/author/author_create/author_create_init.py
 create mode 100644 workflows/dags/author/author_create/author_create_rejected.py
 create mode 100644 workflows/dags/author/author_update/README.md
 create mode 100644 workflows/dags/author/author_update/__init__.py
 create mode 100644 workflows/dags/author/author_update/author_update.py
 create mode 100644 workflows/dags/happy_flow_dag.py
 create mode 100644 workflows/dags/process_until_breakpoint.py
 create mode 120000 workflows/logs/scheduler/latest
 create mode 100644 workflows/plugins/__init__.py
 create mode 100644 workflows/plugins/hooks/__init__.py
 create mode 100644 workflows/plugins/hooks/backoffice/__init__.py
 create mode 100644 workflows/plugins/hooks/backoffice/base.py
 create mode 100644 workflows/plugins/hooks/backoffice/workflow_management_hook.py
 create mode 100644 workflows/plugins/hooks/backoffice/workflow_ticket_management_hook.py
 create mode 100644 workflows/plugins/hooks/inspirehep/__init__.py
 create mode 100644 workflows/plugins/hooks/inspirehep/inspire_http_hook.py
 create mode 100644 workflows/plugins/hooks/inspirehep/inspire_http_record_management_hook.py
 create mode 100644 workflows/plugins/hooks/tenacity_config.py
 create mode 100644 workflows/plugins/include/__init__.py
 create mode 100644 workflows/plugins/include/inspire/__init__.py
 create mode 100644 workflows/plugins/include/inspire/affiliations_normalization.py
 create mode 100644 workflows/plugins/include/inspire/approval.py
 create mode 100644 workflows/plugins/include/utils/__init__.py
 create mode 100644 workflows/plugins/include/utils/s3_client.py
 create mode 100644 workflows/plugins/include/utils/set_workflow_status.py
 create mode 100644 workflows/plugins/operators/__init__.py
 create mode 100644 workflows/plugins/operators/short_circuit_operator.py
 create mode 100644 workflows/requirements-test.txt
 create mode 100644 workflows/requirements.txt
 create mode 100644 workflows/scripts/connections/connections.json
 create mode 100644 workflows/scripts/setup_s3.sh
 create mode 100644 workflows/scripts/setup_tables_inspire_db.sh
 create mode 100644 workflows/scripts/variables/variables.json
 create mode 100644 workflows/tests/test_example.py

diff --git a/.envs/docker/.django b/.envs/docker/.django
deleted file mode 100644
index 48fbb2a6f..000000000
--- a/.envs/docker/.django
+++ /dev/null
@@ -1,23 +0,0 @@
-# General
-# ------------------------------------------------------------------------------
-USE_DOCKER=yes
-IPYTHONDIR=/app/.ipython
-
-# Redis
-# ------------------------------------------------------------------------------
-REDIS_URL=redis://redis:6379/0
-
-# Celery
-CELERY_BROKER_URL=amqp://guest:guest@mq:5672/
-CELERY_RESULT_BACKEND=redis://redis:6379/2
-# Flower
-CELERY_FLOWER_USER=debug
-CELERY_FLOWER_PASSWORD=debug
-
-# OpenSearch
-OPENSEARCH_HOST=opensearch:9200
-OPENSEARCH_INDEX_PREFIX=backoffice-backend-local
-
-# Airflow
-AIRFLOW_BASE_URL=http://localhost:8080
-AIRFLOW_TOKEN=CHANGE_ME
diff --git a/.envs/local/.postgres b/.envs/local/.postgres
deleted file mode 100644
index 0bdd6eda2..000000000
--- a/.envs/local/.postgres
+++ /dev/null
@@ -1,7 +0,0 @@
-# PostgreSQL
-# ------------------------------------------------------------------------------
-POSTGRES_HOST=postgres
-POSTGRES_PORT=5432
-POSTGRES_DB=inspire
-POSTGRES_USER=inspire
-POSTGRES_PASSWORD=inspire
\ No newline at end of file
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/backoffice/integration-tests.yml
similarity index 100%
rename from .github/workflows/integration-tests.yml
rename to .github/workflows/backoffice/integration-tests.yml
diff --git a/.github/workflows/test.yml b/.github/workflows/backoffice/test.yml
similarity index 88%
rename from .github/workflows/test.yml
rename to .github/workflows/backoffice/test.yml
index 1c08cf416..891b3f4db 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/backoffice/test.yml
@@ -18,7 +18,7 @@ jobs: with: ref: ${{ inputs.ref }} image: cern-sis/inspire/backoffice - context: .
+ context: ./backoffice dockerfile: ./compose/production/django/Dockerfile secrets: inherit @@ -26,7 +26,7 @@ jobs: needs: build strategy: fail-fast: false - uses: ./.github/workflows/integration-tests.yml + uses: ./.github/workflows/backoffice/integration-tests.yml with: ref: ${{ inputs.ref }} image: "registry.cern.ch/cern-sis/inspire/backoffice@${{ needs.build.outputs.image-id }}" diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index fac8b9b65..895f04396 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -17,17 +17,5 @@ jobs: with: ref: ${{ inputs.ref }} - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.11" - - - name: black - uses: psf/black@stable - with: - options: "--verbose" - - - name: run flake8 - uses: julianwachholz/flake8-action@v2 - with: - checkName: "flake8" + - name: Pre-commit check + uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/pull-request-main-backoffice.yml b/.github/workflows/pull-request-main-backoffice.yml new file mode 100644 index 000000000..4a4fd8d82 --- /dev/null +++ b/.github/workflows/pull-request-main-backoffice.yml @@ -0,0 +1,18 @@ +name: Pull request main + +on: + pull_request_target: + branches: [main] + paths: + - 'backoffice/**' + +jobs: + lint: + uses: ./.github/workflows/lint.yml + with: + ref: ${{ github.ref }} + test: + uses: ./.github/workflows/backoffice/test.yml + with: + ref: ${{ github.event.pull_request.head.sha }} + secrets: inherit diff --git a/.github/workflows/pull-request-main.yml b/.github/workflows/pull-request-main-workflows.yml similarity index 75% rename from .github/workflows/pull-request-main.yml rename to .github/workflows/pull-request-main-workflows.yml index 8a045c73e..4c31cd1f5 100644 --- a/.github/workflows/pull-request-main.yml +++ b/.github/workflows/pull-request-main-workflows.yml @@ -3,14 +3,15 @@ name: Pull request main on: pull_request_target: branches: [main] - + paths: + - 'workflows/**' jobs: lint: uses: ./.github/workflows/lint.yml with: ref: ${{ github.ref }} test: - uses: ./.github/workflows/test.yml + uses: ./.github/workflows/workflows/test.yml with: ref: ${{ github.event.pull_request.head.sha }} secrets: inherit diff --git a/.github/workflows/push-main-backoffice.yml b/.github/workflows/push-main-backoffice.yml new file mode 100644 index 000000000..6ef067bdb --- /dev/null +++ b/.github/workflows/push-main-backoffice.yml @@ -0,0 +1,34 @@ +name: Push main + +on: + push: + branches: [main] + paths: + - 'backoffice/**' + +defaults: + run: + shell: bash + +jobs: + lint: + uses: ./.github/workflows/lint.yml + with: + ref: ${{ github.ref }} + test: + needs: lint + uses: ./.github/workflows/backoffice/test.yml + with: + ref: ${{ github.ref }} + secrets: inherit + deploy: + needs: test + runs-on: ubuntu-latest + steps: + - name: send event + uses: cern-sis/gh-workflows/.github/actions/kubernetes-project-new-images@v6.2.0 + with: + event-type: update + images: | + cern-sis/inspire/backoffice@${{ needs.test.outputs.image-id }} + token: ${{ secrets.PAT_FIRE_EVENTS_ON_CERN_SIS_KUBERNETES }} diff --git a/.github/workflows/push-main.yml b/.github/workflows/push-main-workflows.yml similarity index 88% rename from .github/workflows/push-main.yml rename to .github/workflows/push-main-workflows.yml index b07de7a73..9f1e7ba8f 100644 --- a/.github/workflows/push-main.yml +++ b/.github/workflows/push-main-workflows.yml @@ -3,6 +3,8 @@ name: Push main on: push: branches: [main] + paths: + - 'workflows/**' defaults: run: @@ -14,7 +16,7 @@ 
jobs: with: ref: ${{ github.ref }} test: - uses: ./.github/workflows/test.yml + uses: ./.github/workflows/workflows/test.yml with: ref: ${{ github.ref }} secrets: inherit diff --git a/.github/workflows/workflows/test.yml b/.github/workflows/workflows/test.yml new file mode 100644 index 000000000..c1c6bfd83 --- /dev/null +++ b/.github/workflows/workflows/test.yml @@ -0,0 +1,57 @@ +name: Tests + +on: + workflow_call: + inputs: + ref: + description: The reference to build + type: string + required: true + outputs: + image-id: + description: The ID of image that has been built + value: ${{ jobs.build.outputs.image-id }} + +defaults: + run: + shell: bash + +jobs: + build: + uses: ./.github/workflows/build.yml + with: + ref: ${{ inputs.ref }} + image: cern-sis/inspire/workflows + secrets: inherit + test: + needs: build + runs-on: ubuntu-latest + services: + redis: + image: redis + ports: + - 6379:6379 + postgres: + image: postgres:13 + env: + POSTGRES_USER: airflow + POSTGRES_PASSWORD: airflow + POSTGRES_DB: airflow + ports: + - 5432:5432 + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + ref: ${{ inputs.ref }} + + - name: Test + run: > + docker run + --network=host + -v "$(pwd)"/tests:/opt/airflow/tests + -v "$(pwd)"/airflow.cfg:/opt/airflow/airflow.cfg + -v "$(pwd)"/requirements-test.txt:/opt/airflow/requirements-test.txt + -v "$(pwd)"/data:/opt/airflow/data + registry.cern.ch/cern-sis/inspire/workflows@${{ needs.build.outputs.image-id }} + bash -c "pip install -r requirements-test.txt && airflow db init && pytest /opt/airflow/tests" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 232b90901..2268ebc48 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,18 +1,18 @@ repos: - - repo: https://github.com/psf/black - rev: "22.8.0" + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 hooks: - - id: black - - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v2.7.1" + - id: check-yaml + - id: end-of-file-fixer + - id: trailing-whitespace + - id: fix-byte-order-marker + - id: mixed-line-ending + - id: name-tests-test + args: [ --pytest-test-first ] + exclude: '^(?!factories/)' + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.5.2 hooks: - - id: prettier - - repo: https://github.com/pycqa/isort - rev: "5.12.0" - hooks: - - id: isort - - repo: https://github.com/pycqa/flake8 - rev: "3.9.2" - hooks: - - id: flake8 - args: ["--config=setup.cfg"] + - id: ruff + args: [ --fix, --unsafe-fixes ] + - id: ruff-format diff --git a/.python-version b/.python-version deleted file mode 100644 index 2419ad5b0..000000000 --- a/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.11.9 diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt deleted file mode 100644 index 8fc9a19cf..000000000 --- a/CONTRIBUTORS.txt +++ /dev/null @@ -1 +0,0 @@ -Daniel Roy Greenfeld diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..8da23a86e --- /dev/null +++ b/Makefile @@ -0,0 +1,28 @@ +# Makefile + +# Set the AIRFLOW_HOME environment variable +export AIRFLOW_HOME=$(shell pwd)/workflows +export AIRFLOW_PROJ_DIR=$(AIRFLOW_HOME) + +# Default target +all: run + +run: + docker-compose up + +clean: + docker-compose down + +init: backoffice-init workflows-init + +backoffice-init: + docker-compose exec backoffice-webserver python manage.py create_groups + docker-compose exec backoffice-webserver python manage.py loaddata backoffice/users/fixtures/users.json + docker-compose exec backoffice-webserver python manage.py loaddata 
backoffice/users/fixtures/tokens.json + echo "Backoffice initialized" + +workflows-init: + docker-compose exec airflow-webserver /entrypoint airflow connections import ./scripts/connections/connections.json + docker-compose exec airflow-webserver /entrypoint airflow variables import ./scripts/variables/variables.json + echo "\033[31mCHANGE inspire_token in Admin->Variables\033[0m" + echo "Workflows initialized" diff --git a/.envs/local/.django b/backoffice/.envs/local/.django similarity index 81% rename from .envs/local/.django rename to backoffice/.envs/local/.django index 6674841e7..741037f70 100644 --- a/.envs/local/.django +++ b/backoffice/.envs/local/.django @@ -10,6 +10,7 @@ REDIS_URL=redis://redis:6379/0 # Celery CELERY_BROKER_URL=amqp://guest:guest@mq:5672/ CELERY_RESULT_BACKEND=redis://redis:6379/2 +CELERY_TASK_ALWAYS_EAGER=True # Flower CELERY_FLOWER_USER=debug CELERY_FLOWER_PASSWORD=debug @@ -19,5 +20,5 @@ OPENSEARCH_HOST=opensearch:9200 OPENSEARCH_INDEX_PREFIX=backoffice-backend-local # Airflow -AIRFLOW_BASE_URL=http://host.docker.internal:8082 -AIRFLOW_TOKEN=CHANGE_ME +AIRFLOW_BASE_URL=http://host.docker.internal:8080 +AIRFLOW_TOKEN=YWlyZmxvdzphaXJmbG93 diff --git a/.envs/docker/.postgres b/backoffice/.envs/local/.postgres similarity index 72% rename from .envs/docker/.postgres rename to backoffice/.envs/local/.postgres index 0bdd6eda2..330be0b80 100644 --- a/.envs/docker/.postgres +++ b/backoffice/.envs/local/.postgres @@ -1,7 +1,7 @@ # PostgreSQL # ------------------------------------------------------------------------------ -POSTGRES_HOST=postgres +POSTGRES_HOST=postgres-backoffice POSTGRES_PORT=5432 POSTGRES_DB=inspire POSTGRES_USER=inspire -POSTGRES_PASSWORD=inspire \ No newline at end of file +POSTGRES_PASSWORD=inspire diff --git a/compose/production/django/Dockerfile b/backoffice/Dockerfile similarity index 100% rename from compose/production/django/Dockerfile rename to backoffice/Dockerfile diff --git a/backoffice/__init__.py b/backoffice/__init__.py deleted file mode 100644 index 9c9b95346..000000000 --- a/backoffice/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -__version__ = "0.1.0" -__version_info__ = tuple(int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".")) diff --git a/backoffice/backoffice/__init__.py b/backoffice/backoffice/__init__.py new file mode 100644 index 000000000..3da9e5ff5 --- /dev/null +++ b/backoffice/backoffice/__init__.py @@ -0,0 +1,5 @@ +__version__ = "0.1.0" +__version_info__ = tuple( + int(num) if num.isdigit() else num + for num in __version__.replace("-", ".", 1).split(".") +) diff --git a/backoffice/conftest.py b/backoffice/backoffice/conftest.py similarity index 80% rename from backoffice/conftest.py rename to backoffice/backoffice/conftest.py index 9a364e392..1eac8e226 100644 --- a/backoffice/conftest.py +++ b/backoffice/backoffice/conftest.py @@ -5,10 +5,10 @@ @pytest.fixture(autouse=True) -def media_storage(settings, tmpdir): +def _media_storage(settings, tmpdir): settings.MEDIA_ROOT = tmpdir.strpath -@pytest.fixture +@pytest.fixture() def user(db) -> User: return UserFactory() diff --git a/backoffice/contrib/__init__.py b/backoffice/backoffice/contrib/__init__.py similarity index 100% rename from backoffice/contrib/__init__.py rename to backoffice/backoffice/contrib/__init__.py diff --git a/backoffice/contrib/sites/__init__.py b/backoffice/backoffice/contrib/sites/__init__.py similarity index 100% rename from backoffice/contrib/sites/__init__.py rename to 
backoffice/backoffice/contrib/sites/__init__.py diff --git a/backoffice/contrib/sites/migrations/0001_initial.py b/backoffice/backoffice/contrib/sites/migrations/0001_initial.py similarity index 99% rename from backoffice/contrib/sites/migrations/0001_initial.py rename to backoffice/backoffice/contrib/sites/migrations/0001_initial.py index 304cd6d7c..59647c850 100644 --- a/backoffice/contrib/sites/migrations/0001_initial.py +++ b/backoffice/backoffice/contrib/sites/migrations/0001_initial.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [] operations = [ diff --git a/backoffice/contrib/sites/migrations/0002_alter_domain_unique.py b/backoffice/backoffice/contrib/sites/migrations/0002_alter_domain_unique.py similarity index 99% rename from backoffice/contrib/sites/migrations/0002_alter_domain_unique.py rename to backoffice/backoffice/contrib/sites/migrations/0002_alter_domain_unique.py index 2c8d6dac0..4359049f8 100644 --- a/backoffice/contrib/sites/migrations/0002_alter_domain_unique.py +++ b/backoffice/backoffice/contrib/sites/migrations/0002_alter_domain_unique.py @@ -3,7 +3,6 @@ class Migration(migrations.Migration): - dependencies = [("sites", "0001_initial")] operations = [ diff --git a/backoffice/contrib/sites/migrations/0003_set_site_domain_and_name.py b/backoffice/backoffice/contrib/sites/migrations/0003_set_site_domain_and_name.py similarity index 100% rename from backoffice/contrib/sites/migrations/0003_set_site_domain_and_name.py rename to backoffice/backoffice/contrib/sites/migrations/0003_set_site_domain_and_name.py index 8b063f502..c893ca90c 100644 --- a/backoffice/contrib/sites/migrations/0003_set_site_domain_and_name.py +++ b/backoffice/backoffice/contrib/sites/migrations/0003_set_site_domain_and_name.py @@ -3,6 +3,7 @@ http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django """ + from django.conf import settings from django.db import migrations @@ -57,7 +58,6 @@ def update_site_backward(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [("sites", "0002_alter_domain_unique")] operations = [migrations.RunPython(update_site_forward, update_site_backward)] diff --git a/backoffice/contrib/sites/migrations/0004_alter_options_ordering_domain.py b/backoffice/backoffice/contrib/sites/migrations/0004_alter_options_ordering_domain.py similarity index 99% rename from backoffice/contrib/sites/migrations/0004_alter_options_ordering_domain.py rename to backoffice/backoffice/contrib/sites/migrations/0004_alter_options_ordering_domain.py index f7118ca81..095ca009b 100644 --- a/backoffice/contrib/sites/migrations/0004_alter_options_ordering_domain.py +++ b/backoffice/backoffice/contrib/sites/migrations/0004_alter_options_ordering_domain.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("sites", "0003_set_site_domain_and_name"), ] diff --git a/backoffice/contrib/sites/migrations/__init__.py b/backoffice/backoffice/contrib/sites/migrations/__init__.py similarity index 100% rename from backoffice/contrib/sites/migrations/__init__.py rename to backoffice/backoffice/contrib/sites/migrations/__init__.py diff --git a/backoffice/fixtures/groups.json b/backoffice/backoffice/fixtures/groups.json similarity index 100% rename from backoffice/fixtures/groups.json rename to backoffice/backoffice/fixtures/groups.json diff --git a/backoffice/management/__init__.py b/backoffice/backoffice/management/__init__.py similarity index 100% rename from 
backoffice/management/__init__.py rename to backoffice/backoffice/management/__init__.py diff --git a/backoffice/management/apps.py b/backoffice/backoffice/management/apps.py similarity index 100% rename from backoffice/management/apps.py rename to backoffice/backoffice/management/apps.py diff --git a/backoffice/management/groups.py b/backoffice/backoffice/management/groups.py similarity index 100% rename from backoffice/management/groups.py rename to backoffice/backoffice/management/groups.py diff --git a/backoffice/management/management/__init__.py b/backoffice/backoffice/management/management/__init__.py similarity index 100% rename from backoffice/management/management/__init__.py rename to backoffice/backoffice/management/management/__init__.py diff --git a/backoffice/management/management/commands/__init__.py b/backoffice/backoffice/management/management/commands/__init__.py similarity index 100% rename from backoffice/management/management/commands/__init__.py rename to backoffice/backoffice/management/management/commands/__init__.py diff --git a/backoffice/management/management/commands/create_groups.py b/backoffice/backoffice/management/management/commands/create_groups.py similarity index 80% rename from backoffice/management/management/commands/create_groups.py rename to backoffice/backoffice/management/management/commands/create_groups.py index 164f1c803..bb6154c39 100644 --- a/backoffice/management/management/commands/create_groups.py +++ b/backoffice/backoffice/management/management/commands/create_groups.py @@ -7,7 +7,8 @@ class Command(BaseCommand): """ - A management command that creates admin and curator groups and gives them all permissions to Workflow model. + A management command that creates admin and curator groups and gives + them all permissions to Workflow model. To run it, execute the following command: >> python manage.py create_groups """ @@ -22,6 +23,9 @@ def handle(self, *args, **options): curator_group.permissions.add(*permissions) self.stdout.write( self.style.SUCCESS( - "Successfully created admin and curator groups and gave them all permissions to Workflow model." + """ + Successfully created admin and curator groups and + gave them all permissions to Workflow model. 
+ """ ) ) diff --git a/backoffice/management/migrations/__init__.py b/backoffice/backoffice/management/migrations/__init__.py similarity index 100% rename from backoffice/management/migrations/__init__.py rename to backoffice/backoffice/management/migrations/__init__.py diff --git a/backoffice/management/permissions.py b/backoffice/backoffice/management/permissions.py similarity index 100% rename from backoffice/management/permissions.py rename to backoffice/backoffice/management/permissions.py diff --git a/backoffice/management/tests/test_commands.py b/backoffice/backoffice/management/tests/test_commands.py similarity index 100% rename from backoffice/management/tests/test_commands.py rename to backoffice/backoffice/management/tests/test_commands.py diff --git a/backoffice/management/tests/test_permissions.py b/backoffice/backoffice/management/tests/test_permissions.py similarity index 92% rename from backoffice/management/tests/test_permissions.py rename to backoffice/backoffice/management/tests/test_permissions.py index e8ae63ea9..4de6492d1 100644 --- a/backoffice/management/tests/test_permissions.py +++ b/backoffice/backoffice/management/tests/test_permissions.py @@ -22,7 +22,9 @@ class PermissionCheckTests(TransactionTestCase): fixtures = ["backoffice/fixtures/groups.json"] def setUp(self): - self.user = User.objects.create_user(email="testuser@test.com", password="testpassword") + self.user = User.objects.create_user( + email="testuser@test.com", password="testpassword" + ) self.admin_group = Group.objects.get(name="admin") self.curator_group = Group.objects.get(name="curator") diff --git a/backoffice/static/css/project.css b/backoffice/backoffice/static/css/project.css similarity index 100% rename from backoffice/static/css/project.css rename to backoffice/backoffice/static/css/project.css diff --git a/backoffice/static/fonts/.gitkeep b/backoffice/backoffice/static/fonts/.gitkeep similarity index 100% rename from backoffice/static/fonts/.gitkeep rename to backoffice/backoffice/static/fonts/.gitkeep diff --git a/backoffice/static/images/favicons/favicon.ico b/backoffice/backoffice/static/images/favicons/favicon.ico similarity index 100% rename from backoffice/static/images/favicons/favicon.ico rename to backoffice/backoffice/static/images/favicons/favicon.ico diff --git a/backoffice/static/js/project.js b/backoffice/backoffice/static/js/project.js similarity index 100% rename from backoffice/static/js/project.js rename to backoffice/backoffice/static/js/project.js diff --git a/backoffice/templates/403.html b/backoffice/backoffice/templates/403.html similarity index 100% rename from backoffice/templates/403.html rename to backoffice/backoffice/templates/403.html diff --git a/backoffice/templates/403_csrf.html b/backoffice/backoffice/templates/403_csrf.html similarity index 100% rename from backoffice/templates/403_csrf.html rename to backoffice/backoffice/templates/403_csrf.html diff --git a/backoffice/templates/404.html b/backoffice/backoffice/templates/404.html similarity index 100% rename from backoffice/templates/404.html rename to backoffice/backoffice/templates/404.html diff --git a/backoffice/templates/500.html b/backoffice/backoffice/templates/500.html similarity index 100% rename from backoffice/templates/500.html rename to backoffice/backoffice/templates/500.html diff --git a/backoffice/templates/account/account_inactive.html b/backoffice/backoffice/templates/account/account_inactive.html similarity index 100% rename from backoffice/templates/account/account_inactive.html 
rename to backoffice/backoffice/templates/account/account_inactive.html diff --git a/backoffice/templates/account/base.html b/backoffice/backoffice/templates/account/base.html similarity index 100% rename from backoffice/templates/account/base.html rename to backoffice/backoffice/templates/account/base.html diff --git a/backoffice/templates/account/email.html b/backoffice/backoffice/templates/account/email.html similarity index 100% rename from backoffice/templates/account/email.html rename to backoffice/backoffice/templates/account/email.html diff --git a/backoffice/templates/account/email_confirm.html b/backoffice/backoffice/templates/account/email_confirm.html similarity index 100% rename from backoffice/templates/account/email_confirm.html rename to backoffice/backoffice/templates/account/email_confirm.html diff --git a/backoffice/templates/account/login.html b/backoffice/backoffice/templates/account/login.html similarity index 100% rename from backoffice/templates/account/login.html rename to backoffice/backoffice/templates/account/login.html diff --git a/backoffice/templates/account/logout.html b/backoffice/backoffice/templates/account/logout.html similarity index 100% rename from backoffice/templates/account/logout.html rename to backoffice/backoffice/templates/account/logout.html diff --git a/backoffice/templates/account/password_change.html b/backoffice/backoffice/templates/account/password_change.html similarity index 100% rename from backoffice/templates/account/password_change.html rename to backoffice/backoffice/templates/account/password_change.html diff --git a/backoffice/templates/account/password_reset.html b/backoffice/backoffice/templates/account/password_reset.html similarity index 100% rename from backoffice/templates/account/password_reset.html rename to backoffice/backoffice/templates/account/password_reset.html diff --git a/backoffice/templates/account/password_reset_done.html b/backoffice/backoffice/templates/account/password_reset_done.html similarity index 100% rename from backoffice/templates/account/password_reset_done.html rename to backoffice/backoffice/templates/account/password_reset_done.html diff --git a/backoffice/templates/account/password_reset_from_key.html b/backoffice/backoffice/templates/account/password_reset_from_key.html similarity index 100% rename from backoffice/templates/account/password_reset_from_key.html rename to backoffice/backoffice/templates/account/password_reset_from_key.html diff --git a/backoffice/templates/account/password_reset_from_key_done.html b/backoffice/backoffice/templates/account/password_reset_from_key_done.html similarity index 100% rename from backoffice/templates/account/password_reset_from_key_done.html rename to backoffice/backoffice/templates/account/password_reset_from_key_done.html diff --git a/backoffice/templates/account/password_set.html b/backoffice/backoffice/templates/account/password_set.html similarity index 100% rename from backoffice/templates/account/password_set.html rename to backoffice/backoffice/templates/account/password_set.html diff --git a/backoffice/templates/account/signup.html b/backoffice/backoffice/templates/account/signup.html similarity index 100% rename from backoffice/templates/account/signup.html rename to backoffice/backoffice/templates/account/signup.html diff --git a/backoffice/templates/account/signup_closed.html b/backoffice/backoffice/templates/account/signup_closed.html similarity index 100% rename from backoffice/templates/account/signup_closed.html rename to 
backoffice/backoffice/templates/account/signup_closed.html diff --git a/backoffice/templates/account/verification_sent.html b/backoffice/backoffice/templates/account/verification_sent.html similarity index 100% rename from backoffice/templates/account/verification_sent.html rename to backoffice/backoffice/templates/account/verification_sent.html diff --git a/backoffice/templates/account/verified_email_required.html b/backoffice/backoffice/templates/account/verified_email_required.html similarity index 100% rename from backoffice/templates/account/verified_email_required.html rename to backoffice/backoffice/templates/account/verified_email_required.html diff --git a/backoffice/templates/base.html b/backoffice/backoffice/templates/base.html similarity index 99% rename from backoffice/templates/base.html rename to backoffice/backoffice/templates/base.html index 6eb77e9a0..5f5072955 100644 --- a/backoffice/templates/base.html +++ b/backoffice/backoffice/templates/base.html @@ -26,7 +26,7 @@ - + @@ -48,7 +48,7 @@ - + diff --git a/backoffice/templates/pages/about.html b/backoffice/backoffice/templates/pages/about.html similarity index 92% rename from backoffice/templates/pages/about.html rename to backoffice/backoffice/templates/pages/about.html index 307034833..94d9808cc 100644 --- a/backoffice/templates/pages/about.html +++ b/backoffice/backoffice/templates/pages/about.html @@ -1,3 +1 @@ {% extends "base.html" %} - - diff --git a/backoffice/templates/pages/home.html b/backoffice/backoffice/templates/pages/home.html similarity index 92% rename from backoffice/templates/pages/home.html rename to backoffice/backoffice/templates/pages/home.html index 307034833..94d9808cc 100644 --- a/backoffice/templates/pages/home.html +++ b/backoffice/backoffice/templates/pages/home.html @@ -1,3 +1 @@ {% extends "base.html" %} - - diff --git a/backoffice/templates/users/user_detail.html b/backoffice/backoffice/templates/users/user_detail.html similarity index 94% rename from backoffice/templates/users/user_detail.html rename to backoffice/backoffice/templates/users/user_detail.html index 3edbaa69b..5dcfe3514 100644 --- a/backoffice/templates/users/user_detail.html +++ b/backoffice/backoffice/templates/users/user_detail.html @@ -3,21 +3,21 @@ {% load static %} {% block title %} - User: - + User: + {{ object.name }} - - + + {% endblock title %} {% block content %}

- - + + {{ object.name }} - +

diff --git a/backoffice/templates/users/user_form.html b/backoffice/backoffice/templates/users/user_form.html similarity index 97% rename from backoffice/templates/users/user_form.html rename to backoffice/backoffice/templates/users/user_form.html index 8e2332704..f9b266f78 100644 --- a/backoffice/templates/users/user_form.html +++ b/backoffice/backoffice/templates/users/user_form.html @@ -11,11 +11,11 @@ {% endblock title %} {% block content %}

- - + + {{ user.name }} - - + +

bool: class SocialAccountAdapter(DefaultSocialAccountAdapter): - def is_open_for_signup(self, request: HttpRequest, sociallogin: SocialLogin) -> bool: + def is_open_for_signup( + self, request: HttpRequest, sociallogin: SocialLogin + ) -> bool: return getattr(settings, "ACCOUNT_ALLOW_REGISTRATION", True) - def populate_user(self, request: HttpRequest, sociallogin: SocialLogin, data: dict[str, typing.Any]) -> User: + def populate_user( + self, + request: HttpRequest, + sociallogin: SocialLogin, + data: dict[str, typing.Any], + ) -> User: """ Populates user information from social provider info. diff --git a/backoffice/users/admin.py b/backoffice/backoffice/users/admin.py similarity index 100% rename from backoffice/users/admin.py rename to backoffice/backoffice/users/admin.py diff --git a/backoffice/users/api/serializers.py b/backoffice/backoffice/users/api/serializers.py similarity index 100% rename from backoffice/users/api/serializers.py rename to backoffice/backoffice/users/api/serializers.py diff --git a/backoffice/users/api/views.py b/backoffice/backoffice/users/api/views.py similarity index 94% rename from backoffice/users/api/views.py rename to backoffice/backoffice/users/api/views.py index fa8221482..aadc74d2f 100644 --- a/backoffice/users/api/views.py +++ b/backoffice/backoffice/users/api/views.py @@ -4,8 +4,7 @@ from rest_framework.mixins import ListModelMixin, RetrieveModelMixin, UpdateModelMixin from rest_framework.response import Response from rest_framework.viewsets import GenericViewSet - -from .serializers import UserSerializer +from users.api.serializers import UserSerializer User = get_user_model() diff --git a/backoffice/users/apps.py b/backoffice/backoffice/users/apps.py similarity index 93% rename from backoffice/users/apps.py rename to backoffice/backoffice/users/apps.py index 19d7d8e1a..2c3cac6db 100644 --- a/backoffice/users/apps.py +++ b/backoffice/backoffice/users/apps.py @@ -1,3 +1,4 @@ +# ruff: noqa: SIM105 from django.apps import AppConfig from django.utils.translation import gettext_lazy as _ diff --git a/backoffice/users/context_processors.py b/backoffice/backoffice/users/context_processors.py similarity index 100% rename from backoffice/users/context_processors.py rename to backoffice/backoffice/users/context_processors.py diff --git a/backoffice/backoffice/users/fixtures/tokens.json b/backoffice/backoffice/users/fixtures/tokens.json new file mode 100644 index 000000000..d22238a6e --- /dev/null +++ b/backoffice/backoffice/users/fixtures/tokens.json @@ -0,0 +1,10 @@ +[ +{ + "model": "authtoken.token", + "pk": "2e04111a61e8f5ba6ecec52af21bbb9e81732085", + "fields": { + "user": 1, + "created": "2024-07-23T15:08:17.326Z" + } +} +] diff --git a/backoffice/backoffice/users/fixtures/users.json b/backoffice/backoffice/users/fixtures/users.json new file mode 100644 index 000000000..dbe30f64f --- /dev/null +++ b/backoffice/backoffice/users/fixtures/users.json @@ -0,0 +1,38 @@ +[ +{ + "model": "users.user", + "pk": 1, + "fields": { + "password": "argon2$argon2id$v=19$m=102400,t=2,p=8$M3hDdTNkSHpSTTd0SE5TNW4zNzBQSw$RZmZYp0e0xIReaEvvtg23TX5Pu+OAL2Bl9DPiC0hf/k", + "last_login": "2024-07-23T15:05:59Z", + "is_superuser": true, + "is_staff": true, + "is_active": true, + "date_joined": "2024-07-23T15:05:30Z", + "name": "", + "email": "admin@admin.com", + "groups": [ + 1 + ], + "user_permissions": [] + } +}, +{ + "model": "users.user", + "pk": 2, + "fields": { + "password": 
"argon2$argon2id$v=19$m=102400,t=2,p=8$ckljTUU5WVhhV1YzNXF2Qk92Z2lzdw$F/uGQYwEhmsLf8CMMTHGW7ttU2Q+EdLxlegqCPGyX7o", + "last_login": null, + "is_superuser": true, + "is_staff": true, + "is_active": true, + "date_joined": "2024-07-23T15:08:00Z", + "name": "", + "email": "curator@admin.com", + "groups": [ + 2 + ], + "user_permissions": [] + } +} +] diff --git a/backoffice/users/forms.py b/backoffice/backoffice/users/forms.py similarity index 100% rename from backoffice/users/forms.py rename to backoffice/backoffice/users/forms.py diff --git a/backoffice/users/managers.py b/backoffice/backoffice/users/managers.py similarity index 100% rename from backoffice/users/managers.py rename to backoffice/backoffice/users/managers.py diff --git a/backoffice/users/migrations/0001_initial.py b/backoffice/backoffice/users/migrations/0001_initial.py similarity index 81% rename from backoffice/users/migrations/0001_initial.py rename to backoffice/backoffice/users/migrations/0001_initial.py index 1c00d6d1e..5ad8474d3 100644 --- a/backoffice/users/migrations/0001_initial.py +++ b/backoffice/backoffice/users/migrations/0001_initial.py @@ -1,13 +1,12 @@ import django.contrib.auth.models import django.contrib.auth.validators -from django.db import migrations, models import django.utils.timezone +from django.db import migrations, models import backoffice.users.models class Migration(migrations.Migration): - initial = True dependencies = [ @@ -30,25 +29,29 @@ class Migration(migrations.Migration): ("password", models.CharField(max_length=128, verbose_name="password")), ( "last_login", - models.DateTimeField(blank=True, null=True, verbose_name="last login"), + models.DateTimeField( + blank=True, null=True, verbose_name="last login" + ), ), ( "is_superuser", models.BooleanField( default=False, - help_text="Designates that this user has all permissions without explicitly assigning them.", + help_text="Designates that this user has all permissions without explicitly assigning them.", # noqa: E501 verbose_name="superuser status", ), ), ( "email", - models.EmailField(unique=True, max_length=254, verbose_name="email address"), + models.EmailField( + unique=True, max_length=254, verbose_name="email address" + ), ), ( "is_staff", models.BooleanField( default=False, - help_text="Designates whether the user can log into this admin site.", + help_text="Designates whether the user can log into this admin site.", # noqa: E501 verbose_name="staff status", ), ), @@ -56,23 +59,27 @@ class Migration(migrations.Migration): "is_active", models.BooleanField( default=True, - help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.", + help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.", # noqa: E501 verbose_name="active", ), ), ( "date_joined", - models.DateTimeField(default=django.utils.timezone.now, verbose_name="date joined"), + models.DateTimeField( + default=django.utils.timezone.now, verbose_name="date joined" + ), ), ( "name", - models.CharField(blank=True, max_length=255, verbose_name="Name of User"), + models.CharField( + blank=True, max_length=255, verbose_name="Name of User" + ), ), ( "groups", models.ManyToManyField( blank=True, - help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.", + help_text="The groups this user belongs to. 
A user will get all permissions granted to each of their groups.", # noqa: E501 related_name="user_set", related_query_name="user", to="auth.Group", diff --git a/backoffice/users/migrations/__init__.py b/backoffice/backoffice/users/migrations/__init__.py similarity index 100% rename from backoffice/users/migrations/__init__.py rename to backoffice/backoffice/users/migrations/__init__.py diff --git a/backoffice/users/models.py b/backoffice/backoffice/users/models.py similarity index 100% rename from backoffice/users/models.py rename to backoffice/backoffice/users/models.py diff --git a/backoffice/users/tasks.py b/backoffice/backoffice/users/tasks.py similarity index 99% rename from backoffice/users/tasks.py rename to backoffice/backoffice/users/tasks.py index c99341c5f..29486c792 100644 --- a/backoffice/users/tasks.py +++ b/backoffice/backoffice/users/tasks.py @@ -1,6 +1,5 @@ -from django.contrib.auth import get_user_model - from config import celery_app +from django.contrib.auth import get_user_model User = get_user_model() diff --git a/backoffice/users/tests/__init__.py b/backoffice/backoffice/users/tests/__init__.py similarity index 100% rename from backoffice/users/tests/__init__.py rename to backoffice/backoffice/users/tests/__init__.py diff --git a/backoffice/users/tests/factories.py b/backoffice/backoffice/users/tests/factories.py similarity index 100% rename from backoffice/users/tests/factories.py rename to backoffice/backoffice/users/tests/factories.py diff --git a/backoffice/users/tests/test_admin.py b/backoffice/backoffice/users/tests/test_admin.py similarity index 100% rename from backoffice/users/tests/test_admin.py rename to backoffice/backoffice/users/tests/test_admin.py diff --git a/backoffice/users/tests/test_drf_urls.py b/backoffice/backoffice/users/tests/test_drf_urls.py similarity index 81% rename from backoffice/users/tests/test_drf_urls.py rename to backoffice/backoffice/users/tests/test_drf_urls.py index fbf196f2e..37d180122 100644 --- a/backoffice/users/tests/test_drf_urls.py +++ b/backoffice/backoffice/users/tests/test_drf_urls.py @@ -4,7 +4,9 @@ def test_user_detail(user: User): - assert reverse("api:user-detail", kwargs={"pk": user.pk}) == f"/api/users/{user.pk}/" + assert ( + reverse("api:user-detail", kwargs={"pk": user.pk}) == f"/api/users/{user.pk}/" + ) assert resolve(f"/api/users/{user.pk}/").view_name == "api:user-detail" diff --git a/backoffice/users/tests/test_drf_views.py b/backoffice/backoffice/users/tests/test_drf_views.py similarity index 97% rename from backoffice/users/tests/test_drf_views.py rename to backoffice/backoffice/users/tests/test_drf_views.py index b403f765a..fdab42440 100644 --- a/backoffice/users/tests/test_drf_views.py +++ b/backoffice/backoffice/users/tests/test_drf_views.py @@ -6,7 +6,7 @@ class TestUserViewSet: - @pytest.fixture + @pytest.fixture() def api_rf(self) -> APIRequestFactory: return APIRequestFactory() diff --git a/backoffice/users/tests/test_forms.py b/backoffice/backoffice/users/tests/test_forms.py similarity index 99% rename from backoffice/users/tests/test_forms.py rename to backoffice/backoffice/users/tests/test_forms.py index ef4fcbe9f..569c7f3ac 100644 --- a/backoffice/users/tests/test_forms.py +++ b/backoffice/backoffice/users/tests/test_forms.py @@ -1,6 +1,7 @@ """ Module for all Form Tests. 
""" + from django.utils.translation import gettext_lazy as _ from backoffice.users.forms import UserAdminCreationForm diff --git a/backoffice/users/tests/test_managers.py b/backoffice/backoffice/users/tests/test_managers.py similarity index 96% rename from backoffice/users/tests/test_managers.py rename to backoffice/backoffice/users/tests/test_managers.py index 13a4e538d..ed24d00aa 100644 --- a/backoffice/users/tests/test_managers.py +++ b/backoffice/backoffice/users/tests/test_managers.py @@ -6,7 +6,7 @@ from backoffice.users.models import User -@pytest.mark.django_db +@pytest.mark.django_db() class TestUserManager: def test_create_user(self): user = User.objects.create_user( @@ -37,7 +37,7 @@ def test_create_superuser_username_ignored(self): assert user.username is None -@pytest.mark.django_db +@pytest.mark.django_db() def test_createsuperuser_command(): """Ensure createsuperuser command works with our custom manager.""" out = StringIO() diff --git a/backoffice/users/tests/test_models.py b/backoffice/backoffice/users/tests/test_models.py similarity index 100% rename from backoffice/users/tests/test_models.py rename to backoffice/backoffice/users/tests/test_models.py diff --git a/backoffice/users/tests/test_swagger.py b/backoffice/backoffice/users/tests/test_swagger.py similarity index 95% rename from backoffice/users/tests/test_swagger.py rename to backoffice/backoffice/users/tests/test_swagger.py index f97658b55..5bebf25dd 100644 --- a/backoffice/users/tests/test_swagger.py +++ b/backoffice/backoffice/users/tests/test_swagger.py @@ -8,7 +8,7 @@ def test_swagger_accessible_by_admin(admin_client): assert response.status_code == 200 -@pytest.mark.django_db +@pytest.mark.django_db() def test_swagger_ui_not_accessible_by_normal_user(client): url = reverse("api-docs") response = client.get(url) diff --git a/backoffice/users/tests/test_tasks.py b/backoffice/backoffice/users/tests/test_tasks.py similarity index 100% rename from backoffice/users/tests/test_tasks.py rename to backoffice/backoffice/users/tests/test_tasks.py diff --git a/backoffice/users/tests/test_urls.py b/backoffice/backoffice/users/tests/test_urls.py similarity index 100% rename from backoffice/users/tests/test_urls.py rename to backoffice/backoffice/users/tests/test_urls.py diff --git a/backoffice/users/tests/test_views.py b/backoffice/backoffice/users/tests/test_views.py similarity index 100% rename from backoffice/users/tests/test_views.py rename to backoffice/backoffice/users/tests/test_views.py diff --git a/backoffice/users/urls.py b/backoffice/backoffice/users/urls.py similarity index 70% rename from backoffice/users/urls.py rename to backoffice/backoffice/users/urls.py index 06ebc755f..73dd8975c 100644 --- a/backoffice/users/urls.py +++ b/backoffice/backoffice/users/urls.py @@ -1,6 +1,10 @@ from django.urls import path -from backoffice.users.views import user_detail_view, user_redirect_view, user_update_view +from backoffice.users.views import ( + user_detail_view, + user_redirect_view, + user_update_view, +) app_name = "users" urlpatterns = [ diff --git a/backoffice/users/views.py b/backoffice/backoffice/users/views.py similarity index 89% rename from backoffice/users/views.py rename to backoffice/backoffice/users/views.py index a6d8475cf..c64683e0f 100644 --- a/backoffice/users/views.py +++ b/backoffice/backoffice/users/views.py @@ -23,7 +23,9 @@ class UserUpdateView(LoginRequiredMixin, SuccessMessageMixin, UpdateView): success_message = _("Information successfully updated") def get_success_url(self): - assert 
self.request.user.is_authenticated # for mypy to know that the user is authenticated + assert ( + self.request.user.is_authenticated + ) # for mypy to know that the user is authenticated return self.request.user.get_absolute_url() def get_object(self): diff --git a/backoffice/utils/__init__.py b/backoffice/backoffice/utils/__init__.py similarity index 100% rename from backoffice/utils/__init__.py rename to backoffice/backoffice/utils/__init__.py diff --git a/backoffice/utils/pagination.py b/backoffice/backoffice/utils/pagination.py similarity index 100% rename from backoffice/utils/pagination.py rename to backoffice/backoffice/utils/pagination.py diff --git a/backoffice/utils/storages.py b/backoffice/backoffice/utils/storages.py similarity index 100% rename from backoffice/utils/storages.py rename to backoffice/backoffice/utils/storages.py diff --git a/backoffice/workflows/__init__.py b/backoffice/backoffice/workflows/__init__.py similarity index 100% rename from backoffice/workflows/__init__.py rename to backoffice/backoffice/workflows/__init__.py diff --git a/backoffice/workflows/admin.py b/backoffice/backoffice/workflows/admin.py similarity index 75% rename from backoffice/workflows/admin.py rename to backoffice/backoffice/workflows/admin.py index ee5a5078d..308def4b6 100644 --- a/backoffice/workflows/admin.py +++ b/backoffice/backoffice/workflows/admin.py @@ -1,8 +1,9 @@ from django.contrib import admin +from django.db.models import JSONField +from django_json_widget.widgets import JSONEditorWidget from backoffice.management.permissions import IsAdminOrCuratorUser - -from .models import Workflow +from backoffice.workflows.models import Workflow class WorkflowsAdminSite(admin.AdminSite): @@ -24,7 +25,8 @@ class WorkflowsAdminSite(admin.AdminSite): def has_permission(self, request): return request.user.is_active and ( - request.user.is_superuser or request.user.groups.filter(name="curator").exists() + request.user.is_superuser + or request.user.groups.filter(name="curator").exists() ) @@ -45,25 +47,42 @@ class WorkflowAdmin(admin.ModelAdmin): "_created_at", "_updated_at", ) - list_filter = ["workflow_type", "status", "core", "is_update", "_created_at", "_updated_at"] + list_filter = [ + "workflow_type", + "status", + "core", + "is_update", + "_created_at", + "_updated_at", + ] + + formfield_overrides = { + JSONField: {"widget": JSONEditorWidget}, + } def has_view_permission(self, request, obj=None): """ Returns True if the user has permission to view the Workflow model. """ permission_check = IsAdminOrCuratorUser() - return request.user.is_superuser or permission_check.has_permission(request, self) + return request.user.is_superuser or permission_check.has_permission( + request, self + ) def has_change_permission(self, request, obj=None): """ Returns True if the user has permission to change the Workflow model. """ permission_check = IsAdminOrCuratorUser() - return request.user.is_superuser or permission_check.has_permission(request, self) + return request.user.is_superuser or permission_check.has_permission( + request, self + ) def has_delete_permission(self, request, obj=None): """ Returns True if the user has permission to delete the Workflow model. 
""" permission_check = IsAdminOrCuratorUser() - return request.user.is_superuser or permission_check.has_permission(request, self) + return request.user.is_superuser or permission_check.has_permission( + request, self + ) diff --git a/backoffice/workflows/airflow_utils.py b/backoffice/backoffice/workflows/airflow_utils.py similarity index 62% rename from backoffice/workflows/airflow_utils.py rename to backoffice/backoffice/workflows/airflow_utils.py index 00952c2a4..c04e1b361 100644 --- a/backoffice/workflows/airflow_utils.py +++ b/backoffice/backoffice/workflows/airflow_utils.py @@ -1,3 +1,4 @@ +import logging from os import environ import requests @@ -7,7 +8,12 @@ AIRFLOW_BASE_URL = environ.get("AIRFLOW_BASE_URL") -AIRFLOW_HEADERS = {"Content-Type": "application/json", "Authorization": f"Basic {environ.get('AIRFLOW_TOKEN')}"} +AIRFLOW_HEADERS = { + "Content-Type": "application/json", + "Authorization": f"Basic {environ.get('AIRFLOW_TOKEN')}", +} + +logger = logging.getLogger(__name__) def trigger_airflow_dag(dag_id, workflow_id, extra_data=None): @@ -26,9 +32,16 @@ def trigger_airflow_dag(dag_id, workflow_id, extra_data=None): url = f"{AIRFLOW_BASE_URL}/api/v1/dags/{dag_id}/dagRuns" try: + logger.info( + "Triggering DAG %s with data: %s and %s %s", + dag_id, + data, + AIRFLOW_HEADERS, + url, + ) response = requests.post(url, json=data, headers=AIRFLOW_HEADERS) response.raise_for_status() return JsonResponse(response.json()) - except RequestException as req_err: - data = {"error": req_err} - return JsonResponse(data, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + except RequestException: + data = {"error": response.json()} + return JsonResponse(data, status=status.HTTP_502_BAD_GATEWAY) diff --git a/backoffice/workflows/api/__init__.py b/backoffice/backoffice/workflows/api/__init__.py similarity index 100% rename from backoffice/workflows/api/__init__.py rename to backoffice/backoffice/workflows/api/__init__.py diff --git a/backoffice/workflows/api/serializers.py b/backoffice/backoffice/workflows/api/serializers.py similarity index 93% rename from backoffice/workflows/api/serializers.py rename to backoffice/backoffice/workflows/api/serializers.py index 726f532bf..b774391af 100644 --- a/backoffice/workflows/api/serializers.py +++ b/backoffice/backoffice/workflows/api/serializers.py @@ -1,11 +1,10 @@ from django_elasticsearch_dsl_drf.serializers import DocumentSerializer from rest_framework import serializers +from backoffice.workflows.constants import ResolutionDags from backoffice.workflows.documents import WorkflowDocument from backoffice.workflows.models import Workflow, WorkflowTicket -from ..constants import ResolutionDags - class WorkflowSerializer(serializers.ModelSerializer): class Meta: diff --git a/backoffice/workflows/api/views.py b/backoffice/backoffice/workflows/api/views.py similarity index 74% rename from backoffice/workflows/api/views.py rename to backoffice/backoffice/workflows/api/views.py index 119f0fef2..251b5ee17 100644 --- a/backoffice/workflows/api/views.py +++ b/backoffice/backoffice/workflows/api/views.py @@ -8,16 +8,15 @@ from backoffice.utils.pagination import OSStandardResultsSetPagination from backoffice.workflows import airflow_utils -from backoffice.workflows.documents import WorkflowDocument -from backoffice.workflows.models import Workflow, WorkflowTicket - -from ..constants import WORKFLOW_DAG, ResolutionDags -from .serializers import ( +from backoffice.workflows.api.serializers import ( AuthorResolutionSerializer, WorkflowDocumentSerializer, 
WorkflowSerializer, WorkflowTicketSerializer, ) +from backoffice.workflows.constants import WORKFLOW_DAG, ResolutionDags +from backoffice.workflows.documents import WorkflowDocument +from backoffice.workflows.models import Workflow, WorkflowTicket logger = logging.getLogger(__name__) @@ -36,7 +35,9 @@ def get_queryset(self): class WorkflowPartialUpdateViewSet(viewsets.ViewSet): def partial_update(self, request, pk=None): workflow_instance = get_object_or_404(Workflow, pk=pk) - serializer = WorkflowSerializer(workflow_instance, data=request.data, partial=True) + serializer = WorkflowSerializer( + workflow_instance, data=request.data, partial=True + ) if serializer.is_valid(): serializer.save() @@ -51,15 +52,21 @@ def retrieve(self, request, *args, **kwargs): if not workflow_id or not ticket_type: return Response( - {"error": "Both workflow_id and ticket_type are required."}, status=status.HTTP_400_BAD_REQUEST + {"error": "Both workflow_id and ticket_type are required."}, + status=status.HTTP_400_BAD_REQUEST, ) try: - workflow_ticket = WorkflowTicket.objects.get(workflow_id=workflow_id, ticket_type=ticket_type) + workflow_ticket = WorkflowTicket.objects.get( + workflow_id=workflow_id, ticket_type=ticket_type + ) serializer = WorkflowTicketSerializer(workflow_ticket) return Response(serializer.data) except WorkflowTicket.DoesNotExist: - return Response({"error": "Workflow ticket not found."}, status=status.HTTP_404_NOT_FOUND) + return Response( + {"error": "Workflow ticket not found."}, + status=status.HTTP_404_NOT_FOUND, + ) def create(self, request, *args, **kwargs): workflow_id = request.data.get("workflow_id") @@ -68,7 +75,8 @@ def create(self, request, *args, **kwargs): if not all([workflow_id, ticket_type, ticket_id]): return Response( - {"error": "Workflow_id, ticket_id and ticket_type are required."}, status=status.HTTP_400_BAD_REQUEST + {"error": "Workflow_id, ticket_id and ticket_type are required."}, + status=status.HTTP_400_BAD_REQUEST, ) try: @@ -79,7 +87,9 @@ def create(self, request, *args, **kwargs): serializer = WorkflowTicketSerializer(workflow_ticket) return Response(serializer.data, status=status.HTTP_201_CREATED) except Exception as e: - return Response({"error": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + return Response( + {"error": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) class AuthorWorkflowViewSet(viewsets.ViewSet): @@ -90,10 +100,17 @@ def create(self, request): serializer = self.serializer_class(data=request.data) if serializer.is_valid(raise_exception=True): workflow = Workflow.objects.create( - data=serializer.validated_data["data"], workflow_type=serializer.validated_data["workflow_type"] + data=serializer.validated_data["data"], + workflow_type=serializer.validated_data["workflow_type"], ) - logger.info("Trigger Airflow DAG: %s for %s", WORKFLOW_DAG[workflow.workflow_type], workflow.id) - return airflow_utils.trigger_airflow_dag(WORKFLOW_DAG[workflow.workflow_type], str(workflow.id), workflow.data) + logger.info( + "Trigger Airflow DAG: %s for %s", + WORKFLOW_DAG[workflow.workflow_type], + workflow.id, + ) + return airflow_utils.trigger_airflow_dag( + WORKFLOW_DAG[workflow.workflow_type], str(workflow.id), workflow.data + ) @action(detail=True, methods=["post"]) def resolve(self, request, pk=None): @@ -101,7 +118,11 @@ def resolve(self, request, pk=None): serializer = AuthorResolutionSerializer(data=request.data) if serializer.is_valid(raise_exception=True): extra_data = {"create_ticket": serializer.validated_data["create_ticket"]} 
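The `trigger_airflow_dag` helper changed above wraps Airflow's stable REST API (`POST /api/v1/dags/{dag_id}/dagRuns`), and these viewsets hand it the DAG id from `WORKFLOW_DAG` or `ResolutionDags` plus the workflow's id and payload. One caveat worth noting: the new `logger.info` call in that helper also logs `AIRFLOW_HEADERS`, i.e. the Basic auth token. A minimal standalone sketch of the same call, assuming the usual dagRuns payload shape with the trigger arguments under "conf" (the exact construction of `data` happens outside the visible hunk, so this layout is an assumption):

    import os

    import requests

    AIRFLOW_BASE_URL = os.environ.get("AIRFLOW_BASE_URL", "http://localhost:8080")
    AIRFLOW_HEADERS = {
        "Content-Type": "application/json",
        "Authorization": f"Basic {os.environ.get('AIRFLOW_TOKEN')}",
    }


    def trigger_dag_run(dag_id, workflow_id, extra_data=None):
        # Everything under "conf" becomes the DAG run configuration that the
        # DAG's tasks can read; the key layout here is illustrative only.
        conf = {"workflow_id": workflow_id}
        if extra_data:
            conf.update(extra_data)
        response = requests.post(
            f"{AIRFLOW_BASE_URL}/api/v1/dags/{dag_id}/dagRuns",
            json={"conf": conf},
            headers=AIRFLOW_HEADERS,
            timeout=30,
        )
        # 404 means the DAG id is unknown; 409 means a duplicate dag_run_id.
        response.raise_for_status()
        return response.json()
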
- logger.info("Trigger Airflow DAG: %s for %s", ResolutionDags[serializer.validated_data["value"]], pk) + logger.info( + "Trigger Airflow DAG: %s for %s", + ResolutionDags[serializer.validated_data["value"]], + pk, + ) return airflow_utils.trigger_airflow_dag( ResolutionDags[serializer.validated_data["value"]].label, pk, extra_data ) diff --git a/backoffice/workflows/apps.py b/backoffice/backoffice/workflows/apps.py similarity index 100% rename from backoffice/workflows/apps.py rename to backoffice/backoffice/workflows/apps.py diff --git a/backoffice/workflows/constants.py b/backoffice/backoffice/workflows/constants.py similarity index 100% rename from backoffice/workflows/constants.py rename to backoffice/backoffice/workflows/constants.py diff --git a/backoffice/workflows/documents.py b/backoffice/backoffice/workflows/documents.py similarity index 100% rename from backoffice/workflows/documents.py rename to backoffice/backoffice/workflows/documents.py diff --git a/backoffice/workflows/migrations/0001_initial.py b/backoffice/backoffice/workflows/migrations/0001_initial.py similarity index 86% rename from backoffice/workflows/migrations/0001_initial.py rename to backoffice/backoffice/workflows/migrations/0001_initial.py index 2d4328b47..faa4704b3 100644 --- a/backoffice/workflows/migrations/0001_initial.py +++ b/backoffice/backoffice/workflows/migrations/0001_initial.py @@ -1,9 +1,10 @@ # Generated by Django 4.2.6 on 2023-10-16 09:06 -from django.db import migrations, models -import django.db.models.deletion import uuid +import django.db.models.deletion +from django.db import migrations, models + class Migration(migrations.Migration): initial = True @@ -14,7 +15,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="WorkflowData", fields=[ - ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ( + "id", + models.UUIDField( + default=uuid.uuid4, + editable=False, + primary_key=True, + serialize=False, + ), + ), ("data", models.JSONField()), ], ), diff --git a/backoffice/workflows/migrations/0002_workflow_remove_workflowmeta_id_and_more.py b/backoffice/backoffice/workflows/migrations/0002_workflow_remove_workflowmeta_id_and_more.py similarity index 82% rename from backoffice/workflows/migrations/0002_workflow_remove_workflowmeta_id_and_more.py rename to backoffice/backoffice/workflows/migrations/0002_workflow_remove_workflowmeta_id_and_more.py index dc659ed1f..1c426d8f3 100644 --- a/backoffice/workflows/migrations/0002_workflow_remove_workflowmeta_id_and_more.py +++ b/backoffice/backoffice/workflows/migrations/0002_workflow_remove_workflowmeta_id_and_more.py @@ -1,8 +1,9 @@ # Generated by Django 4.2.6 on 2023-10-17 07:16 -from django.db import migrations, models import uuid +from django.db import migrations, models + class Migration(migrations.Migration): dependencies = [ @@ -13,7 +14,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Workflow", fields=[ - ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ( + "id", + models.UUIDField( + default=uuid.uuid4, + editable=False, + primary_key=True, + serialize=False, + ), + ), ("data", models.JSONField()), ( "status", diff --git a/backoffice/workflows/migrations/0003_workflowticket.py b/backoffice/backoffice/workflows/migrations/0003_workflowticket.py similarity index 53% rename from backoffice/workflows/migrations/0003_workflowticket.py rename to 
backoffice/backoffice/workflows/migrations/0003_workflowticket.py index 085524edc..8255454a6 100644 --- a/backoffice/workflows/migrations/0003_workflowticket.py +++ b/backoffice/backoffice/workflows/migrations/0003_workflowticket.py @@ -1,7 +1,7 @@ # Generated by Django 4.2.6 on 2023-11-20 14:17 -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations, models class Migration(migrations.Migration): @@ -13,11 +13,22 @@ class Migration(migrations.Migration): migrations.CreateModel( name="WorkflowTicket", fields=[ - ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("ticket_id", models.CharField(max_length=32)), ( "workflow_id", - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="workflows.workflow"), + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="workflows.workflow", + ), ), ], ), diff --git a/backoffice/workflows/migrations/0004_workflow_workflow_type.py b/backoffice/backoffice/workflows/migrations/0004_workflow_workflow_type.py similarity index 100% rename from backoffice/workflows/migrations/0004_workflow_workflow_type.py rename to backoffice/backoffice/workflows/migrations/0004_workflow_workflow_type.py diff --git a/backoffice/workflows/migrations/0005_workflowticket_ticket_type_alter_workflow_status.py b/backoffice/backoffice/workflows/migrations/0005_workflowticket_ticket_type_alter_workflow_status.py similarity index 100% rename from backoffice/workflows/migrations/0005_workflowticket_ticket_type_alter_workflow_status.py rename to backoffice/backoffice/workflows/migrations/0005_workflowticket_ticket_type_alter_workflow_status.py diff --git a/backoffice/workflows/migrations/0006_workflow__created_at_workflow__updated_at.py b/backoffice/backoffice/workflows/migrations/0006_workflow__created_at_workflow__updated_at.py similarity index 82% rename from backoffice/workflows/migrations/0006_workflow__created_at_workflow__updated_at.py rename to backoffice/backoffice/workflows/migrations/0006_workflow__created_at_workflow__updated_at.py index 9f4abbc34..b472ffd8f 100644 --- a/backoffice/workflows/migrations/0006_workflow__created_at_workflow__updated_at.py +++ b/backoffice/backoffice/workflows/migrations/0006_workflow__created_at_workflow__updated_at.py @@ -1,7 +1,7 @@ # Generated by Django 4.2.6 on 2024-06-03 06:25 -from django.db import migrations, models import django.utils.timezone +from django.db import migrations, models class Migration(migrations.Migration): @@ -13,7 +13,9 @@ class Migration(migrations.Migration): migrations.AddField( model_name="workflow", name="_created_at", - field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), + field=models.DateTimeField( + auto_now_add=True, default=django.utils.timezone.now + ), preserve_default=False, ), migrations.AddField( diff --git a/backoffice/workflows/migrations/0007_alter_workflow_core_alter_workflow_is_update.py b/backoffice/backoffice/workflows/migrations/0007_alter_workflow_core_alter_workflow_is_update.py similarity index 100% rename from backoffice/workflows/migrations/0007_alter_workflow_core_alter_workflow_is_update.py rename to backoffice/backoffice/workflows/migrations/0007_alter_workflow_core_alter_workflow_is_update.py diff --git a/backoffice/workflows/migrations/0008_alter_workflow_status_alter_workflow_workflow_type.py 
b/backoffice/backoffice/workflows/migrations/0008_alter_workflow_status_alter_workflow_workflow_type.py similarity index 100% rename from backoffice/workflows/migrations/0008_alter_workflow_status_alter_workflow_workflow_type.py rename to backoffice/backoffice/workflows/migrations/0008_alter_workflow_status_alter_workflow_workflow_type.py diff --git a/backoffice/workflows/migrations/__init__.py b/backoffice/backoffice/workflows/migrations/__init__.py similarity index 100% rename from backoffice/workflows/migrations/__init__.py rename to backoffice/backoffice/workflows/migrations/__init__.py diff --git a/backoffice/workflows/models.py b/backoffice/backoffice/workflows/models.py similarity index 77% rename from backoffice/workflows/models.py rename to backoffice/backoffice/workflows/models.py index 1b3b47698..265641cbf 100644 --- a/backoffice/workflows/models.py +++ b/backoffice/backoffice/workflows/models.py @@ -2,7 +2,7 @@ from django.db import models -from .constants import ( +from backoffice.workflows.constants import ( DEFAULT_STATUS_CHOICE, DEFAULT_TICKET_TYPE, DEFAULT_WORKFLOW_TYPE, @@ -35,5 +35,9 @@ class Workflow(models.Model): class WorkflowTicket(models.Model): workflow_id = models.ForeignKey(Workflow, on_delete=models.CASCADE) - ticket_id = models.CharField(max_length=32, null=False, blank=False) # in SNOW it's GUID - ticket_type = models.CharField(max_length=30, choices=TICKET_TYPES, default=DEFAULT_TICKET_TYPE) + ticket_id = models.CharField( + max_length=32, null=False, blank=False + ) # in SNOW it's GUID + ticket_type = models.CharField( + max_length=30, choices=TICKET_TYPES, default=DEFAULT_TICKET_TYPE + ) diff --git a/backoffice/workflows/tests/test_views.py b/backoffice/backoffice/workflows/tests/test_views.py similarity index 80% rename from backoffice/workflows/tests/test_views.py rename to backoffice/backoffice/workflows/tests/test_views.py index 1b847ee18..f245e830c 100644 --- a/backoffice/workflows/tests/test_views.py +++ b/backoffice/backoffice/workflows/tests/test_views.py @@ -25,9 +25,13 @@ def setUp(self): self.curator_group = Group.objects.get(name="curator") self.admin_group = Group.objects.get(name="admin") - self.curator = User.objects.create_user(email="curator@test.com", password="12345") + self.curator = User.objects.create_user( + email="curator@test.com", password="12345" + ) self.admin = User.objects.create_user(email="admin@test.com", password="12345") - self.user = User.objects.create_user(email="testuser@test.com", password="12345") + self.user = User.objects.create_user( + email="testuser@test.com", password="12345" + ) self.curator.groups.add(self.curator_group) self.admin.groups.add(self.admin_group) @@ -42,7 +46,9 @@ class TestWorkflowViewSet(BaseTransactionTestCase): def setUp(self): super().setUp() - self.workflow = Workflow.objects.create(data={}, status=StatusChoices.APPROVAL, core=True, is_update=False) + self.workflow = Workflow.objects.create( + data={}, status=StatusChoices.APPROVAL, core=True, is_update=False + ) def test_list_curator(self): self.api_client.force_authenticate(user=self.curator) @@ -75,7 +81,9 @@ def setUp(self): super().setUp() index = Index("backoffice-backend-test-workflows") index.delete(ignore=[400, 404]) - self.workflow = Workflow.objects.create(data={}, status=StatusChoices.APPROVAL, core=True, is_update=False) + self.workflow = Workflow.objects.create( + data={}, status=StatusChoices.APPROVAL, core=True, is_update=False + ) def test_list_curator(self): self.api_client.force_authenticate(user=self.curator) @@ -105,7 
+113,9 @@ class TestWorkflowPartialUpdateViewSet(BaseTransactionTestCase): def setUp(self): super().setUp() - self.workflow = Workflow.objects.create(data={}, status=StatusChoices.APPROVAL, core=True, is_update=False) + self.workflow = Workflow.objects.create( + data={}, status=StatusChoices.APPROVAL, core=True, is_update=False + ) @property def endpoint(self): @@ -113,7 +123,9 @@ def endpoint(self): def test_patch_curator(self): self.api_client.force_authenticate(user=self.curator) - response = self.api_client.patch(self.endpoint, format="json", data={"status": "running"}) + response = self.api_client.patch( + self.endpoint, format="json", data={"status": "running"} + ) self.assertEqual(response.status_code, 200) workflow = Workflow.objects.filter(id=str(self.workflow.id))[0] @@ -122,13 +134,15 @@ def test_patch_curator(self): def test_patch_admin(self): self.api_client.force_authenticate(user=self.admin) response = self.api_client.patch( - self.endpoint, format="json", data={"status": "approval", "data": {"test": "test"}} + self.endpoint, + format="json", + data={"status": "approval", "data": {"test": "test"}}, ) workflow = Workflow.objects.filter(id=str(self.workflow.id))[0] self.assertEqual(response.status_code, 200) - self.assertEquals(workflow.status, "approval") - self.assertEquals( + self.assertEqual(workflow.status, "approval") + self.assertEqual( workflow.data, { "test": "test", @@ -149,7 +163,9 @@ class TestWorkflowTicketViewSet(BaseTransactionTestCase): def setUp(self): super().setUp() - self.workflow = Workflow.objects.create(data={}, status="running", core=True, is_update=False) + self.workflow = Workflow.objects.create( + data={}, status="running", core=True, is_update=False + ) self.workflow_ticket = WorkflowTicket.objects.create( workflow_id=self.workflow, ticket_id="123", ticket_type="author_create_user" ) @@ -157,17 +173,23 @@ def setUp(self): def test_get_missing_params(self): self.api_client.force_authenticate(user=self.curator) response = self.api_client.get( - f"{TestWorkflowTicketViewSet.endpoint}/{self.workflow.id}/", format="json", data={} + f"{TestWorkflowTicketViewSet.endpoint}/{self.workflow.id}/", + format="json", + data={}, ) assert response.status_code == 400 - assert response.data == {"error": "Both workflow_id and ticket_type are required."} + assert response.data == { + "error": "Both workflow_id and ticket_type are required." 
+ } def test_get_ticket_not_found(self): query_params = {"ticket_type": "test"} self.api_client.force_authenticate(user=self.curator) response = self.api_client.get( - f"{TestWorkflowTicketViewSet.endpoint}/{self.workflow.id}/", format="json", data=query_params + f"{TestWorkflowTicketViewSet.endpoint}/{self.workflow.id}/", + format="json", + data=query_params, ) assert response.status_code == 404 @@ -178,7 +200,9 @@ def test_get_ticket_happy_flow(self): query_params = {"ticket_type": self.workflow_ticket.ticket_type} response = self.api_client.get( - f"{TestWorkflowTicketViewSet.endpoint}/{self.workflow.id}/", format="json", data=query_params + f"{TestWorkflowTicketViewSet.endpoint}/{self.workflow.id}/", + format="json", + data=query_params, ) assert response.status_code == 200 @@ -186,10 +210,14 @@ def test_get_ticket_happy_flow(self): def test_create_missing_params(self): self.api_client.force_authenticate(user=self.curator) - response = self.api_client.post(f"{TestWorkflowTicketViewSet.endpoint}/", format="json", data={}) + response = self.api_client.post( + f"{TestWorkflowTicketViewSet.endpoint}/", format="json", data={} + ) assert response.status_code == 400 - assert response.data == {"error": "Workflow_id, ticket_id and ticket_type are required."} + assert response.data == { + "error": "Workflow_id, ticket_id and ticket_type are required." + } def test_create_happy_flow(self): self.api_client.force_authenticate(user=self.curator) @@ -199,7 +227,9 @@ def test_create_happy_flow(self): "ticket_id": "dc94caad1b4f71502d06117a3b4bcb25", "ticket_type": "author_create_user", } - response = self.api_client.post(f"{TestWorkflowTicketViewSet.endpoint}/", format="json", data=data) + response = self.api_client.post( + f"{TestWorkflowTicketViewSet.endpoint}/", format="json", data=data + ) assert response.status_code == 201 @@ -207,7 +237,10 @@ def test_create_happy_flow(self): assert "ticket_id" in response.data assert "ticket_type" in response.data - assert response.data == WorkflowTicketSerializer(WorkflowTicket.objects.last()).data + assert ( + response.data + == WorkflowTicketSerializer(WorkflowTicket.objects.last()).data + ) class TestAuthorWorkflowViewSet(BaseTransactionTestCase): @@ -251,7 +284,9 @@ def test_accept_author(self, mock_post): data = {"create_ticket": True, "value": "accept"} response = self.api_client.post( - reverse("api:workflows-authors-resolve", kwargs={"pk": "WORKFLOW_ID"}), format="json", data=data + reverse("api:workflows-authors-resolve", kwargs={"pk": "WORKFLOW_ID"}), + format="json", + data=data, ) self.assertEqual(response.status_code, 200) @@ -267,7 +302,9 @@ def test_reject_author(self, mock_post): data = {"create_ticket": True, "value": "reject"} response = self.api_client.post( - reverse("api:workflows-authors-resolve", kwargs={"pk": "WORKFLOW_ID"}), format="json", data=data + reverse("api:workflows-authors-resolve", kwargs={"pk": "WORKFLOW_ID"}), + format="json", + data=data, ) self.assertEqual(response.status_code, 200) diff --git a/config/__init__.py b/backoffice/config/__init__.py similarity index 73% rename from config/__init__.py rename to backoffice/config/__init__.py index 10f501427..183c6abeb 100644 --- a/config/__init__.py +++ b/backoffice/config/__init__.py @@ -1,5 +1,5 @@ # This will make sure the app is always imported when # Django starts so that shared_task will use this app. 
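The comment above is the standard Celery/Django wiring: because `config/__init__.py` imports the Celery app when Django starts, task modules never have to know where the app lives. The project's own tasks (see `backoffice/users/tasks.py` earlier in this patch) import it directly with `from config import celery_app`; the decoupled alternative this comment enables is `shared_task`, sketched below with a hypothetical task that is not part of this patch:

    from celery import shared_task


    @shared_task()
    def ping():
        # Illustrative only: enqueue with ping.delay(); a worker executes it
        # on the app re-exported by config/__init__.py at startup.
        return "pong"
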
-from .celery_app import app as celery_app +from config.celery_app import app as celery_app __all__ = ("celery_app",) diff --git a/config/api_router.py b/backoffice/config/api_router.py similarity index 56% rename from config/api_router.py rename to backoffice/config/api_router.py index 74452a7c0..3969a7c20 100644 --- a/config/api_router.py +++ b/backoffice/config/api_router.py @@ -9,17 +9,20 @@ WorkflowViewSet, ) -if settings.DEBUG: - router = DefaultRouter() -else: - router = SimpleRouter() +router = DefaultRouter() if settings.DEBUG else SimpleRouter() router.register("users", UserViewSet) # Workflows -router.register("workflows/authors", AuthorWorkflowViewSet, basename="workflows-authors"), +( + router.register( + "workflows/authors", AuthorWorkflowViewSet, basename="workflows-authors" + ), +) router.register("workflows", WorkflowViewSet, basename="workflows") -router.register("workflow-update", WorkflowPartialUpdateViewSet, basename="workflow-update") -router.register("workflow-ticket", WorkflowTicketViewSet, basename="workflow-ticket"), +router.register( + "workflow-update", WorkflowPartialUpdateViewSet, basename="workflow-update" +) +(router.register("workflow-ticket", WorkflowTicketViewSet, basename="workflow-ticket"),) app_name = "api" urlpatterns = router.urls diff --git a/config/asgi.py b/backoffice/config/asgi.py similarity index 99% rename from config/asgi.py rename to backoffice/config/asgi.py index 9d453fb89..63a888ea9 100644 --- a/config/asgi.py +++ b/backoffice/config/asgi.py @@ -7,6 +7,7 @@ https://docs.djangoproject.com/en/dev/howto/deployment/asgi/ """ + import os import sys from pathlib import Path diff --git a/config/celery_app.py b/backoffice/config/celery_app.py similarity index 100% rename from config/celery_app.py rename to backoffice/config/celery_app.py diff --git a/config/search_router.py b/backoffice/config/search_router.py similarity index 78% rename from config/search_router.py rename to backoffice/config/search_router.py index 5b4127b69..3fd3ca590 100644 --- a/config/search_router.py +++ b/backoffice/config/search_router.py @@ -3,10 +3,7 @@ from backoffice.workflows.api.views import WorkflowDocumentView -if settings.DEBUG: - router = DefaultRouter() -else: - router = SimpleRouter() +router = DefaultRouter() if settings.DEBUG else SimpleRouter() # Workflow diff --git a/config/settings/__init__.py b/backoffice/config/settings/__init__.py similarity index 100% rename from config/settings/__init__.py rename to backoffice/config/settings/__init__.py diff --git a/config/settings/base.py b/backoffice/config/settings/base.py similarity index 96% rename from config/settings/base.py rename to backoffice/config/settings/base.py index f9d3048eb..e8fa62886 100644 --- a/config/settings/base.py +++ b/backoffice/config/settings/base.py @@ -1,6 +1,7 @@ """ Base settings to build other settings files upon. 
""" + import platform from pathlib import Path @@ -16,8 +17,8 @@ READ_DOT_ENV_FILE = env.bool("DJANGO_READ_DOT_ENV_FILE", default=True) if READ_DOT_ENV_FILE: # OS environment variables take precedence over variables from .env - env.read_env(str(BASE_DIR / ".envs/docker/.django")) - env.read_env(str(BASE_DIR / ".envs/docker/.postgres")) + env.read_env(str(BASE_DIR / ".envs/local/.django")) + env.read_env(str(BASE_DIR / ".envs/local/.postgres")) # GENERAL # ------------------------------------------------------------------------------ @@ -58,7 +59,9 @@ "NAME": env("POSTGRES_DB"), "USER": env("POSTGRES_USER"), "PASSWORD": env("POSTGRES_PASSWORD"), - "HOST": env("POSTGRES_HOST") if platform.system() == "Linux" else "localhost", + "HOST": env("POSTGRES_HOST") + if platform.system() == "Linux" + else "localhost", "PORT": env("POSTGRES_PORT"), } } @@ -102,6 +105,7 @@ "django_opensearch_dsl", "django_elasticsearch_dsl_drf", "rest_framework_simplejwt", + "django_json_widget", ] LOCAL_APPS = ["backoffice.users", "backoffice.workflows", "backoffice.management"] @@ -139,7 +143,9 @@ ] # https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ - {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"}, + { + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" # noqa: E501 + }, {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, @@ -266,7 +272,7 @@ "disable_existing_loggers": False, "formatters": { "verbose": { - "format": "%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s", + "format": "%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s", # noqa: E501 }, }, "handlers": { @@ -342,9 +348,11 @@ "DEFAULT_AUTHENTICATION_CLASSES": ( "rest_framework.authentication.SessionAuthentication", "rest_framework.authentication.TokenAuthentication", - 'rest_framework_simplejwt.authentication.JWTAuthentication', + "rest_framework_simplejwt.authentication.JWTAuthentication", + ), + "DEFAULT_PERMISSION_CLASSES": ( + "backoffice.management.permissions.IsAdminOrCuratorUser", ), - "DEFAULT_PERMISSION_CLASSES": ("backoffice.management.permissions.IsAdminOrCuratorUser",), "DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema", "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.PageNumberPagination", } @@ -352,7 +360,8 @@ # django-cors-headers - https://github.com/adamchainz/django-cors-headers#setup CORS_URLS_REGEX = r"^/api/.*$" -# By Default swagger ui is available only to admin user(s). You can change permission classes to change that +# By Default swagger ui is available only to admin user(s). 
You can change permission +# classes to change that # See more configuration options at https://drf-spectacular.readthedocs.io/en/latest/settings.html#settings SPECTACULAR_SETTINGS = { "TITLE": "backoffice API", diff --git a/config/settings/local.py b/backoffice/config/settings/local.py similarity index 98% rename from config/settings/local.py rename to backoffice/config/settings/local.py index 6a2f8b6ad..0f0898949 100644 --- a/config/settings/local.py +++ b/backoffice/config/settings/local.py @@ -1,5 +1,5 @@ from .base import * # noqa -from .base import env +from config.settings.base import env # GENERAL # ------------------------------------------------------------------------------ diff --git a/config/settings/production.py b/backoffice/config/settings/production.py similarity index 96% rename from config/settings/production.py rename to backoffice/config/settings/production.py index dcce0c06e..1641b199a 100644 --- a/config/settings/production.py +++ b/backoffice/config/settings/production.py @@ -7,8 +7,9 @@ from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.integrations.redis import RedisIntegration +from config.settings.base import env + from .base import * # noqa -from .base import env # GENERAL # ------------------------------------------------------------------------------ @@ -51,11 +52,15 @@ # TODO: set this to 60 seconds first and then to 518400 once you prove the former works SECURE_HSTS_SECONDS = 60 # https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-include-subdomains -SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool("DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True) +SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool( + "DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True +) # https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-preload SECURE_HSTS_PRELOAD = env.bool("DJANGO_SECURE_HSTS_PRELOAD", default=True) # https://docs.djangoproject.com/en/dev/ref/middleware/#x-content-type-options-nosniff -SECURE_CONTENT_TYPE_NOSNIFF = env.bool("DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True) +SECURE_CONTENT_TYPE_NOSNIFF = env.bool( + "DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True +) # Metrics endpoint needs to be http for prometheus to access it SECURE_REDIRECT_EXEMPT = [r"^metrics$"] @@ -143,7 +148,7 @@ "disable_existing_loggers": True, "formatters": { "verbose": { - "format": "%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s", + "format": "%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s", # noqa: E501 }, }, "handlers": { diff --git a/config/settings/test.py b/backoffice/config/settings/test.py similarity index 97% rename from config/settings/test.py rename to backoffice/config/settings/test.py index b7afc7719..7ee0cbda6 100644 --- a/config/settings/test.py +++ b/backoffice/config/settings/test.py @@ -3,7 +3,7 @@ """ from .base import * # noqa -from .base import env +from config.settings.base import env # GENERAL # ------------------------------------------------------------------------------ @@ -52,4 +52,3 @@ "timeout": 30, }, } - diff --git a/config/urls.py b/backoffice/config/urls.py similarity index 96% rename from config/urls.py rename to backoffice/config/urls.py index fe35582e5..db2635c01 100644 --- a/config/urls.py +++ b/backoffice/config/urls.py @@ -11,7 +11,9 @@ urlpatterns = [ path("", TemplateView.as_view(template_name="pages/home.html"), name="home"), - path("about/", TemplateView.as_view(template_name="pages/about.html"), name="about"), + path( + "about/", 
TemplateView.as_view(template_name="pages/about.html"), name="about" + ), # Django Admin, use {% url 'admin:index' %} path(settings.ADMIN_URL, admin.site.urls), # User management diff --git a/config/websocket.py b/backoffice/config/websocket.py similarity index 63% rename from config/websocket.py rename to backoffice/config/websocket.py index 81adfbc66..abdf19f9f 100644 --- a/config/websocket.py +++ b/backoffice/config/websocket.py @@ -8,6 +8,5 @@ async def websocket_application(scope, receive, send): if event["type"] == "websocket.disconnect": break - if event["type"] == "websocket.receive": - if event["text"] == "ping": - await send({"type": "websocket.send", "text": "pong!"}) + if event["type"] == "websocket.receive" and event["text"] == "ping": + await send({"type": "websocket.send", "text": "pong!"}) diff --git a/config/wsgi.py b/backoffice/config/wsgi.py similarity index 99% rename from config/wsgi.py rename to backoffice/config/wsgi.py index a12f99627..3b41587a7 100644 --- a/config/wsgi.py +++ b/backoffice/config/wsgi.py @@ -13,6 +13,7 @@ framework. """ + import os import sys from pathlib import Path diff --git a/locale/README.md b/backoffice/locale/README.md similarity index 100% rename from locale/README.md rename to backoffice/locale/README.md diff --git a/locale/en_US/LC_MESSAGES/django.po b/backoffice/locale/en_US/LC_MESSAGES/django.po similarity index 100% rename from locale/en_US/LC_MESSAGES/django.po rename to backoffice/locale/en_US/LC_MESSAGES/django.po diff --git a/locale/fr_FR/LC_MESSAGES/django.po b/backoffice/locale/fr_FR/LC_MESSAGES/django.po similarity index 100% rename from locale/fr_FR/LC_MESSAGES/django.po rename to backoffice/locale/fr_FR/LC_MESSAGES/django.po diff --git a/locale/pt_BR/LC_MESSAGES/django.po b/backoffice/locale/pt_BR/LC_MESSAGES/django.po similarity index 100% rename from locale/pt_BR/LC_MESSAGES/django.po rename to backoffice/locale/pt_BR/LC_MESSAGES/django.po diff --git a/manage.py b/backoffice/manage.py similarity index 98% rename from manage.py rename to backoffice/manage.py index 68d145ed8..d09168bda 100755 --- a/manage.py +++ b/backoffice/manage.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +# ruff: noqa: B904 import os import sys from pathlib import Path diff --git a/merge_production_dotenvs_in_dotenv.py b/backoffice/merge_production_dotenvs_in_dotenv.py similarity index 100% rename from merge_production_dotenvs_in_dotenv.py rename to backoffice/merge_production_dotenvs_in_dotenv.py diff --git a/poetry.lock b/backoffice/poetry.lock similarity index 99% rename from poetry.lock rename to backoffice/poetry.lock index 973163035..b3c1e0bbe 100644 --- a/poetry.lock +++ b/backoffice/poetry.lock @@ -1029,6 +1029,16 @@ files = [ [package.dependencies] Django = ">=3.2" +[[package]] +name = "django-json-widget" +version = "2.0.1" +description = "Django json widget is an alternative widget that makes it easy to edit the jsonfield field of django." 
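The `django-json-widget` package pinned here backs the admin change earlier in this patch: `"django_json_widget"` joins the third-party apps list in `config/settings/base.py`, and `WorkflowAdmin` maps `JSONField` to `JSONEditorWidget`, so workflow payloads are edited as structured JSON rather than a raw text area. Restated as one self-contained sketch (the admin class name is illustrative):

    from django.contrib import admin
    from django.db.models import JSONField
    from django_json_widget.widgets import JSONEditorWidget


    class ExampleJSONAdmin(admin.ModelAdmin):
        # Mirrors the WorkflowAdmin override added above: every JSONField on
        # the registered model renders through the interactive JSON editor.
        formfield_overrides = {
            JSONField: {"widget": JSONEditorWidget},
        }
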
+optional = false +python-versions = "*" +files = [ + {file = "django-json-widget-2.0.1.tar.gz", hash = "sha256:adb4cab17fe5a04139037d7d84725369530ef35b912c3790d3a7b13f99351358"}, +] + [[package]] name = "django-model-utils" version = "4.3.1" @@ -3775,4 +3785,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "~3.11" -content-hash = "513c738f634b5100805f018953c91c5a6c26b2df4527f792c7368509e1c0e36d" +content-hash = "2ea1eb28c8db8dbe5680a156be48175a8efeb08dbf5470194a572cf4fba223e9" diff --git a/pyproject.toml b/backoffice/pyproject.toml similarity index 99% rename from pyproject.toml rename to backoffice/pyproject.toml index 44f4a80de..f034e1ff4 100644 --- a/pyproject.toml +++ b/backoffice/pyproject.toml @@ -149,6 +149,7 @@ django-elasticsearch-dsl-drf = {git = "https://github.com/cern-sis/django-elasti opensearch-dsl = "^2.1.0" opensearch-py = "2.6.0" djangorestframework-simplejwt = "^5.3.1" +django-json-widget = "^2.0.1" [tool.poetry.dev-dependencies] factory-boy = "3.3.0" diff --git a/tests/test_merge_production_dotenvs_in_dotenv.py b/backoffice/tests/test_merge_production_dotenvs_in_dotenv.py similarity index 99% rename from tests/test_merge_production_dotenvs_in_dotenv.py rename to backoffice/tests/test_merge_production_dotenvs_in_dotenv.py index c0e68f60a..45513d60e 100644 --- a/tests/test_merge_production_dotenvs_in_dotenv.py +++ b/backoffice/tests/test_merge_production_dotenvs_in_dotenv.py @@ -1,7 +1,6 @@ from pathlib import Path import pytest - from merge_production_dotenvs_in_dotenv import merge diff --git a/compose/local/django/Dockerfile b/compose/local/django/Dockerfile deleted file mode 100644 index c1dc2d0f7..000000000 --- a/compose/local/django/Dockerfile +++ /dev/null @@ -1,30 +0,0 @@ -FROM python:3.11.6-slim-bullseye as python - -ARG APP_HOME=/app -WORKDIR ${APP_HOME} - -ARG BUILD_ENVIRONMENT=local -ENV BUILD_ENV ${BUILD_ENVIRONMENT} - -ENV PATH="/root/.local/bin:${PATH}" \ - POETRY_VIRTUALENVS_CREATE=false \ - PYTHONUNBUFFERED=1 \ - PYTHONDONTWRITEBYTECODE=1 - -RUN apt-get update && apt-get install --no-install-recommends -y \ - curl libpq-dev build-essential gettext \ - && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \ - && rm -rf /var/lib/apt/lists/* - -ARG POETRY_VERSION -ENV POETRY_VERSION="${POETRY_VERSION:-1.6.1}" -RUN curl -sSL https://install.python-poetry.org \ - | python - --version "${POETRY_VERSION}" \ - && poetry --version - -COPY poetry.lock ./poetry.lock -COPY pyproject.toml ./pyproject.toml -RUN poetry install --no-root - -COPY . 
${APP_HOME} -RUN poetry install diff --git a/compose/local/django/celery/beat/start b/compose/local/django/celery/beat/start deleted file mode 100644 index 8adc4891a..000000000 --- a/compose/local/django/celery/beat/start +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -set -o errexit -set -o nounset - - -rm -f './celerybeat.pid' -exec watchfiles --filter python celery.__main__.main --args '-A config.celery_app beat -l INFO' diff --git a/compose/local/django/celery/flower/start b/compose/local/django/celery/flower/start deleted file mode 100644 index b4783d2f0..000000000 --- a/compose/local/django/celery/flower/start +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -set -o errexit -set -o nounset - -exec watchfiles --filter python celery.__main__.main \ - --args \ - "-A config.celery_app -b \"${CELERY_BROKER_URL}\" flower --basic_auth=\"${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}\"" diff --git a/compose/local/django/celery/worker/start b/compose/local/django/celery/worker/start deleted file mode 100644 index 183a80159..000000000 --- a/compose/local/django/celery/worker/start +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -set -o errexit -set -o nounset - - -exec watchfiles --filter python celery.__main__.main --args '-A config.celery_app worker -l INFO' diff --git a/compose/local/django/start b/compose/local/django/start deleted file mode 100644 index 1549d1334..000000000 --- a/compose/local/django/start +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -set -o errexit -set -o pipefail -set -o nounset - - -python manage.py migrate -exec uvicorn config.asgi:application --host 0.0.0.0 --reload --reload-include '*.html' diff --git a/compose/production/django/entrypoint b/compose/production/django/entrypoint deleted file mode 100644 index 249d8d9fd..000000000 --- a/compose/production/django/entrypoint +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/bash - -set -o errexit -set -o pipefail -set -o nounset - - - -# N.B. If only .env files supported variable expansion... -export CELERY_BROKER_URL="${REDIS_URL}" - - -if [ -z "${POSTGRES_USER}" ]; then - base_postgres_image_default_user='postgres' - export POSTGRES_USER="${base_postgres_image_default_user}" -fi -export DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}" - -python << END -import sys -import time - -import psycopg - -suggest_unrecoverable_after = 30 -start = time.time() - -while True: - try: - psycopg.connect( - dbname="${POSTGRES_DB}", - user="${POSTGRES_USER}", - password="${POSTGRES_PASSWORD}", - host="${POSTGRES_HOST}", - port="${POSTGRES_PORT}", - ) - break - except psycopg.OperationalError as error: - sys.stderr.write("Waiting for PostgreSQL to become available...\n") - - if time.time() - start > suggest_unrecoverable_after: - sys.stderr.write(" This is taking longer than expected. 
The following exception may be indicative of an unrecoverable error: '{}'\n".format(error)) - - time.sleep(1) -END - ->&2 echo 'PostgreSQL is available' - -exec "$@" diff --git a/compose/production/django/start b/compose/production/django/start deleted file mode 100644 index 83ffc8f78..000000000 --- a/compose/production/django/start +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash - -set -o errexit -set -o pipefail -set -o nounset - - -python /app/manage.py collectstatic --noinput - -exec /usr/local/bin/gunicorn config.asgi --bind 0.0.0.0:5000 --chdir=/app -k uvicorn.workers.UvicornWorker diff --git a/compose/production/nginx/Dockerfile b/compose/production/nginx/Dockerfile deleted file mode 100644 index 911b16f71..000000000 --- a/compose/production/nginx/Dockerfile +++ /dev/null @@ -1,2 +0,0 @@ -FROM nginx:1.17.8-alpine -COPY ./compose/production/nginx/default.conf /etc/nginx/conf.d/default.conf diff --git a/compose/production/nginx/default.conf b/compose/production/nginx/default.conf deleted file mode 100644 index 562dba86c..000000000 --- a/compose/production/nginx/default.conf +++ /dev/null @@ -1,7 +0,0 @@ -server { - listen 80; - server_name localhost; - location /media/ { - alias /usr/share/nginx/media/; - } -} diff --git a/compose/production/postgres/Dockerfile b/compose/production/postgres/Dockerfile deleted file mode 100644 index 101aa8125..000000000 --- a/compose/production/postgres/Dockerfile +++ /dev/null @@ -1,6 +0,0 @@ -FROM postgres:14 - -COPY ./compose/production/postgres/maintenance /usr/local/bin/maintenance -RUN chmod +x /usr/local/bin/maintenance/* -RUN mv /usr/local/bin/maintenance/* /usr/local/bin \ - && rmdir /usr/local/bin/maintenance diff --git a/compose/production/postgres/maintenance/_sourced/constants.sh b/compose/production/postgres/maintenance/_sourced/constants.sh deleted file mode 100644 index 6ca4f0ca9..000000000 --- a/compose/production/postgres/maintenance/_sourced/constants.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env bash - - -BACKUP_DIR_PATH='/backups' -BACKUP_FILE_PREFIX='backup' diff --git a/compose/production/postgres/maintenance/_sourced/countdown.sh b/compose/production/postgres/maintenance/_sourced/countdown.sh deleted file mode 100644 index e6cbfb6ff..000000000 --- a/compose/production/postgres/maintenance/_sourced/countdown.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env bash - - -countdown() { - declare desc="A simple countdown. 
Source: https://superuser.com/a/611582" - local seconds="${1}" - local d=$(($(date +%s) + "${seconds}")) - while [ "$d" -ge `date +%s` ]; do - echo -ne "$(date -u --date @$(($d - `date +%s`)) +%H:%M:%S)\r"; - sleep 0.1 - done -} diff --git a/compose/production/postgres/maintenance/_sourced/messages.sh b/compose/production/postgres/maintenance/_sourced/messages.sh deleted file mode 100644 index f6be756e9..000000000 --- a/compose/production/postgres/maintenance/_sourced/messages.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env bash - - -message_newline() { - echo -} - -message_debug() -{ - echo -e "DEBUG: ${@}" -} - -message_welcome() -{ - echo -e "\e[1m${@}\e[0m" -} - -message_warning() -{ - echo -e "\e[33mWARNING\e[0m: ${@}" -} - -message_error() -{ - echo -e "\e[31mERROR\e[0m: ${@}" -} - -message_info() -{ - echo -e "\e[37mINFO\e[0m: ${@}" -} - -message_suggestion() -{ - echo -e "\e[33mSUGGESTION\e[0m: ${@}" -} - -message_success() -{ - echo -e "\e[32mSUCCESS\e[0m: ${@}" -} diff --git a/compose/production/postgres/maintenance/_sourced/yes_no.sh b/compose/production/postgres/maintenance/_sourced/yes_no.sh deleted file mode 100644 index fd9cae161..000000000 --- a/compose/production/postgres/maintenance/_sourced/yes_no.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env bash - - -yes_no() { - declare desc="Prompt for confirmation. \$\"\{1\}\": confirmation message." - local arg1="${1}" - - local response= - read -r -p "${arg1} (y/[n])? " response - if [[ "${response}" =~ ^[Yy]$ ]] - then - exit 0 - else - exit 1 - fi -} diff --git a/compose/production/postgres/maintenance/backup b/compose/production/postgres/maintenance/backup deleted file mode 100644 index f72304c05..000000000 --- a/compose/production/postgres/maintenance/backup +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env bash - - -### Create a database backup. -### -### Usage: -### $ docker compose -f .yml (exec |run --rm) postgres backup - - -set -o errexit -set -o pipefail -set -o nounset - - -working_dir="$(dirname ${0})" -source "${working_dir}/_sourced/constants.sh" -source "${working_dir}/_sourced/messages.sh" - - -message_welcome "Backing up the '${POSTGRES_DB}' database..." - - -if [[ "${POSTGRES_USER}" == "postgres" ]]; then - message_error "Backing up as 'postgres' user is not supported. Assign 'POSTGRES_USER' env with another one and try again." - exit 1 -fi - -export PGHOST="${POSTGRES_HOST}" -export PGPORT="${POSTGRES_PORT}" -export PGUSER="${POSTGRES_USER}" -export PGPASSWORD="${POSTGRES_PASSWORD}" -export PGDATABASE="${POSTGRES_DB}" - -backup_filename="${BACKUP_FILE_PREFIX}_$(date +'%Y_%m_%dT%H_%M_%S').sql.gz" -pg_dump | gzip > "${BACKUP_DIR_PATH}/${backup_filename}" - - -message_success "'${POSTGRES_DB}' database backup '${backup_filename}' has been created and placed in '${BACKUP_DIR_PATH}'." diff --git a/compose/production/postgres/maintenance/backups b/compose/production/postgres/maintenance/backups deleted file mode 100644 index a18937d62..000000000 --- a/compose/production/postgres/maintenance/backups +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash - - -### View backups. 
-### -### Usage: -### $ docker compose -f .yml (exec |run --rm) postgres backups - - -set -o errexit -set -o pipefail -set -o nounset - - -working_dir="$(dirname ${0})" -source "${working_dir}/_sourced/constants.sh" -source "${working_dir}/_sourced/messages.sh" - - -message_welcome "These are the backups you have got:" - -ls -lht "${BACKUP_DIR_PATH}" diff --git a/compose/production/postgres/maintenance/restore b/compose/production/postgres/maintenance/restore deleted file mode 100644 index c68f17d71..000000000 --- a/compose/production/postgres/maintenance/restore +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env bash - - -### Restore database from a backup. -### -### Parameters: -### <1> filename of an existing backup. -### -### Usage: -### $ docker compose -f .yml (exec |run --rm) postgres restore <1> - - -set -o errexit -set -o pipefail -set -o nounset - - -working_dir="$(dirname ${0})" -source "${working_dir}/_sourced/constants.sh" -source "${working_dir}/_sourced/messages.sh" - - -if [[ -z ${1+x} ]]; then - message_error "Backup filename is not specified yet it is a required parameter. Make sure you provide one and try again." - exit 1 -fi -backup_filename="${BACKUP_DIR_PATH}/${1}" -if [[ ! -f "${backup_filename}" ]]; then - message_error "No backup with the specified filename found. Check out the 'backups' maintenance script output to see if there is one and try again." - exit 1 -fi - -message_welcome "Restoring the '${POSTGRES_DB}' database from the '${backup_filename}' backup..." - -if [[ "${POSTGRES_USER}" == "postgres" ]]; then - message_error "Restoring as 'postgres' user is not supported. Assign 'POSTGRES_USER' env with another one and try again." - exit 1 -fi - -export PGHOST="${POSTGRES_HOST}" -export PGPORT="${POSTGRES_PORT}" -export PGUSER="${POSTGRES_USER}" -export PGPASSWORD="${POSTGRES_PASSWORD}" -export PGDATABASE="${POSTGRES_DB}" - -message_info "Dropping the database..." -dropdb "${PGDATABASE}" - -message_info "Creating a new database..." -createdb --owner="${POSTGRES_USER}" - -message_info "Applying the backup to the new database..." -gunzip -c "${backup_filename}" | psql "${POSTGRES_DB}" - -message_success "The '${POSTGRES_DB}' database has been restored from the '${backup_filename}' backup." diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 000000000..1cfca7147 --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,344 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL. +# +# WARNING: This configuration is for local development. Do not use it in a production deployment. 
+#
+# This configuration supports basic configuration using environment variables or an .env file.
+# The following variables are supported:
+#
+# AIRFLOW_IMAGE_NAME           - Docker image name used to run Airflow.
+#                                Default: apache/airflow:2.8.3
+# AIRFLOW_UID                  - User ID in Airflow containers.
+#                                Default: 50000
+# AIRFLOW_PROJ_DIR             - Base path under which all the files will be mounted as volumes.
+#                                Default: .
+# These variables are mostly useful for standalone testing or running Airflow in try-out mode.
+#
+# _AIRFLOW_WWW_USER_USERNAME   - Username for the administrator account (if requested).
+#                                Default: airflow
+# _AIRFLOW_WWW_USER_PASSWORD   - Password for the administrator account (if requested).
+#                                Default: airflow
+# _PIP_ADDITIONAL_REQUIREMENTS - Additional PIP requirements to add when starting all containers.
+#                                Use this option ONLY for quick checks. Installing requirements at container
+#                                startup is done EVERY TIME the service is started.
+#                                A better way is to build a custom image or extend the official image
+#                                as described in https://airflow.apache.org/docs/docker-stack/build.html.
+#                                Default: ''
+#
+# Feel free to modify this file to suit your needs.
+---
+x-airflow-common: &airflow-common
+  # In order to add custom dependencies or upgrade provider packages you can use your extended image.
+  # Comment out the image line, place your Dockerfile in the directory where you placed the docker-compose.yaml
+  # and uncomment the "build" line below, then run `docker-compose build` to build the images.
+  image: apache/airflow:2.8.3-python3.11
+  build:
+    context: workflows
+    dockerfile: Dockerfile
+  environment: &airflow-common-env
+    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
+    AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres-airflow/airflow
+    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres-airflow/airflow
+    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
+    AIRFLOW__CORE__FERNET_KEY: ""
+    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: "true"
+    AIRFLOW__CORE__LOAD_EXAMPLES: "false"
+    AIRFLOW__API__AUTH_BACKENDS: "airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session"
+    # yamllint disable rule:line-length
+    # Use simple http server on scheduler for health checks
+    # See https://airflow.apache.org/docs/apache-airflow/stable/administration-and-deployment/logging-monitoring/check-health.html#scheduler-health-check-server
+    # yamllint enable rule:line-length
+    AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: "true"
+    # WARNING: Use the _PIP_ADDITIONAL_REQUIREMENTS option ONLY for quick checks;
+    # for any other purpose (development, testing, and especially production) build/extend the Airflow image.
+ _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-} + volumes: + - ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags + - ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs + - ${AIRFLOW_PROJ_DIR:-.}/config:/opt/airflow/config + - ${AIRFLOW_PROJ_DIR:-.}/plugins:/opt/airflow/plugins + - ${AIRFLOW_PROJ_DIR:-.}/scripts:/opt/airflow/scripts + + user: "${AIRFLOW_UID:-50000}:0" + depends_on: &airflow-common-depends-on + redis: + condition: service_healthy + postgres-airflow: + condition: service_healthy + +services: + backoffice-webserver: + build: + context: backoffice + dockerfile: Dockerfile + image: backoffice_local_django + depends_on: + - postgres-backoffice + - redis + volumes: + - ./backoffice:/app:z + env_file: + - ./backoffice/.envs/local/.django + - ./backoffice/.envs/local/.postgres + ports: + - "8000:8000" + command: sh -c 'poetry run python manage.py migrate && poetry run python manage.py runserver 0.0.0.0:8000' + + postgres-airflow: + image: postgres:13 + environment: + POSTGRES_USER: airflow + POSTGRES_PASSWORD: airflow + POSTGRES_DB: airflow + volumes: + - postgres-db-volume:/var/lib/postgresql/data + healthcheck: + test: ["CMD", "pg_isready", "-U", "airflow"] + interval: 10s + retries: 5 + start_period: 5s + restart: always + + postgres-backoffice: + image: postgres:13 + environment: + POSTGRES_USER: inspire + POSTGRES_PASSWORD: inspire + POSTGRES_DB: inspire + volumes: + - postgres-backoffice-db-volume:/var/lib/postgresql/data + healthcheck: + test: ["CMD", "pg_isready", "-U", "inspire"] + interval: 10s + retries: 5 + start_period: 5s + restart: always + + redis: + image: redis:latest + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 30s + retries: 50 + start_period: 30s + restart: always + + mq: + image: rabbitmq:3.9.11-management + restart: "always" + + opensearch: + platform: linux/amd64 + image: registry.cern.ch/cern-sis/inspirehep/opensearch + restart: "always" + environment: + - cluster.name=opensearch-cluster + - node.name=opensearch-node1 + - discovery.seed_hosts=opensearch-node1 + - bootstrap.memory_lock=true + - discovery.type=single-node + - DISABLE_SECURITY_PLUGIN=true + - "ES_JAVA_OPTS=-Xms1024m -Xmx1024m" + ulimits: + memlock: + soft: -1 + hard: -1 + nofile: + soft: 65536 + hard: 65536 + mem_limit: 2g + + airflow-webserver: + <<: *airflow-common + command: webserver + ports: + - "8080:8080" + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:8080/health"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + + airflow-scheduler: + <<: *airflow-common + command: scheduler + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:8974/health"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + + airflow-worker: + <<: *airflow-common + command: celery worker + healthcheck: + # yamllint disable rule:line-length + test: + - "CMD-SHELL" + - 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}" || celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"' + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + environment: + <<: *airflow-common-env + # Required to handle warm shutdown of the celery workers properly + # See 
https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation + DUMB_INIT_SETSID: "0" + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + + airflow-triggerer: + <<: *airflow-common + command: triggerer + healthcheck: + test: + [ + "CMD-SHELL", + 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"', + ] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + + airflow-init: + <<: *airflow-common + entrypoint: /bin/bash + # yamllint disable rule:line-length + command: + - -c + - | + if [[ -z "${AIRFLOW_UID}" ]]; then + echo + echo -e "\033[1;33mWARNING!!!: AIRFLOW_UID not set!\e[0m" + echo "If you are on Linux, you SHOULD follow the instructions below to set " + echo "AIRFLOW_UID environment variable, otherwise files will be owned by root." + echo "For other operating systems you can get rid of the warning with manually created .env file:" + echo " See: https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#setting-the-right-airflow-user" + echo + fi + one_meg=1048576 + mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg)) + cpus_available=$$(grep -cE 'cpu[0-9]+' /proc/stat) + disk_available=$$(df / | tail -1 | awk '{print $$4}') + warning_resources="false" + if (( mem_available < 4000 )) ; then + echo + echo -e "\033[1;33mWARNING!!!: Not enough memory available for Docker.\e[0m" + echo "At least 4GB of memory required. You have $$(numfmt --to iec $$((mem_available * one_meg)))" + echo + warning_resources="true" + fi + if (( cpus_available < 2 )); then + echo + echo -e "\033[1;33mWARNING!!!: Not enough CPUS available for Docker.\e[0m" + echo "At least 2 CPUs recommended. You have $${cpus_available}" + echo + warning_resources="true" + fi + if (( disk_available < one_meg * 10 )); then + echo + echo -e "\033[1;33mWARNING!!!: Not enough Disk space available for Docker.\e[0m" + echo "At least 10 GBs recommended. You have $$(numfmt --to iec $$((disk_available * 1024 )))" + echo + warning_resources="true" + fi + if [[ $${warning_resources} == "true" ]]; then + echo + echo -e "\033[1;33mWARNING!!!: You have not enough resources to run Airflow (see above)!\e[0m" + echo "Please follow the instructions to increase amount of resources available:" + echo " https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#before-you-begin" + echo + fi + mkdir -p /sources/logs /sources/dags /sources/plugins + chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins} + exec /entrypoint airflow version + # yamllint enable rule:line-length + environment: + <<: *airflow-common-env + _AIRFLOW_DB_MIGRATE: "true" + _AIRFLOW_WWW_USER_CREATE: "true" + _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow} + _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow} + _PIP_ADDITIONAL_REQUIREMENTS: "" + user: "0:0" + volumes: + - ${AIRFLOW_PROJ_DIR:-.}:/sources + + airflow-cli: + <<: *airflow-common + profiles: + - debug + environment: + <<: *airflow-common-env + CONNECTION_CHECK_MAX_COUNT: "0" + # Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252 + command: + - bash + - -c + - airflow + + # You can enable flower by adding "--profile flower" option e.g. docker-compose --profile flower up + # or by explicitly targeted on the command line e.g. 
docker-compose up flower. + # See: https://docs.docker.com/compose/profiles/ + flower: + <<: *airflow-common + command: celery flower + profiles: + - flower + ports: + - "5555:5555" + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:5555/"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + +volumes: + postgres-db-volume: + postgres-backoffice-db-volume: diff --git a/local.yml b/local.yml deleted file mode 100644 index b203da004..000000000 --- a/local.yml +++ /dev/null @@ -1,113 +0,0 @@ -version: "3" - -volumes: - backoffice_postgres_data: {} - backoffice_postgres_data_backups: {} - -services: - django: &django - build: - context: . - dockerfile: ./compose/local/django/Dockerfile - image: backoffice_local_django - container_name: backoffice_local_django - depends_on: - - postgres - - redis - volumes: - - .:/app:z - env_file: - - ./.envs/local/.django - - ./.envs/local/.postgres - ports: - - "8000:8000" - command: sh -c 'poetry run python manage.py migrate && poetry run python manage.py runserver 0.0.0.0:8000' - networks: - - djangonetwork - - postgres: - build: - context: . - dockerfile: ./compose/production/postgres/Dockerfile - image: backoffice_production_postgres - container_name: backoffice_local_postgres - ports: - - "5432:5432" - volumes: - - backoffice_postgres_data:/var/lib/postgresql/data - - backoffice_postgres_data_backups:/backups - env_file: - - ./.envs/local/.postgres - networks: - - djangonetwork - - redis: - image: redis:6 - container_name: backoffice_local_redis - networks: - - djangonetwork - - celeryworker: - <<: *django - image: backoffice_local_celeryworker - container_name: backoffice_local_celeryworker - depends_on: - - redis - - postgres - ports: [] - command: celery -A config.celery_app worker -l INFO - networks: - - djangonetwork - - celerybeat: - <<: *django - image: backoffice_local_celerybeat - container_name: backoffice_local_celerybeat - depends_on: - - redis - - postgres - ports: [] - command: celery -A config.celery_app beat -l INFO - networks: - - djangonetwork - - mq: - image: rabbitmq:3.9.11-management - restart: "always" - container_name: backoffice_local_mq - ports: - - "5672:5672" - networks: - - djangonetwork - - opensearch: - container_name: backoffice_local_opensearch - platform: linux/amd64 - image: registry.cern.ch/cern-sis/inspirehep/opensearch - restart: "always" - environment: - - cluster.name=opensearch-cluster - - node.name=opensearch-node1 - - discovery.seed_hosts=opensearch-node1 - - bootstrap.memory_lock=true - - discovery.type=single-node - - DISABLE_SECURITY_PLUGIN=true - - "ES_JAVA_OPTS=-Xms1024m -Xmx1024m" - ulimits: - memlock: - soft: -1 - hard: -1 - nofile: - soft: 65536 - hard: 65536 - mem_limit: 2g - ports: - - 9200:9200 - - 9300:9300 - - 9600:9600 # performance analysis - networks: - - djangonetwork - -networks: - djangonetwork: - driver: bridge diff --git a/production.yml b/production.yml deleted file mode 100644 index ff1f8e8dc..000000000 --- a/production.yml +++ /dev/null @@ -1,69 +0,0 @@ -version: "3" - -volumes: - production_postgres_data: {} - production_postgres_data_backups: {} - -services: - django: &django - build: - context: . 
- dockerfile: ./compose/production/django/Dockerfile - - image: backoffice_production_django - depends_on: - - postgres - - redis - env_file: - - ./.envs/docker/.django - - ./.envs/docker/.postgres - command: sh -c 'poetry run python manage.py migrate && poetry run python manage.py runserver 0.0.0.0:8000' - - postgres: - build: - context: . - dockerfile: ./compose/production/postgres/Dockerfile - image: backoffice_production_postgres - volumes: - - production_postgres_data:/var/lib/postgresql/data - - production_postgres_data_backups:/backups - env_file: - - ./.envs/docker/.postgres - - redis: - image: redis:6 - - celeryworker: - <<: *django - image: backoffice_production_celeryworker - depends_on: - - redis - - postgres - ports: [] - command: celery -A config.celery_app worker -l INFO - networks: - - djangonetwork - - celerybeat: - <<: *django - image: backoffice_production_celerybeat - command: celery -A config.celery_app beat -l INFO - depends_on: - - redis - - postgres - ports: [] - networks: - - djangonetwork - - mq: - image: rabbitmq:3.9.11-management - restart: "always" - container_name: backofficee_production_mq - ports: - - "5672:5672" - networks: - - djangonetwork - -networks: - djangonetwork: - driver: bridge diff --git a/requirements/base.txt b/requirements/base.txt deleted file mode 100644 index 29c56e7f2..000000000 --- a/requirements/base.txt +++ /dev/null @@ -1,24 +0,0 @@ -python-slugify==8.0.1 # https://github.com/un33k/python-slugify -Pillow==10.0.1 # https://github.com/python-pillow/Pillow -argon2-cffi==23.1.0 # https://github.com/hynek/argon2_cffi -redis==5.0.1 # https://github.com/redis/redis-py -hiredis==2.2.3 # https://github.com/redis/hiredis-py -celery==5.3.4 # pyup: < 6.0 # https://github.com/celery/celery -django-celery-beat==2.5.0 # https://github.com/celery/django-celery-beat -flower==2.0.1 # https://github.com/mher/flower -uvicorn[standard]==0.23.2 # https://github.com/encode/uvicorn - -# Django -# ------------------------------------------------------------------------------ -django==4.2.6 # pyup: < 5.0 # https://www.djangoproject.com/ -django-environ==0.11.2 # https://github.com/joke2k/django-environ -django-model-utils==4.3.1 # https://github.com/jazzband/django-model-utils -django-allauth==0.57.0 # https://github.com/pennersr/django-allauth -django-crispy-forms==2.0 # https://github.com/django-crispy-forms/django-crispy-forms -crispy-bootstrap5==0.7 # https://github.com/django-crispy-forms/crispy-bootstrap5 -django-redis==5.4.0 # https://github.com/jazzband/django-redis -# Django REST Framework -djangorestframework==3.14.0 # https://github.com/encode/django-rest-framework -django-cors-headers==4.2.0 # https://github.com/adamchainz/django-cors-headers -# DRF-spectacular for api documentation -drf-spectacular==0.26.5 # https://github.com/tfranzel/drf-spectacular diff --git a/requirements/local.txt b/requirements/local.txt deleted file mode 100644 index 9af50e697..000000000 --- a/requirements/local.txt +++ /dev/null @@ -1,34 +0,0 @@ --r base.txt - -Werkzeug[watchdog]==3.0.1 # https://github.com/pallets/werkzeug -ipdb==0.13.13 # https://github.com/gotcha/ipdb -psycopg[c]==3.1.12 # https://github.com/psycopg/psycopg -watchfiles==0.20.0 # https://github.com/samuelcolvin/watchfiles - -# Testing -# ------------------------------------------------------------------------------ -mypy==1.5.1 # https://github.com/python/mypy -django-stubs[compatible-mypy]==4.2.4 # https://github.com/typeddjango/django-stubs -pytest==7.4.2 # https://github.com/pytest-dev/pytest 
-pytest-sugar==0.9.7 # https://github.com/Frozenball/pytest-sugar -djangorestframework-stubs[compatible-mypy]==3.14.3 # https://github.com/typeddjango/djangorestframework-stubs - -# Code quality -# ------------------------------------------------------------------------------ -flake8==6.1.0 # https://github.com/PyCQA/flake8 -flake8-isort==6.1.0 # https://github.com/gforcada/flake8-isort -coverage==7.3.2 # https://github.com/nedbat/coveragepy -black==23.9.1 # https://github.com/psf/black -djlint==1.34.0 # https://github.com/Riverside-Healthcare/djLint -pylint-django==2.5.3 # https://github.com/PyCQA/pylint-django -pylint-celery==0.3 # https://github.com/PyCQA/pylint-celery -pre-commit==3.4.0 # https://github.com/pre-commit/pre-commit - -# Django -# ------------------------------------------------------------------------------ -factory-boy==3.3.0 # https://github.com/FactoryBoy/factory_boy - -django-debug-toolbar==4.2.0 # https://github.com/jazzband/django-debug-toolbar -django-extensions==3.2.3 # https://github.com/django-extensions/django-extensions -django-coverage-plugin==3.1.0 # https://github.com/nedbat/django_coverage_plugin -pytest-django==4.5.2 # https://github.com/pytest-dev/pytest-django diff --git a/requirements/production.txt b/requirements/production.txt deleted file mode 100644 index 2fc900085..000000000 --- a/requirements/production.txt +++ /dev/null @@ -1,13 +0,0 @@ -# PRECAUTION: avoid production dependencies that aren't in development - --r base.txt - -gunicorn==21.2.0 # https://github.com/benoitc/gunicorn -psycopg[c]==3.1.12 # https://github.com/psycopg/psycopg -Collectfast==2.2.0 # https://github.com/antonagestam/collectfast -sentry-sdk==1.31.0 # https://github.com/getsentry/sentry-python - -# Django -# ------------------------------------------------------------------------------ -django-storages[s3]==1.14.2 # https://github.com/jschneier/django-storages -django-anymail==10.1 # https://github.com/anymail/django-anymail diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 000000000..d849b4aeb --- /dev/null +++ b/ruff.toml @@ -0,0 +1,39 @@ +target-version = "py311" +ignore = ["PT009"] + +[lint.flake8-tidy-imports] +ban-relative-imports = "all" + +[lint] +select = [ + # pycodestyle + "E", + # Pyflakes + "F", + # pyupgrade + "UP", + # flake8-bugbear + "B", + # flake8-simplify + "SIM", + # isort + "I", + # flake8-tidy-imports + "TID", + # flake8-pytest-style + "PT", + # airflow-variable-name-task-id-mismatch + "AIR", + +] + + +[lint.pycodestyle] +ignore-overlong-task-comments = true + +[lint.pydocstyle] +convention = "google" + +[format] +# Format all docstring code snippets with a line length of 60. 
+docstring-code-line-length = 60 diff --git a/workflows/Dockerfile b/workflows/Dockerfile new file mode 100644 index 000000000..3c73abda5 --- /dev/null +++ b/workflows/Dockerfile @@ -0,0 +1,9 @@ +FROM apache/airflow:2.8.3-python3.11 + +WORKDIR /opt/airflow + +COPY --chown=airflow:root dags ./dags/ +COPY --chown=airflow:root plugins ./plugins/ +COPY --chown=airflow:root requirements.txt ./requirements.txt + +RUN pip install --no-cache-dir -r requirements.txt diff --git a/workflows/dags/__init__.py b/workflows/dags/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/workflows/dags/author/__init__.py b/workflows/dags/author/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/workflows/dags/author/author_create/README.md b/workflows/dags/author/author_create/README.md new file mode 100644 index 000000000..e69de29bb diff --git a/workflows/dags/author/author_create/__init__.py b/workflows/dags/author/author_create/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/workflows/dags/author/author_create/author_create_approved.py b/workflows/dags/author/author_create/author_create_approved.py new file mode 100644 index 000000000..75766a0f2 --- /dev/null +++ b/workflows/dags/author/author_create/author_create_approved.py @@ -0,0 +1,190 @@ +import datetime +import logging + +from airflow.decorators import dag, task +from airflow.models.param import Param +from airflow.utils.trigger_rule import TriggerRule +from hooks.backoffice.workflow_management_hook import WorkflowManagementHook +from hooks.backoffice.workflow_ticket_management_hook import ( + WorkflowTicketManagementHook, +) +from hooks.inspirehep.inspire_http_hook import InspireHttpHook +from hooks.inspirehep.inspire_http_record_management_hook import ( + InspireHTTPRecordManagementHook, +) +from include.utils.set_workflow_status import ( + get_wf_status_from_inspire_response, + set_workflow_status_to_error, +) + +logger = logging.getLogger(__name__) + + +@dag( + params={ + "workflow_id": Param(type="string", default=""), + "data": Param(type="object", default={}), + "create_ticket": Param(type="boolean", default=False), + }, + start_date=datetime.datetime(2024, 5, 5), + schedule_interval=None, + catchup=False, + on_failure_callback=set_workflow_status_to_error, # TODO: what if callback fails? Data in backoffice not up to date! +) +def author_create_approved_dag(): + """Defines the DAG for the author creation workflow after curator's approval. + + Tasks: + 1. author_check_approval: Branching for the workflow: based on create_ticket + parameter + 2. create_ticket_on_author_approval: Creates a ticket using the InspireHttpHook to + call the API endpoint. + 3. create_author_on_inspire: Updates the author record on INSPIRE using the + InspireHTTPRecordManagementHook. + 4. close_ticket: Closes the ticket associated with the author creation workflow. + 5. set_author_create_workflow_status_to_completed: Sets the status of the author + creation workflow to 'completed'. 
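+
+    Example (illustrative only): one way to trigger this DAG with the
+    ``conf`` payload backing the params above; the workflow id is a made-up
+    placeholder::
+
+        from airflow.api.client.local_client import Client
+
+        Client(None, None).trigger_dag(
+            dag_id="author_create_approved_dag",
+            conf={
+                "workflow_id": "00000000-0000-0000-0000-000000000000",
+                "data": {},
+                "create_ticket": True,
+            },
+        )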
+ + """ + inspire_http_hook = InspireHttpHook() + inspire_http_record_management_hook = InspireHTTPRecordManagementHook() + workflow_management_hook = WorkflowManagementHook() + workflow_ticket_management_hook = WorkflowTicketManagementHook() + + @task() + def set_workflow_status_to_running(**context): + status_name = "running" + workflow_management_hook.set_workflow_status( + status_name=status_name, workflow_id=context["params"]["workflow_id"] + ) + + @task.branch() + def author_check_approval_branch(**context: dict) -> None: + """Branching for the workflow: based on create_ticket parameter + dag goes either to create_ticket_on_author_approval task or + directly to create_author_on_inspire + """ + if context["params"]["create_ticket"]: + return "create_author_create_curation_ticket" + else: + return "empty_task" + + @task + def create_author_create_curation_ticket(**context: dict) -> None: + endpoint = "api/tickets/create" + request_data = { + "functional_category": "", + "workflow_id": context["params"]["workflow_id"], + "subject": "test", # TODO: update subject and description + "description": "test", + "caller_email": "", # leave empty + "template": "curation_needed_author", # TODO: check template + } + response = inspire_http_hook.call_api( + endpoint=endpoint, data=request_data, method="POST" + ) + workflow_ticket_management_hook.create_ticket_entry( + workflow_id=context["params"]["workflow_id"], + ticket_id=response.json()["ticket_id"], + ticket_type="author_create_curation", + ) + + @task(do_xcom_push=True) + def create_author_on_inspire(**context: dict) -> str: + workflow_data = workflow_management_hook.get_workflow( + workflow_id=context["params"]["workflow_id"] + ) + response = inspire_http_record_management_hook.post_record( + data=workflow_data["data"], pid_type="authors" + ) + status = get_wf_status_from_inspire_response(response) + if response.ok: + control_number = response.json()["metadata"]["control_number"] + workflow_data["data"]["control_number"] = control_number + workflow_management_hook.partial_update_workflow( + workflow_id=context["params"]["workflow_id"], + workflow_partial_update_data={"data": workflow_data["data"]}, + ) + return status + + @task.branch() + def author_create_success_branch(**context: dict) -> str: + ti = context["ti"] + workflow_status = ti.xcom_pull(task_ids="create_author_on_inspire") + if workflow_status == "completed": + return "author_check_approval_branch" + else: + return "set_author_create_workflow_status_to_error" + + @task(trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS) + def close_author_create_user_ticket(**context: dict) -> None: + ticket_type = "author_create_user" + ticket_id = workflow_ticket_management_hook.get_ticket( + workflow_id=context["params"]["workflow_id"], ticket_type=ticket_type + )["ticket_id"] + endpoint = "api/tickets/resolve" + request_data = {"ticket_id": ticket_id} + inspire_http_hook.call_api(endpoint=endpoint, data=request_data, method="POST") + + @task() + def set_author_create_workflow_status_to_completed(**context: dict) -> None: + status_name = "completed" + workflow_management_hook.set_workflow_status( + status_name=status_name, workflow_id=context["params"]["workflow_id"] + ) + + @task + def empty_task() -> None: + # Logic to combine the results of branches + pass + + @task() + def set_author_create_workflow_status_to_error(**context: dict) -> None: + ti = context["ti"] + status_name = ti.xcom_pull(task_ids="create_author_on_inspire") + logger.info(f"Workflow status: {status_name}") + 
workflow_management_hook.set_workflow_status( + status_name=status_name, workflow_id=context["params"]["workflow_id"] + ) + + # task definitions + set_status_to_running_task = set_workflow_status_to_running() + create_author_on_inspire_task = create_author_on_inspire() + author_create_success_branch_task = author_create_success_branch() + author_check_approval_branch_task = author_check_approval_branch() + close_author_create_user_ticket_task = close_author_create_user_ticket() + create_author_create_curation_ticket_task = create_author_create_curation_ticket() + set_workflow_status_to_completed_task = ( + set_author_create_workflow_status_to_completed() + ) + set_workflow_status_to_error_task = set_author_create_workflow_status_to_error() + combine_ticket_and_no_ticket_task = empty_task() + + # task dependencies + ticket_branch = create_author_create_curation_ticket_task + ( + ticket_branch + >> close_author_create_user_ticket_task + >> set_workflow_status_to_completed_task + ) + + no_ticket_branch = combine_ticket_and_no_ticket_task + ( + no_ticket_branch + >> close_author_create_user_ticket_task + >> set_workflow_status_to_completed_task + ) + + author_check_approval_branch_task >> [ticket_branch, no_ticket_branch] + ( + set_status_to_running_task + >> create_author_on_inspire_task + >> author_create_success_branch_task + ) + author_create_success_branch_task >> [ + author_check_approval_branch_task, + set_workflow_status_to_error_task, + ] + + +author_create_approved_dag() diff --git a/workflows/dags/author/author_create/author_create_init.py b/workflows/dags/author/author_create/author_create_init.py new file mode 100644 index 000000000..fdd40b7cc --- /dev/null +++ b/workflows/dags/author/author_create/author_create_init.py @@ -0,0 +1,95 @@ +import datetime +import logging + +from airflow.decorators import dag, task +from airflow.models.param import Param +from hooks.backoffice.workflow_management_hook import WorkflowManagementHook +from hooks.backoffice.workflow_ticket_management_hook import ( + WorkflowTicketManagementHook, +) +from hooks.inspirehep.inspire_http_hook import InspireHttpHook +from include.utils.set_workflow_status import set_workflow_status_to_error + +logger = logging.getLogger(__name__) + + +@dag( + params={ + "workflow_id": Param(type="string", default=""), + "data": Param(type="object", default={}), + }, + start_date=datetime.datetime(2024, 5, 5), + schedule_interval=None, + catchup=False, + # TODO: what if callback fails? Data in backoffice not up to date! + on_failure_callback=set_workflow_status_to_error, +) +def author_create_initialization_dag(): + """ + Initialize a DAG for author create workflow. + + Tasks: + 1. create_ticket_on_author_create: Creates a ticket using the InspireHttpHook + to call the API endpoint. + 2. set_author_create_workflow_status_to_approval: Sets the workflow status + to "approval" using the WorkflowManagementHook. 
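+
+    Example (illustrative only): the bookkeeping request behind
+    ``create_ticket_entry``, sketched with plain ``requests``; the host
+    matches the local ``backoffice_conn`` connection and the token and ids
+    are placeholders::
+
+        import requests
+
+        requests.post(
+            "http://host.docker.internal:8000/api/workflow-ticket/",
+            headers={"Authorization": "Token <backoffice-token>"},
+            json={
+                "workflow_id": "<workflow-id>",
+                "ticket_id": "<ticket-id>",
+                "ticket_type": "author_create_user",
+            },
+        )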
+ + """ + inspire_http_hook = InspireHttpHook() + workflow_management_hook = WorkflowManagementHook() + workflow_ticket_management_hook = WorkflowTicketManagementHook() + + @task() + def set_workflow_status_to_running(**context): + status_name = "running" + workflow_management_hook.set_workflow_status( + status_name=status_name, workflow_id=context["params"]["workflow_id"] + ) + + @task() + def set_schema(**context): + schema = "https://inspirehep.net/schemas/records/authors.json" + workflow_management_hook.partial_update_workflow( + workflow_id=context["params"]["workflow_id"], + workflow_partial_update_data={"data": {"$schema": schema}}, + ) + + @task() + def create_author_create_user_ticket(**context: dict) -> None: + endpoint = "/api/tickets/create" + request_data = { + "functional_category": "Author curation", + "template": "user_new_author", + "workflow_id": context["params"]["workflow_id"], + "subject": "test", # TODO: set the subject and description + "description": "test", + "caller_email": "", # leave empty + } + response = inspire_http_hook.call_api( + endpoint=endpoint, data=request_data, method="POST" + ) + logger.info(f"Ticket created. Response status code: {response.status_code}") + logger.info(response.json()) + workflow_ticket_management_hook.create_ticket_entry( + workflow_id=context["params"]["workflow_id"], + ticket_type="author_create_user", + ticket_id=response.json()["ticket_id"], + ) + + @task() + def set_author_create_workflow_status_to_approval(**context: dict) -> None: + status_name = "approval" + workflow_management_hook.set_workflow_status( + status_name=status_name, workflow_id=context["params"]["workflow_id"] + ) + + # task dependencies + ( + set_workflow_status_to_running() + >> set_schema() + >> create_author_create_user_ticket() + >> set_author_create_workflow_status_to_approval() + ) + + +author_create_initialization_dag() diff --git a/workflows/dags/author/author_create/author_create_rejected.py b/workflows/dags/author/author_create/author_create_rejected.py new file mode 100644 index 000000000..7e012e7b2 --- /dev/null +++ b/workflows/dags/author/author_create/author_create_rejected.py @@ -0,0 +1,71 @@ +import datetime + +from airflow.decorators import dag, task +from airflow.models.param import Param +from hooks.backoffice.workflow_management_hook import WorkflowManagementHook +from hooks.backoffice.workflow_ticket_management_hook import ( + WorkflowTicketManagementHook, +) +from hooks.inspirehep.inspire_http_hook import InspireHttpHook +from include.utils.set_workflow_status import set_workflow_status_to_error + + +@dag( + params={ + "workflow_id": Param(type="string", default=""), + "data": Param(type="object", default={}), + }, + start_date=datetime.datetime(2024, 5, 5), + schedule_interval=None, + catchup=False, + # TODO: what if callback fails? Data in backoffice not up to date! + on_failure_callback=set_workflow_status_to_error, +) +def author_create_rejected_dag() -> None: + """ + This DAG defines the workflow for handling an author after reject action. + + Tasks: + 1. close_ticket_on_author_reject: Closes the ticket associated with the rejected + author. + 2. set_author_create_workflow_status_to_completed: Sets the status of + the author creation workflow to 'completed'. 
+ """ + inspire_http_hook = InspireHttpHook() + workflow_management_hook = WorkflowManagementHook() + workflow_ticket_management_hook = WorkflowTicketManagementHook() + + @task() + def close_author_create_user_ticket(**context: dict) -> None: + ticket_type = "author_create_user" + ticket_id = workflow_ticket_management_hook.get_ticket( + workflow_id=context["params"]["workflow_id"], ticket_type=ticket_type + )["ticket_id"] + endpoint = "/tickets/resolve" # TODO: the URL for resolving dag will change + request_data = {"ticket_id": ticket_id} + inspire_http_hook.call_api(endpoint=endpoint, data=request_data, method="POST") + + @task() + def set_author_create_workflow_status_to_completed(**context: dict) -> None: + status_name = "completed" + workflow_management_hook.set_workflow_status( + status_name=status_name, workflow_id=context["params"]["workflow_id"] + ) + + @task() + def set_workflow_status_to_running(**context): + status_name = "running" + workflow_management_hook.set_workflow_status( + status_name=status_name, workflow_id=context["params"]["workflow_id"] + ) + + # task definitions + set_status_to_running_task = set_workflow_status_to_running() + close_ticket_task = close_author_create_user_ticket() + set_status_completed_task = set_author_create_workflow_status_to_completed() + + # task dependencies + set_status_to_running_task >> close_ticket_task >> set_status_completed_task + + +author_create_rejected_dag() diff --git a/workflows/dags/author/author_update/README.md b/workflows/dags/author/author_update/README.md new file mode 100644 index 000000000..e69de29bb diff --git a/workflows/dags/author/author_update/__init__.py b/workflows/dags/author/author_update/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/workflows/dags/author/author_update/author_update.py b/workflows/dags/author/author_update/author_update.py new file mode 100644 index 000000000..a4f0a2e7d --- /dev/null +++ b/workflows/dags/author/author_update/author_update.py @@ -0,0 +1,134 @@ +import datetime + +from airflow.decorators import dag, task +from airflow.models.param import Param +from hooks.backoffice.workflow_management_hook import WorkflowManagementHook +from hooks.backoffice.workflow_ticket_management_hook import ( + WorkflowTicketManagementHook, +) +from hooks.inspirehep.inspire_http_hook import InspireHttpHook +from hooks.inspirehep.inspire_http_record_management_hook import ( + InspireHTTPRecordManagementHook, +) +from include.utils.set_workflow_status import ( + get_wf_status_from_inspire_response, + set_workflow_status_to_error, +) + + +@dag( + start_date=datetime.datetime(2024, 5, 5), + schedule_interval=None, + params={ + "workflow_id": Param(type="string", default=""), + "data": Param(type="object", default={}), + }, + catchup=False, + on_failure_callback=set_workflow_status_to_error, # TODO: what if callback fails? Data in backoffice not up to date! +) +def author_update_dag(): + """ + DAG for updating author on Inspire. + + Tasks: + 1. Sets the workflow status to "running". + 2. Creates a ticket for author updates. + 3. Updates the author information in Inspire. + 4. Sets the workflow status to "completed". 
+ + """ + inspire_http_hook = InspireHttpHook() + inspire_http_record_management_hook = InspireHTTPRecordManagementHook() + workflow_management_hook = WorkflowManagementHook() + workflow_ticket_management_hook = WorkflowTicketManagementHook() + + @task() + def set_author_update_workflow_status_to_running(**context): + status_name = "running" + workflow_management_hook.set_workflow_status( + status_name=status_name, workflow_id=context["params"]["workflow_id"] + ) + + @task() + def create_ticket_on_author_update(**context): + endpoint = "/tickets/create-with-template" + request_data = { + "functional_category": "Author updates", + "template": "curator_update_author", + "workflow_id": context["params"]["workflow_id"], + "subject": "test", + "description": "test", + "caller_email": "", + } + response = inspire_http_hook.call_api( + endpoint=endpoint, data=request_data, method="POST" + ) + workflow_ticket_management_hook.create_ticket_entry( + workflow_id=context["params"]["workflow_id"], + ticket_type="author_update_curation", + ticket_id=response.json()["ticket_id"], + ) + + @task() + def update_author_on_inspire(**context): + workflow_data = workflow_management_hook.get_workflow( + workflow_id=context["params"]["workflow_id"] + ) + control_number = workflow_data["data"]["control_number"] + record_data = inspire_http_record_management_hook.get_record( + pid_type="authors", control_number=control_number + ) + updated_record_data = record_data["metadata"].update(workflow_data["data"]) + response = inspire_http_record_management_hook.update_record( + data=updated_record_data, + pid_type="authors", + control_number=control_number, + revision_id=record_data["revision_id"] + 1, + ) + status = get_wf_status_from_inspire_response(response) + return status + + @task() + def set_author_update_workflow_status_to_completed(**context): + status_name = "completed" + workflow_management_hook.set_workflow_status( + status_name=status_name, workflow_id=context["params"]["workflow_id"] + ) + + @task.branch(provide_context=True) + def author_update_success_branch(**context): + ti = context["ti"] + workflow_status = ti.xcom_pull(task_ids="update_author_on_inspire") + + if workflow_status == "completed": + return "set_author_update_workflow_status_to_completed" + else: + return "set_author_update_workflow_status_to_error" + + @task() + def set_author_update_workflow_status_to_error(**context): + ti = context["ti"] + status_name = ti.xcom_pull(task_ids="update_author_on_inspire") + workflow_management_hook.set_workflow_status( + status_name=status_name, workflow_id=context["params"]["workflow_id"] + ) + + # task definitions + set_workflow_status_to_running_task = set_author_update_workflow_status_to_running() + create_ticket_task = create_ticket_on_author_update() + update_author_on_inspire_task = update_author_on_inspire() + set_status_to_completed_task = set_author_update_workflow_status_to_completed() + set_status_to_error_task = set_author_update_workflow_status_to_error() + author_update_success_branch_task = author_update_success_branch() + + # task dependencies + ( + set_workflow_status_to_running_task + >> create_ticket_task + >> update_author_on_inspire_task + >> author_update_success_branch_task + >> [set_status_to_error_task, set_status_to_completed_task] + ) + + +author_update_dag_instance = author_update_dag() diff --git a/workflows/dags/happy_flow_dag.py b/workflows/dags/happy_flow_dag.py new file mode 100644 index 000000000..3b7325797 --- /dev/null +++ b/workflows/dags/happy_flow_dag.py @@ -0,0 
+1,66 @@ +import datetime +import json + +from airflow.decorators import dag, task +from airflow.sensors.sql import SqlSensor + + +@dag(start_date=datetime.datetime(2021, 1, 1), schedule_interval=None) +def happy_flow_dag(): + @task + def fetch_document(filename: str) -> dict: + from include.utils.s3_client import get_s3_client + + s3_client = get_s3_client() + s3_client.download_file("inspire-incoming", filename, f"./{filename}") + with open(f"./{filename}") as f: + data = json.load(f) + return data + + @task() + def normalize_affiliations(data): + from hooks.inspire_connection_hook import call_inspire_api_with_hook + from include.inspire.affiliations_normalization import ( + assign_normalized_affiliations, + ) + + endpoint = "/curation/literature/affiliations-normalization" + request_data = {"authors": data["authors"], "workflow_id": 1} + result = call_inspire_api_with_hook(endpoint=endpoint, data=request_data) + data = assign_normalized_affiliations(result.json(), data=data) + return data + + @task.branch() + def auto_approval(data): + from include.inspire.approval import auto_approve + + if auto_approve(data): + return ["validate"] + return ["wait_for_approval"] + + wait_for_approval = SqlSensor( + task_id="wait_for_approval", + conn_id="inspire_db_connection", + poke_interval=2, + sql="select * from workflow_approval where id = '2307.13748'", + ) + + @task() + def validate(): + return + + fetch_document_task = fetch_document("test.json") + normalize_affiliations_task = normalize_affiliations(fetch_document_task) + auto_approval = auto_approval(normalize_affiliations_task) + validate_task = validate() + + ( + fetch_document_task + >> normalize_affiliations_task + >> auto_approval + >> [validate_task, wait_for_approval] + ) + wait_for_approval >> validate_task + + +happy_flow_dag() diff --git a/workflows/dags/process_until_breakpoint.py b/workflows/dags/process_until_breakpoint.py new file mode 100644 index 000000000..43690aa39 --- /dev/null +++ b/workflows/dags/process_until_breakpoint.py @@ -0,0 +1,73 @@ +import datetime +import json + +from airflow.decorators import dag, task +from airflow.operators.python import ShortCircuitOperator +from airflow.utils.trigger_rule import TriggerRule + + +@dag( + start_date=datetime.datetime(2021, 1, 1), + schedule_interval=None, + params={"approved": True}, +) +def process_untill_breakpoint(): + def check_approval(**context): + return not context["params"]["approved"] + + @task + def fetch_document(filename: str) -> dict: + from include.utils.s3_client import get_s3_client + + s3_client = get_s3_client() + s3_client.download_file("inspire-incoming", filename, f"./{filename}") + with open(f"./{filename}") as f: + data = json.load(f) + return data + + @task() + def normalize_affiliations(data): + from hooks.inspire_connection_hook import call_inspire_api_with_hook + from include.inspire.affiliations_normalization import ( + assign_normalized_affiliations, + ) + + endpoint = "/curation/literature/affiliations-normalization" + request_data = {"authors": data["authors"], "workflow_id": 1} + result = call_inspire_api_with_hook(endpoint=endpoint, data=request_data) + data = assign_normalized_affiliations(result.json(), data=data) + return data + + def auto_approval(**kwargs): + from include.inspire.approval import auto_approve + + data = kwargs["task_instance"].xcom_pull(task_ids="normalize_affiliations") + return bool(auto_approve(data)) + + @task(trigger_rule=TriggerRule.NONE_FAILED) + def validate(): + return + + check_approval = ShortCircuitOperator( 
+ task_id="check_approval", + ignore_downstream_trigger_rules=False, + python_callable=check_approval, + provide_context=True, + ) + fetch_document_task = fetch_document("test.json") + normalize_affiliations_task = normalize_affiliations(fetch_document_task) + auto_approval = ShortCircuitOperator( + task_id="auto_approval", python_callable=auto_approval, provide_context=True + ) + validation = validate() + + ( + check_approval + >> fetch_document_task + >> normalize_affiliations_task + >> auto_approval + >> validation + ) + + +process_untill_breakpoint() diff --git a/workflows/logs/scheduler/latest b/workflows/logs/scheduler/latest new file mode 120000 index 000000000..57b11175f --- /dev/null +++ b/workflows/logs/scheduler/latest @@ -0,0 +1 @@ +2024-07-25 \ No newline at end of file diff --git a/workflows/plugins/__init__.py b/workflows/plugins/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/workflows/plugins/hooks/__init__.py b/workflows/plugins/hooks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/workflows/plugins/hooks/backoffice/__init__.py b/workflows/plugins/hooks/backoffice/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/workflows/plugins/hooks/backoffice/base.py b/workflows/plugins/hooks/backoffice/base.py new file mode 100644 index 000000000..7a266e30f --- /dev/null +++ b/workflows/plugins/hooks/backoffice/base.py @@ -0,0 +1,60 @@ +import requests +from airflow.models import Variable +from airflow.providers.http.hooks.http import HttpHook +from hooks.tenacity_config import tenacity_retry_kwargs +from requests import Response + + +class BackofficeHook(HttpHook): + """ + A hook to update the status of a workflow in the backoffice system. + + :param method: The HTTP method to use for the request (default: "GET"). + :type method: str + :param http_conn_id: The ID of the HTTP connection to use + (default: "backoffice_conn"). 
+ :type http_conn_id: str + """ + + def __init__( + self, + method: str = "GET", + http_conn_id: str = "backoffice_conn", + headers: dict = None, + ) -> None: + super().__init__(method=method, http_conn_id=http_conn_id) + self.headers = headers or { + "Authorization": f'Token {Variable.get("backoffice_token")}', + "Accept": "application/json", + "Content-Type": "application/json", + } + + @property + def tenacity_retry_kwargs(self) -> dict: + return tenacity_retry_kwargs() + + def run( + self, + endpoint: str, + method: str = None, + data: dict = None, + headers: dict = None, + params: dict = None, + extra_options: dict = None, + ) -> Response: + extra_options = extra_options or {} + headers = headers or self.headers + method = method or self.method + + session = self.get_conn(headers) + + if not self.base_url.endswith("/") and not endpoint.startswith("/"): + url = self.base_url + "/" + endpoint + else: + url = self.base_url + endpoint + + req = requests.Request(method, url, json=data, headers=headers, params=params) + + prepped_request = session.prepare_request(req) + self.log.info("Sending '%s' to url: %s", method, url) + return self.run_and_check(session, prepped_request, extra_options) diff --git a/workflows/plugins/hooks/backoffice/workflow_management_hook.py b/workflows/plugins/hooks/backoffice/workflow_management_hook.py new file mode 100644 index 000000000..72519fd8c --- /dev/null +++ b/workflows/plugins/hooks/backoffice/workflow_management_hook.py @@ -0,0 +1,58 @@ +from hooks.backoffice.base import BackofficeHook +from requests import Response + + +class WorkflowManagementHook(BackofficeHook): + """ + A hook to update the status of a workflow in the backoffice system. + + :param method: The HTTP method to use for the request (default: "GET"). + :type method: str + :param http_conn_id: The ID of the HTTP connection to use + (default: "backoffice_conn"). + :type http_conn_id: str + """ + + def set_workflow_status(self, status_name: str, workflow_id: str) -> Response: + """ + Updates the status of a workflow in the backoffice system. + + :param status_name: The new status of the workflow. + :type status: str + :param workflow_id: The ID of the workflow to update. 
+ :type workflow_id: str + """ + request_data = { + "status": status_name, + } + return self.partial_update_workflow( + workflow_partial_update_data=request_data, workflow_id=workflow_id + ) + + def get_workflow(self, workflow_id: str) -> dict: + endpoint = f"api/workflows/{workflow_id}" + response = self.run_with_advanced_retry( + _retry_args=self.tenacity_retry_kwargs, method="GET", endpoint=endpoint + ) + response = self.run(endpoint=endpoint, headers=self.headers) + return response.json() + + def update_workflow(self, workflow_id: str, workflow_data: dict) -> Response: + endpoint = f"api/workflows/{workflow_id}/" + return self.run_with_advanced_retry( + _retry_args=self.tenacity_retry_kwargs, + method="PUT", + data=workflow_data, + endpoint=endpoint, + ) + + def partial_update_workflow( + self, workflow_id: str, workflow_partial_update_data: dict + ) -> Response: + endpoint = f"api/workflow-update/{workflow_id}/" + return self.run_with_advanced_retry( + _retry_args=self.tenacity_retry_kwargs, + method="PATCH", + data=workflow_partial_update_data, + endpoint=endpoint, + ) diff --git a/workflows/plugins/hooks/backoffice/workflow_ticket_management_hook.py b/workflows/plugins/hooks/backoffice/workflow_ticket_management_hook.py new file mode 100644 index 000000000..0f1945885 --- /dev/null +++ b/workflows/plugins/hooks/backoffice/workflow_ticket_management_hook.py @@ -0,0 +1,50 @@ +from hooks.backoffice.base import BackofficeHook +from requests import Response + + +class WorkflowTicketManagementHook(BackofficeHook): + """ + A hook to update the status of a workflow in the backoffice system. + + :param method: The HTTP method to use for the request (default: "GET"). + :type method: str + :param http_conn_id: The ID of the HTTP connection to use ( + default: "backoffice_conn"). 
+ :type http_conn_id: str + """ + + def __init__( + self, + method: str = "GET", + http_conn_id: str = "backoffice_conn", + headers: dict = None, + ) -> None: + super().__init__(method, http_conn_id, headers) + self.endpoint = "api/workflow-ticket/" + + def get_ticket(self, workflow_id: str, ticket_type: str) -> dict: + endpoint = f"api/workflow-ticket/{workflow_id}/" + params = {"ticket_type": ticket_type} + response = self.run_with_advanced_retry( + _retry_args=self.tenacity_retry_kwargs, + method="GET", + endpoint=endpoint, + params=params, + ) + return response.json() + + def create_ticket_entry( + self, workflow_id: str, ticket_id: str, ticket_type: str + ) -> Response: + endpoint = "api/workflow-ticket/" + data = { + "ticket_type": ticket_type, + "ticket_id": ticket_id, + "workflow_id": workflow_id, + } + return self.run_with_advanced_retry( + _retry_args=self.tenacity_retry_kwargs, + method="POST", + data=data, + endpoint=endpoint, + ) diff --git a/workflows/plugins/hooks/inspirehep/__init__.py b/workflows/plugins/hooks/inspirehep/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/workflows/plugins/hooks/inspirehep/inspire_http_hook.py b/workflows/plugins/hooks/inspirehep/inspire_http_hook.py new file mode 100644 index 000000000..094ff451f --- /dev/null +++ b/workflows/plugins/hooks/inspirehep/inspire_http_hook.py @@ -0,0 +1,65 @@ +import json +import logging + +import requests +from airflow.models import Variable +from airflow.providers.http.hooks.http import HttpHook +from hooks.tenacity_config import tenacity_retry_kwargs +from requests import Response + +logger = logging.getLogger() + + +class InspireHttpHook(HttpHook): + """ + Hook to interact with Inspire API + It overrides the original `run` method in HttpHook so that + we can pass data argument as data, not params + """ + + def __init__(self, method="GET", http_conn_id="inspire_connection"): + super().__init__(method=method, http_conn_id=http_conn_id) + + @property + def tenacity_retry_kwargs(self) -> dict: + return tenacity_retry_kwargs() + + @property + def headers(self) -> dict: + return { + "Authorization": f'Bearer {Variable.get("inspire_token")}', + "Accept": "application/vnd+inspire.record.raw+json", + } + + def run( + self, + endpoint: str, + method: str = None, + json: dict = None, + data: dict = None, + headers: dict = None, + extra_options: dict = None, + ): + extra_options = extra_options or {} + method = method or self.method + session = self.get_conn(headers) + + if not self.base_url.endswith("/") and not endpoint.startswith("/"): + url = self.base_url + "/" + endpoint + else: + url = self.base_url + endpoint + + req = requests.Request(method, url, json=json, data=data, headers=headers) + + prepped_request = session.prepare_request(req) + self.log.info("Sending '%s' to url: %s", method, url) + return self.run_and_check(session, prepped_request, extra_options) + + def call_api(self, method: str, endpoint: str, data: dict) -> Response: + return self.run_with_advanced_retry( + _retry_args=self.tenacity_retry_kwargs, + endpoint=endpoint, + headers=self.headers, + data=json.dumps(data), + method=method, + ) diff --git a/workflows/plugins/hooks/inspirehep/inspire_http_record_management_hook.py b/workflows/plugins/hooks/inspirehep/inspire_http_record_management_hook.py new file mode 100644 index 000000000..4cd57ba3f --- /dev/null +++ b/workflows/plugins/hooks/inspirehep/inspire_http_record_management_hook.py @@ -0,0 +1,47 @@ +from hooks.inspirehep.inspire_http_hook import InspireHttpHook +from 
requests import Response
+
+
+class InspireHTTPRecordManagementHook(InspireHttpHook):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+    def update_record(
+        self, data: dict, pid_type: str, control_number: int, revision_id: int
+    ) -> Response:
+        update_headers = {**self.headers, "If-Match": f'"{revision_id - 1}"'}
+        return self.run_with_advanced_retry(
+            _retry_args=self.tenacity_retry_kwargs,
+            method="PUT",
+            headers=update_headers,
+            json=data,
+            endpoint=f"{pid_type}/{control_number}",
+        )
+
+    def get_record(self, pid_type: str, control_number: int) -> dict:
+        response = self.run_with_advanced_retry(
+            _retry_args=self.tenacity_retry_kwargs,
+            method="GET",
+            headers=self.headers,
+            endpoint=f"/{pid_type}/{control_number}",
+        )
+        return response.json()
+
+    def get_record_revision_id(self, pid_type: str, control_number: int) -> int:
+        response = self.run_with_advanced_retry(
+            _retry_args=self.tenacity_retry_kwargs,
+            method="GET",
+            headers=self.headers,
+            endpoint=f"/{pid_type}/{control_number}",
+        )
+        response.raise_for_status()
+        return response.json()["revision_id"]
+
+    def post_record(self, data: dict, pid_type: str) -> Response:
+        return self.run_with_advanced_retry(
+            _retry_args=self.tenacity_retry_kwargs,
+            method="POST",
+            headers=self.headers,
+            json=data,
+            endpoint=f"api/{pid_type}",
+        )
diff --git a/workflows/plugins/hooks/tenacity_config.py b/workflows/plugins/hooks/tenacity_config.py
new file mode 100644
index 000000000..041ca2a9b
--- /dev/null
+++ b/workflows/plugins/hooks/tenacity_config.py
@@ -0,0 +1,9 @@
+import tenacity
+
+
+def tenacity_retry_kwargs():
+    return {
+        "wait": tenacity.wait_exponential(),
+        "stop": tenacity.stop_after_attempt(5),
+        "retry": tenacity.retry_if_exception_type(Exception),
+    }
diff --git a/workflows/plugins/include/__init__.py b/workflows/plugins/include/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/workflows/plugins/include/inspire/__init__.py b/workflows/plugins/include/inspire/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/workflows/plugins/include/inspire/affiliations_normalization.py b/workflows/plugins/include/inspire/affiliations_normalization.py
new file mode 100644
index 000000000..3d99b0bca
--- /dev/null
+++ b/workflows/plugins/include/inspire/affiliations_normalization.py
@@ -0,0 +1,10 @@
+def assign_normalized_affiliations(normalized_affiliations, data):
+    for author, normalized_affiliation in zip(
+        data.get("authors", []), normalized_affiliations, strict=False
+    ):
+        author_affiliations = author.get("affiliations", [])
+        if author_affiliations:
+            continue
+        if normalized_affiliation:
+            author["affiliations"] = normalized_affiliation
+    return data
diff --git a/workflows/plugins/include/inspire/approval.py b/workflows/plugins/include/inspire/approval.py
new file mode 100644
index 000000000..3f95f01a3
--- /dev/null
+++ b/workflows/plugins/include/inspire/approval.py
@@ -0,0 +1,70 @@
+from itertools import chain
+
+ARXIV_CATEGORIES = {
+    "core": ["hep-ex", "hep-lat", "hep-ph", "hep-th"],
+    "non-core": [
+        "astro-ph.CO",
+        "astro-ph.HE",
+        "gr-qc",
+        "nucl-ex",
+        "nucl-th",
+        "physics.acc-ph",
+        "physics.ins-det",
+        "quant-ph",
+    ],
+}
+
+
+def auto_approve(data):
+    """Check whether to auto-approve the currently ingested article.
+
+    Arguments:
+        data (dict): metadata of the ingested article.
+
+    Return:
+        bool: True when the record belongs to an arXiv category that is fully
+        harvested or if the primary category is `physics.data-an`, otherwise
+        False.
+    """
+    return has_fully_harvested_category(data) or physics_data_an_is_primary_category(
+        data
+    )
+
+
+def has_fully_harvested_category(record):
+    """Check if the record has fully harvested arXiv categories.
+
+    Arguments:
+        record(dict): the ingested article.
+
+    Return:
+        bool: True when the record belongs to an arXiv category that is fully
+        harvested, otherwise False.
+    """
+    record_categories = set(
+        chain.from_iterable(
+            eprint["categories"] for eprint in record.get("arxiv_eprints", [])
+        )
+    )
+    harvested_categories = ARXIV_CATEGORIES
+    return (
+        len(
+            record_categories
+            & set(
+                harvested_categories.get("core") + harvested_categories.get("non-core")
+            )
+        )
+        > 0
+    )
+
+
+def physics_data_an_is_primary_category(record):
+    record_categories = list(
+        chain.from_iterable(
+            eprint["categories"] for eprint in record.get("arxiv_eprints", [])
+        )
+    )
+    if record_categories:
+        return record_categories[0] == "physics.data-an"
+    return False
diff --git a/workflows/plugins/include/utils/__init__.py b/workflows/plugins/include/utils/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/workflows/plugins/include/utils/s3_client.py b/workflows/plugins/include/utils/s3_client.py
new file mode 100644
index 000000000..0f5809d49
--- /dev/null
+++ b/workflows/plugins/include/utils/s3_client.py
@@ -0,0 +1,14 @@
+import os
+
+import boto3
+
+
+def get_s3_client():
+    s3_resource = boto3.client(
+        "s3",
+        endpoint_url=os.getenv("S3_HOST"),
+        aws_access_key_id=os.getenv("S3_USER"),
+        aws_secret_access_key=os.getenv("S3_PASSWORD"),
+        verify=False,
+    )
+    return s3_resource
diff --git a/workflows/plugins/include/utils/set_workflow_status.py b/workflows/plugins/include/utils/set_workflow_status.py
new file mode 100644
index 000000000..db6326f3e
--- /dev/null
+++ b/workflows/plugins/include/utils/set_workflow_status.py
@@ -0,0 +1,46 @@
+import logging
+
+from hooks.backoffice.workflow_management_hook import WorkflowManagementHook
+from requests import Response
+
+logger = logging.getLogger(__name__)
+
+
+def get_wf_status_from_inspire_response(response: Response) -> str:
+    """
+    Derives the workflow status from the response INSPIRE returned.
+
+    Args:
+        response (Response): The response object from the external service.
+
+    Returns:
+        str: "completed", "validation_error", or "error".
+    """
+    if response.ok:
+        workflow_status = "completed"
+    elif (
+        response.status_code == 400
+        and "validation error" in response.json()["message"].lower()
+    ):
+        workflow_status = "validation_error"
+    else:
+        workflow_status = "error"
+
+    return workflow_status
+
+
+def set_workflow_status_to_error(context: dict) -> None:
+    """
+    Sets the workflow status to error.
+
+    Args:
+        context (dict): the Airflow task context; ``params.workflow_id``
+            identifies the workflow to update.
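+
+    Example (illustrative only): how the DAGs in this patch wire the
+    callback; the DAG has to define a ``workflow_id`` param, otherwise the
+    ``context`` lookup below fails::
+
+        @dag(
+            params={"workflow_id": Param(type="string", default="")},
+            start_date=datetime.datetime(2024, 5, 5),
+            schedule_interval=None,
+            catchup=False,
+            on_failure_callback=set_workflow_status_to_error,
+        )
+        def some_workflow_dag():  # hypothetical DAG
+            ...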
diff --git a/workflows/plugins/include/utils/__init__.py b/workflows/plugins/include/utils/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/workflows/plugins/include/utils/s3_client.py b/workflows/plugins/include/utils/s3_client.py
new file mode 100644
index 000000000..0f5809d49
--- /dev/null
+++ b/workflows/plugins/include/utils/s3_client.py
@@ -0,0 +1,14 @@
+import os
+
+import boto3
+
+
+def get_s3_client():
+    s3_client = boto3.client(
+        "s3",
+        endpoint_url=os.getenv("S3_HOST"),
+        aws_access_key_id=os.getenv("S3_USER"),
+        aws_secret_access_key=os.getenv("S3_PASSWORD"),
+        verify=False,
+    )
+    return s3_client
diff --git a/workflows/plugins/include/utils/set_workflow_status.py b/workflows/plugins/include/utils/set_workflow_status.py
new file mode 100644
index 000000000..db6326f3e
--- /dev/null
+++ b/workflows/plugins/include/utils/set_workflow_status.py
@@ -0,0 +1,48 @@
+import logging
+
+from hooks.backoffice.workflow_management_hook import WorkflowManagementHook
+from requests import Response
+
+logger = logging.getLogger(__name__)
+
+
+def get_wf_status_from_inspire_response(response: Response) -> str:
+    """
+    Derives the workflow status from the INSPIRE response.
+
+    Args:
+        response (Response): The response object from the external service.
+
+    Returns:
+        str: "completed", "validation_error" or "error".
+    """
+    if response.ok:
+        workflow_status = "completed"
+    elif (
+        response.status_code == 400
+        and "validation error" in response.json().get("message", "").lower()
+    ):
+        workflow_status = "validation_error"
+    else:
+        workflow_status = "error"
+
+    return workflow_status
+
+
+def set_workflow_status_to_error(context: dict) -> None:
+    """
+    Sets the workflow status to "error" for the workflow in the task context.
+
+    Args:
+        context (dict): The Airflow task context; ``params["workflow_id"]``
+            identifies the workflow.
+    """
+    logger.info("Setting workflow status to error")
+    response = WorkflowManagementHook().set_workflow_status(
+        status_name="error", workflow_id=context["params"]["workflow_id"]
+    )
+    try:
+        response.raise_for_status()
+    except Exception:
+        logger.exception("Error setting workflow status to error")
+        raise
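Note (editorial, not part of the patch): the mapping performed by
get_wf_status_from_inspire_response(), shown with Response objects built by
hand the way a unit test might (setting the private _content attribute). The
import path assumes the plugins folder is on sys.path, as the hooks.* imports
above suggest.

    import requests

    from include.utils.set_workflow_status import get_wf_status_from_inspire_response

    ok = requests.Response()
    ok.status_code = 201
    assert get_wf_status_from_inspire_response(ok) == "completed"

    invalid = requests.Response()
    invalid.status_code = 400
    invalid._content = b'{"message": "Validation Error: titles is a required property"}'
    assert get_wf_status_from_inspire_response(invalid) == "validation_error"

    broken = requests.Response()
    broken.status_code = 500
    assert get_wf_status_from_inspire_response(broken) == "error"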
exiting") + break + if len(t.upstream_task_ids) == 1: + self.log.info(f"appending task {t.task_id}") + found_tasks.append(t) + self.find_tasks_to_skip(t, found_tasks) + return found_tasks + + def execute(self, context): + condition = super().execute(context) + self.log.info("Condition result is %s", condition) + + if condition: + self.log.info("Proceeding with downstream tasks...") + return + + self.log.info("Skipping downstream tasks that only rely on this path...") + + tasks_to_skip = self.find_tasks_to_skip(context["task"]) + self.log.info("Tasks to skip: %s", tasks_to_skip) + + if tasks_to_skip: + self.log.info(f"Skipping {tasks_to_skip}") + self.skip(context["dag_run"], context["ti"].execution_date, tasks_to_skip) + + self.log.info("Done.") diff --git a/workflows/requirements-test.txt b/workflows/requirements-test.txt new file mode 100644 index 000000000..0c69b775f --- /dev/null +++ b/workflows/requirements-test.txt @@ -0,0 +1,3 @@ +pytest +coverage +pytest-cov diff --git a/workflows/requirements.txt b/workflows/requirements.txt new file mode 100644 index 000000000..a6b10ba9f --- /dev/null +++ b/workflows/requirements.txt @@ -0,0 +1 @@ +apache-airflow==2.8.3 diff --git a/workflows/scripts/connections/connections.json b/workflows/scripts/connections/connections.json new file mode 100644 index 000000000..15551c5dc --- /dev/null +++ b/workflows/scripts/connections/connections.json @@ -0,0 +1,32 @@ +{ + "inspire_connection": { + "conn_type": "http", + "description": "", + "login": "", + "password": null, + "host": "https://inspirebeta.net", + "port": null, + "schema": "", + "extra": "" + }, + "inspire_db_connection": { + "conn_type": "postgres", + "description": "", + "login": "inspire", + "password": "inspire", + "host": "localhost", + "port": 5433, + "schema": "inspire", + "extra": "{}" + }, + "backoffice_conn": { + "conn_type": "http", + "description": "", + "login": "", + "password": null, + "host": "http://host.docker.internal:8000", + "port": null, + "schema": "", + "extra": "" + } +} diff --git a/workflows/scripts/setup_s3.sh b/workflows/scripts/setup_s3.sh new file mode 100644 index 000000000..649cc2a7f --- /dev/null +++ b/workflows/scripts/setup_s3.sh @@ -0,0 +1,5 @@ +sleep 10 +/usr/bin/mc alias set airflow http://s3:9000 ${MINIO_ROOT_USER} ${MINIO_ROOT_PASSWORD} +/usr/bin/mc mb airflow/inspire-incoming +/usr/bin/mc mirror /opt/airflow/data airflow/inspire-incoming +exit 0 diff --git a/workflows/scripts/setup_tables_inspire_db.sh b/workflows/scripts/setup_tables_inspire_db.sh new file mode 100644 index 000000000..7bc2eb4dc --- /dev/null +++ b/workflows/scripts/setup_tables_inspire_db.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +set -e + +psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL + +CREATE TABLE IF NOT EXISTS workflow_approval ( + id text NOT NULL, + approval text NOT NULL +); + +EOSQL diff --git a/workflows/scripts/variables/variables.json b/workflows/scripts/variables/variables.json new file mode 100644 index 000000000..95ce3561e --- /dev/null +++ b/workflows/scripts/variables/variables.json @@ -0,0 +1,4 @@ +{ + "backoffice_token": "2e04111a61e8f5ba6ecec52af21bbb9e81732085", + "inspire_token": "CHANGE_ME" +} diff --git a/workflows/tests/test_example.py b/workflows/tests/test_example.py new file mode 100644 index 000000000..813df6020 --- /dev/null +++ b/workflows/tests/test_example.py @@ -0,0 +1,2 @@ +def test_example(): + assert True