diff --git a/backoffice-repo/LICENSE b/LICENSE
similarity index 100%
rename from backoffice-repo/LICENSE
rename to LICENSE
diff --git a/README.md b/README.md
index 1a74e4322..ff52f37ca 100644
--- a/README.md
+++ b/README.md
@@ -103,10 +103,18 @@ $ brew file install
 ### Make
 
 This will prepare the whole inspire development with demo records:
-
 ```bash
 make run
-make setup
+```
+
+This spins up the whole inspirehep development environment with demo records, but without the backoffice:
+```bash
+make run-inspirehep
+```
+
+This will spin up the backoffice:
+```bash
+make run-backoffice
 ```
 
 You can stop it by simply run
diff --git a/backoffice-repo/Makefile b/backoffice-repo/Makefile
deleted file mode 100644
index c782ac233..000000000
--- a/backoffice-repo/Makefile
+++ /dev/null
@@ -1,30 +0,0 @@
-# Makefile
-
-# Set the AIRFLOW_HOME environment variable
-export AIRFLOW_HOME=$(shell pwd)/workflows
-export AIRFLOW_PROJ_DIR=$(AIRFLOW_HOME)
-
-# Default target
-all: run
-
-run:
-	docker compose up
-
-clean:
-	docker compose down
-
-init: backoffice-init workflows-init
-
-backoffice-init:
-	docker compose exec backoffice-webserver python manage.py create_groups
-	docker compose exec backoffice-webserver python manage.py loaddata backoffice/users/fixtures/users.json
-	docker compose exec backoffice-webserver python manage.py loaddata backoffice/users/fixtures/tokens.json
-	echo "\033[1;32memail: admin@admin.com / password: admin \033[0m"
-	echo "Backoffice initialized"
-
-workflows-init:
-	docker compose exec airflow-webserver /entrypoint airflow connections import ./scripts/connections/connections.json
-	docker compose exec airflow-webserver /entrypoint airflow variables import ./scripts/variables/variables.json
-	echo "\033[31mCHANGE inspire_token in Admin->Variables\033[0m"
-	echo "\033[1;32musername: airflow / password: airflow \033[0m"
-	echo "Workflows initialized"
diff --git a/backoffice-repo/README.md b/backoffice-repo/README.md
index c67549e2e..24ab5ce25 100644
--- a/backoffice-repo/README.md
+++ b/backoffice-repo/README.md
@@ -1,21 +1,22 @@
 # INSPIRE Backoffice
 
-This repo consists of Airflow Dags and Django backoffice. To run it:
+The main services consist of a Django backoffice and Airflow DAGs.
+# Run it with Docker
+
+Go to the parent directory and run:
 ```bash
-make run
-make init
+make run-backoffice
 ```
-
 # Deploying to QA
 
 Current development process requires:
 
 ## Deploy to backoffice:
 - Go to Harbor: https://registry.cern.ch/harbor/projects/2086/repositories/inspire%2Fbackoffice/artifacts-tab
-  - Copy the tag of the lastest image into https://github.com/cern-sis/kubernetes-inspire/blob/main/backoffice/environments/backoffice/kustomization.yml#L66
+  - Copy the tag of the latest image into https://github.com/cern-sis/kubernetes-inspire/blob/main/backoffice/environments/backoffice/kustomization.yml#L68
 - Go to https://argocd.sis-inspire-20240708.siscern.org/applications/argocd/inspire-backoffice and synchronize
 
 ## Deploy to airflow:
 - Go to Harbor: https://registry.cern.ch/harbor/projects/2086/repositories/inspire%2Fworkflows/artifacts-tab
-  - Copy the tag of the lastest image into [https://github.com/cern-sis/kubernetes-inspire/blob/main/backoffice/environments/backoffice/kustomization.yml#L66](https://github.com/cern-sis/kubernetes-airflow/blob/main/airflow/environments/airflow-inspire-dev/kustomization.yml#L19)
+  - Copy the tag of the latest image into https://github.com/cern-sis/kubernetes-airflow/blob/main/airflow/environments/airflow-inspire-dev/kustomization.yml#L19
 - Go to https://argocd-airflow.siscern.org/applications/argocd/airflow-inspire-dev and synchronize
diff --git a/backoffice-repo/docker-compose.yaml b/backoffice-repo/docker-compose.yaml
deleted file mode 100644
index 200c0d50f..000000000
--- a/backoffice-repo/docker-compose.yaml
+++ /dev/null
@@ -1,369 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2024 CERN.
-#
-# inspirehep is free software; you can redistribute it and/or modify it under
-# the terms of the MIT License; see LICENSE file for more details.
-
-# Core for e2e and local development
-
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2019 CERN.
-#
-# inspirehep is free software; you can redistribute it and/or modify it under
-# the terms of the MIT License; see LICENSE file for more details.
-
-# Core for e2e and local development
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-# Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL.
-#
-# WARNING: This configuration is for local development. Do not use it in a production deployment.
-#
-# This configuration supports basic configuration using environment variables or an .env file
-# The following variables are supported:
-#
-# AIRFLOW_IMAGE_NAME - Docker image name used to run Airflow.
-# Default: apache/airflow:2.8.3
-# AIRFLOW_UID - User ID in Airflow containers
-# Default: 50000
-# AIRFLOW_PROJ_DIR - Base path to which all the files will be volumed.
-# Default: .
-# Those configurations are useful mostly in case of standalone testing/running Airflow in test/try-out mode
-#
-# _AIRFLOW_WWW_USER_USERNAME - Username for the administrator account (if requested).
-# Default: airflow
-# _AIRFLOW_WWW_USER_PASSWORD - Password for the administrator account (if requested).
-# Default: airflow
-# _PIP_ADDITIONAL_REQUIREMENTS - Additional PIP requirements to add when starting all containers.
-# Use this option ONLY for quick checks. Installing requirements at container
-# startup is done EVERY TIME the service is started.
-# A better way is to build a custom image or extend the official image
-# as described in https://airflow.apache.org/docs/docker-stack/build.html.
-# Default: ''
-#
-# Feel free to modify this file to suit your needs.
----
-x-airflow-common: &airflow-common
-  # In order to add custom dependencies or upgrade provider packages you can use your extended image.
-  # Comment the image line, place your Dockerfile in the directory where you placed the docker-compose.yaml
-  # and uncomment the "build" line below, Then run `docker-compose build` to build the images.
-  build:
-    context: backoffice-repo/workflows
-    dockerfile: Dockerfile.local
-  environment: &airflow-common-env
-    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
-    AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://inspirehep:inspirehep@db/inspirehep
-    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://inspirehep:inspirehep@db/inspirehep
-    AIRFLOW__CELERY__BROKER_URL: redis://:@cache:6379/0
-    AIRFLOW__CORE__FERNET_KEY: ''
-    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'false'
-    AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
-    AIRFLOW__API__AUTH_BACKENDS: 'airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session'
-    AIRFLOW__WEBSERVER__RELOAD_ON_PLUGIN_CHANGE: 'true' # used when modifying plugins
-    # yamllint disable rule:line-length
-    # Use simple http server on scheduler for health checks
-    # See https://airflow.apache.org/docs/apache-airflow/stable/administration-and-deployment/logging-monitoring/check-health.html#scheduler-health-check-server
-    # yamllint enable rule:line-length
-    AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true'
-    # WARNING: Use _PIP_ADDITIONAL_REQUIREMENTS option ONLY for a quick checks
-    # for other purpose (development, test and especially production usage) build/extend Airflow image.
-    _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
-  volumes:
-    - ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags
-    - ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs
-    - ${AIRFLOW_PROJ_DIR:-.}/config:/opt/airflow/config
-    - ${AIRFLOW_PROJ_DIR:-.}/plugins:/opt/airflow/plugins
-    - ${AIRFLOW_PROJ_DIR:-.}/scripts:/opt/airflow/scripts
-    - ${AIRFLOW_PROJ_DIR:-.}/tests:/opt/airflow/tests
-  user: '${AIRFLOW_UID:-50000}:0'
-  depends_on: &airflow-common-depends-on
-    cache:
-      condition: service_healthy
-    db:
-      condition: service_healthy
-
-services:
-  backoffice-webserver:
-    build:
-      context: backoffice-repo/backoffice
-      dockerfile: Dockerfile
-    depends_on:
-      cache:
-        condition: service_started
-      db:
-        condition: service_started
-      es:
-        condition: service_healthy
-    volumes:
-      - ./backoffice-repo/backoffice:/app:z
-    env_file:
-      - ./backoffice-repo/backoffice/.envs/local/.django
-      - ./backoffice-repo/backoffice/.envs/local/.postgres
-    ports:
-      - '8001:8001'
-    command: sh -c 'poetry run python manage.py migrate && poetry run python manage.py opensearch index rebuild --force && poetry run python manage.py opensearch document index --force && poetry run python manage.py runserver 0.0.0.0:8001'
-  cache:
-    extends:
-      file: docker-compose.services.yml
-      service: cache
-    ports:
-      - '6379:6379'
-  db:
-    extends:
-      file: docker-compose.services.yml
-      service: db
-    ports:
-      - '5432:5432'
-    command: postgres -c 'max_connections=200'
-  mq:
-    extends:
-      file: docker-compose.services.yml
-      service: mq
-    ports:
-      - '5672:5672'
-      - '15672:15672'
-  es:
-    extends:
-      file: docker-compose.services.yml
-      service: es
-    ports:
-      - '9200:9200'
-  ui:
-    image: inspirehep/nginx:1.19-with-vts
-    ports:
-      - '8080:8080'
-    volumes:
-      - ./ui/docker/nginx/config/dev.conf:/etc/nginx/conf.d/default.conf
-    depends_on:
-      - hep-ui
-      - hep-web
-      - record-editor
-  hep-ui:
-    build:
-      context: ui
-      dockerfile: Dockerfile-local
-    ports:
-      - '3000:3000'
-    volumes:
-      - './ui/src:/usr/src/app/src'
-      - './ui/package.json:/usr/src/app/package.json'
-      - './ui/yarn.lock:/usr/src/app/yarn.lock'
-  record-editor:
-    build:
-      context: record-editor
-      dockerfile: Dockerfile-local
-    ports:
-      - '8081:8081'
-    volumes:
-      - ./record-editor/docker/nginx/config/nginx.conf:/etc/nginx/conf.d/default.conf
-  hep-web:
-    extends:
-      file: docker-compose.services.yml
-      service: app
-    volumes:
-      - ./backend:/opt/inspire
-    ports:
-      - '8000:8000'
-      - '7777:7777'
-    entrypoint: gunicorn
-    command: --reload -t 99999 -b 0.0.0.0:8000 --access-logfile "-" --error-logfile - inspirehep.wsgi:application
-  hep-worker:
-    extends:
-      file: docker-compose.services.yml
-      service: app
-    entrypoint: celery
-    volumes:
-      - ./backend:/opt/inspire
-    command: -A inspirehep.celery worker -l INFO --purge --queues celery,indexer_task,matcher,assign,redirect_references
-    depends_on:
-      cache:
-        condition: service_started
-      db:
-        condition: service_started
-      es:
-        condition: service_healthy
-      mq:
-        condition: service_started
-  flower:
-    extends:
-      file: docker-compose.services.yml
-      service: flower
-    ports:
-      - '5555:5555'
-  s3:
-    extends:
-      file: docker-compose.services.yml
-      service: s3
-    ports:
-      - '4572:4572'
-      - '4566:4566'
-  airflow-webserver:
-    <<: *airflow-common
-    command: webserver
-    ports:
-      - '8070:8080'
-    healthcheck:
-      test: ['CMD', 'curl', '--fail', 'http://localhost:8070/health']
-      interval: 30s
-      timeout: 10s
-      retries: 5
-      start_period: 30s
-    restart: always
-    depends_on:
-      <<: *airflow-common-depends-on
-      airflow-init:
-        condition: service_completed_successfully
-
-  airflow-scheduler:
-    <<: *airflow-common
-    command: scheduler
-    healthcheck:
-      test: ['CMD', 'curl', '--fail', 'http://localhost:8974/health']
-      interval: 30s
-      timeout: 10s
-      retries: 5
-      start_period: 30s
-    restart: always
-    depends_on:
-      <<: *airflow-common-depends-on
-      airflow-init:
-        condition: service_completed_successfully
-
-  airflow-worker:
-    <<: *airflow-common
-    command: celery worker
-    healthcheck:
-      # yamllint disable rule:line-length
-      test:
-        - 'CMD-SHELL'
-        - 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}" || celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
-      interval: 30s
-      timeout: 10s
-      retries: 5
-      start_period: 30s
-    environment:
-      <<: *airflow-common-env
-      # Required to handle warm shutdown of the celery workers properly
-      # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
-      DUMB_INIT_SETSID: '0'
-    restart: always
-    depends_on:
-      <<: *airflow-common-depends-on
-      airflow-init:
-        condition: service_completed_successfully
-
-  airflow-triggerer:
-    <<: *airflow-common
-    command: triggerer
-    healthcheck:
-      test:
-        [
-          'CMD-SHELL',
-          'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"',
-        ]
-      interval: 30s
-      timeout: 10s
-      retries: 5
-      start_period: 30s
-    restart: always
-    depends_on:
-      <<: *airflow-common-depends-on
-      airflow-init:
-        condition: service_completed_successfully
-
-  airflow-init:
-    <<: *airflow-common
-    entrypoint: /bin/bash
-    # yamllint disable rule:line-length
-    command:
-      - -c
-      - |
-        if [[ -z "${AIRFLOW_UID}" ]]; then
-          echo
-          echo -e "\033[1;33mWARNING!!!: AIRFLOW_UID not set!\e[0m"
-          echo "If you are on Linux, you SHOULD follow the instructions below to set "
-          echo "AIRFLOW_UID environment variable, otherwise files will be owned by root."
-          echo "For other operating systems you can get rid of the warning with manually created .env file:"
-          echo " See: https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#setting-the-right-airflow-user"
-          echo
-        fi
-        one_meg=1048576
-        mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg))
-        cpus_available=$$(grep -cE 'cpu[0-9]+' /proc/stat)
-        disk_available=$$(df / | tail -1 | awk '{print $$4}')
-        warning_resources="false"
-        if (( mem_available < 4000 )) ; then
-          echo
-          echo -e "\033[1;33mWARNING!!!: Not enough memory available for Docker.\e[0m"
-          echo "At least 4GB of memory required. You have $$(numfmt --to iec $$((mem_available * one_meg)))"
-          echo
-          warning_resources="true"
-        fi
-        if (( cpus_available < 2 )); then
-          echo
-          echo -e "\033[1;33mWARNING!!!: Not enough CPUS available for Docker.\e[0m"
-          echo "At least 2 CPUs recommended. You have $${cpus_available}"
-          echo
-          warning_resources="true"
-        fi
-        if (( disk_available < one_meg * 10 )); then
-          echo
-          echo -e "\033[1;33mWARNING!!!: Not enough Disk space available for Docker.\e[0m"
-          echo "At least 10 GBs recommended. You have $$(numfmt --to iec $$((disk_available * 1024 )))"
-          echo
-          warning_resources="true"
-        fi
-        if [[ $${warning_resources} == "true" ]]; then
-          echo
-          echo -e "\033[1;33mWARNING!!!: You have not enough resources to run Airflow (see above)!\e[0m"
-          echo "Please follow the instructions to increase amount of resources available:"
-          echo " https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#before-you-begin"
-          echo
-        fi
-        mkdir -p /sources/logs /sources/dags /sources/plugins
-        chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins}
-        exec /entrypoint airflow version
-    # yamllint enable rule:line-length
-    environment:
-      <<: *airflow-common-env
-      _AIRFLOW_DB_MIGRATE: 'true'
-      _AIRFLOW_WWW_USER_CREATE: 'true'
-      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
-      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
-      _PIP_ADDITIONAL_REQUIREMENTS: ''
-    user: '0:0'
-    volumes:
-      - ${AIRFLOW_PROJ_DIR:-.}:/sources
-
-  airflow-cli:
-    <<: *airflow-common
-    profiles:
-      - debug
-    environment:
-      <<: *airflow-common-env
-      CONNECTION_CHECK_MAX_COUNT: '0'
-    # Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252
-    command:
-      - bash
-      - -c
-      - airflow
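
Note for reviewers: the parent-level Makefile that now provides `make run`, `make run-inspirehep`, and `make run-backoffice` is not part of this diff, and the deleted `backoffice-repo/Makefile` above was the only place the backoffice/workflows init steps lived. A minimal, hypothetical sketch of how those pieces might look in the parent Makefile follows; the `run-backoffice` target name comes from the updated READMEs and the service names from the deleted docker-compose.yaml, but the paths and target wiring are assumptions, not the actual implementation:

```makefile
# Hypothetical sketch only -- the real parent-level Makefile is not shown in this diff.
# Assumes the Airflow paths from the deleted backoffice-repo/Makefile still apply
# and that the compose services keep the names from the deleted docker-compose.yaml.
export AIRFLOW_HOME=$(shell pwd)/backoffice-repo/workflows
export AIRFLOW_PROJ_DIR=$(AIRFLOW_HOME)

run-backoffice:
	docker compose up backoffice-webserver airflow-webserver airflow-scheduler airflow-worker airflow-triggerer

# Init recipes carried over verbatim from the deleted backoffice-repo/Makefile
backoffice-init:
	docker compose exec backoffice-webserver python manage.py create_groups
	docker compose exec backoffice-webserver python manage.py loaddata backoffice/users/fixtures/users.json
	docker compose exec backoffice-webserver python manage.py loaddata backoffice/users/fixtures/tokens.json

workflows-init:
	docker compose exec airflow-webserver /entrypoint airflow connections import ./scripts/connections/connections.json
	docker compose exec airflow-webserver /entrypoint airflow variables import ./scripts/variables/variables.json
```

If the init targets were dropped deliberately, the demo credentials the old recipes printed (email admin@admin.com / password admin for the backoffice; airflow / airflow for Airflow) should be documented in one of the READMEs instead.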