diff --git a/cli/tests/conftest.py b/cli/tests/conftest.py index 958ec5369..8e90d24b2 100644 --- a/cli/tests/conftest.py +++ b/cli/tests/conftest.py @@ -45,6 +45,9 @@ os.environ["POSTGRES_PORT"] = "7001" os.environ["PIXL_DB_NAME"] = "pixl" +os.environ["ORTHANC_ANON_USERNAME"] = "orthanc" +os.environ["ORTHANC_ANON_PASSWORD"] = "orthanc" # noqa: S105, hardcoded password + @pytest.fixture(autouse=True) def export_dir(tmp_path_factory: pytest.TempPathFactory) -> Path: diff --git a/docker-compose.yml b/docker-compose.yml index 9ac587402..4125666b2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -11,8 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -version: "3.8" - ################################################################################ # Common @@ -20,342 +18,348 @@ x-http-proxy: &http-proxy ${HTTP_PROXY} x-https-proxy: &https-proxy ${HTTPS_PROXY} x-no-proxy: &no-proxy localhost,0.0.0.0,127.0.0.1,uclvlddpragae07,hasher-api,orthanc-raw x-proxy-common: &proxy-common - HTTP_PROXY: *http-proxy - http_proxy: *http-proxy - HTTPS_PROXY: *https-proxy - https_proxy: *https-proxy - NO_PROXY: *no-proxy - no_proxy: *no-proxy + HTTP_PROXY: *http-proxy + http_proxy: *http-proxy + HTTPS_PROXY: *https-proxy + https_proxy: *https-proxy + NO_PROXY: *no-proxy + no_proxy: *no-proxy x-build-args-common: &build-args-common - <<: [*proxy-common] + <<: [*proxy-common] x-pixl-common-env: &pixl-common-env - DEBUG: ${DEBUG} - LOG_LEVEL: ${LOG_LEVEL} + DEBUG: ${DEBUG} + LOG_LEVEL: ${LOG_LEVEL} x-pixl-rabbit-mq: &pixl-rabbit-mq - RABBITMQ_HOST: "queue" # Name of the queue service - RABBITMQ_PORT: "5672" - RABBITMQ_USERNAME: ${RABBITMQ_USERNAME} - RABBITMQ_PASSWORD: ${RABBITMQ_PASSWORD} + RABBITMQ_HOST: "queue" # Name of the queue service + RABBITMQ_PORT: "5672" + RABBITMQ_USERNAME: ${RABBITMQ_USERNAME} + RABBITMQ_PASSWORD: ${RABBITMQ_PASSWORD} x-emap-db: &emap-db - EMAP_UDS_HOST: ${EMAP_UDS_HOST} - EMAP_UDS_PORT: ${EMAP_UDS_PORT} - EMAP_UDS_NAME: ${EMAP_UDS_NAME} - EMAP_UDS_USER: ${EMAP_UDS_USER} - EMAP_UDS_PASSWORD: ${EMAP_UDS_PASSWORD} - EMAP_UDS_SCHEMA_NAME: ${EMAP_UDS_SCHEMA_NAME} + EMAP_UDS_HOST: ${EMAP_UDS_HOST} + EMAP_UDS_PORT: ${EMAP_UDS_PORT} + EMAP_UDS_NAME: ${EMAP_UDS_NAME} + EMAP_UDS_USER: ${EMAP_UDS_USER} + EMAP_UDS_PASSWORD: ${EMAP_UDS_PASSWORD} + EMAP_UDS_SCHEMA_NAME: ${EMAP_UDS_SCHEMA_NAME} x-pixl-db: &pixl-db - PIXL_DB_HOST: ${PIXL_DB_HOST} - PIXL_DB_PORT: ${PIXL_DB_PORT} - PIXL_DB_USER: ${PIXL_DB_USER} - PIXL_DB_PASSWORD: ${PIXL_DB_PASSWORD} - PIXL_DB_NAME: ${PIXL_DB_NAME} + PIXL_DB_HOST: ${PIXL_DB_HOST} + PIXL_DB_PORT: ${PIXL_DB_PORT} + PIXL_DB_USER: ${PIXL_DB_USER} + PIXL_DB_PASSWORD: ${PIXL_DB_PASSWORD} + PIXL_DB_NAME: ${PIXL_DB_NAME} x-azure-keyvault: &azure-keyvault - AZURE_CLIENT_ID: ${EXPORT_AZ_CLIENT_ID} - AZURE_CLIENT_SECRET: ${EXPORT_AZ_CLIENT_PASSWORD} - AZURE_TENANT_ID: ${EXPORT_AZ_TENANT_ID} - AZURE_KEY_VAULT_NAME: ${EXPORT_AZ_KEY_VAULT_NAME} + AZURE_CLIENT_ID: ${EXPORT_AZ_CLIENT_ID} + AZURE_CLIENT_SECRET: ${EXPORT_AZ_CLIENT_PASSWORD} + AZURE_TENANT_ID: ${EXPORT_AZ_TENANT_ID} + AZURE_KEY_VAULT_NAME: ${EXPORT_AZ_KEY_VAULT_NAME} x-logs-volume: &logs-volume - type: volume - source: logs - target: /logs + type: volume + source: logs + target: /logs volumes: - logs: - orthanc-anon-data: - orthanc-raw-data: - postgres-data: - exports: - rabbitmq: + logs: + orthanc-anon-data: + orthanc-raw-data: + postgres-data: + exports: + rabbitmq: 
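Reviewer note on the `cli/tests/conftest.py` hunk above: the two `ORTHANC_ANON_*` values are written to `os.environ` at module scope so that any `config(...)` lookup made while the CLI tests are imported resolves without a `.env` file. A minimal sketch of why the ordering matters — python-decouple gives the process environment precedence, so values set before the application modules are imported are the ones those modules see:

```python
# Sketch only: environment set up before application imports, mirroring conftest.py.
import os

os.environ.setdefault("ORTHANC_ANON_USERNAME", "orthanc")
os.environ.setdefault("ORTHANC_ANON_PASSWORD", "orthanc")  # noqa: S105, test-only value

from decouple import config  # imported after the environment is prepared

# Any module-level config("ORTHANC_ANON_USERNAME") in the code under test now resolves.
assert config("ORTHANC_ANON_USERNAME") == "orthanc"
```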
networks: - pixl-net: + pixl-net: ################################################################################ # Services services: - hasher-api: - build: - context: . - dockerfile: ./docker/hasher-api/Dockerfile - args: - <<: *build-args-common - environment: - <<: [*proxy-common, *pixl-common-env] - AZURE_CLIENT_ID: ${HASHER_API_AZ_CLIENT_ID} - AZURE_CLIENT_SECRET: ${HASHER_API_AZ_CLIENT_PASSWORD} - AZURE_TENANT_ID: ${HASHER_API_AZ_TENANT_ID} - AZURE_KEY_VAULT_NAME: ${HASHER_API_AZ_KEY_VAULT_NAME} - AZURE_KEY_VAULT_SECRET_NAME: ${HASHER_API_AZ_KEY_VAULT_SECRET_NAME} - env_file: - - ./docker/common.env - ports: - - "${HASHER_API_PORT}:8000" - volumes: - - *logs-volume - networks: - - pixl-net - healthcheck: - test: ["CMD", "curl", "-f", "http://hasher-api:8000/heart-beat"] - interval: 10s - timeout: 30s - retries: 5 - restart: "no" + hasher-api: + build: + context: . + dockerfile: ./docker/hasher-api/Dockerfile + args: + <<: *build-args-common + environment: + <<: [*proxy-common, *pixl-common-env] + AZURE_CLIENT_ID: ${HASHER_API_AZ_CLIENT_ID} + AZURE_CLIENT_SECRET: ${HASHER_API_AZ_CLIENT_PASSWORD} + AZURE_TENANT_ID: ${HASHER_API_AZ_TENANT_ID} + AZURE_KEY_VAULT_NAME: ${HASHER_API_AZ_KEY_VAULT_NAME} + AZURE_KEY_VAULT_SECRET_NAME: ${HASHER_API_AZ_KEY_VAULT_SECRET_NAME} + env_file: + - ./docker/common.env + ports: + - "${HASHER_API_PORT}:8000" + volumes: + - *logs-volume + networks: + - pixl-net + healthcheck: + test: ["CMD", "curl", "-f", "http://hasher-api:8000/heart-beat"] + interval: 10s + timeout: 30s + retries: 5 + restart: "no" - orthanc-anon: - build: - context: . - dockerfile: ./docker/orthanc-anon/Dockerfile - args: - <<: *build-args-common - platform: linux/amd64 - command: /run/secrets - environment: - <<: [*proxy-common, *pixl-common-env, *azure-keyvault] - ORTHANC_NAME: "PIXL: Anon" - ORTHANC_USERNAME: ${ORTHANC_ANON_USERNAME} - ORTHANC_PASSWORD: ${ORTHANC_ANON_PASSWORD} - ORTHANC_ANON_AE_TITLE: ${ORTHANC_ANON_AE_TITLE} - ORTHANC_AUTOROUTE_ANON_TO_ENDPOINT: ${ORTHANC_AUTOROUTE_ANON_TO_ENDPOINT} - ORTHANC_RAW_AE_TITLE: ${ORTHANC_RAW_AE_TITLE} - ORTHANC_RAW_DICOM_PORT: "4242" - ORTHANC_RAW_HOSTNAME: "orthanc-raw" - ORTHANC_URL: "http://localhost:8042" - PIXL_DB_HOST: ${PIXL_DB_HOST} - PIXL_DB_PORT: ${PIXL_DB_PORT} - PIXL_DB_NAME: ${PIXL_DB_NAME} - PIXL_DB_USER: ${PIXL_DB_USER} - PIXL_DB_PASSWORD: ${PIXL_DB_PASSWORD} - DICOM_WEB_PLUGIN_ENABLED: ${ENABLE_DICOM_WEB} - HASHER_API_AZ_NAME: "hasher-api" - HASHER_API_PORT: 8000 - HTTP_TIMEOUT: ${ORTHANC_ANON_HTTP_TIMEOUT} - AZ_DICOM_ENDPOINT_NAME: ${AZ_DICOM_ENDPOINT_NAME} - AZ_DICOM_ENDPOINT_URL: ${AZ_DICOM_ENDPOINT_URL} - AZ_DICOM_ENDPOINT_TOKEN: ${AZ_DICOM_ENDPOINT_TOKEN} - AZ_DICOM_ENDPOINT_CLIENT_ID: ${AZ_DICOM_ENDPOINT_CLIENT_ID} - AZ_DICOM_ENDPOINT_CLIENT_SECRET: ${AZ_DICOM_ENDPOINT_CLIENT_SECRET} - AZ_DICOM_ENDPOINT_TENANT_ID: ${AZ_DICOM_ENDPOINT_TENANT_ID} - AZ_DICOM_TOKEN_REFRESH_SECS: "600" - TIME_OFFSET: "${STUDY_TIME_OFFSET}" - SALT_VALUE: ${SALT_VALUE}" - PROJECT_CONFIGS_DIR: /${PROJECT_CONFIGS_DIR:-/projects/configs} - ports: - - "${ORTHANC_ANON_DICOM_PORT}:4242" - - "${ORTHANC_ANON_WEB_PORT}:8042" - volumes: - - type: volume - source: orthanc-anon-data - target: /var/lib/orthanc/db - - ${PWD}/orthanc/orthanc-anon/config:/run/secrets:ro - - ${PWD}/projects/configs:/${PROJECT_CONFIGS_DIR:-/projects/configs}:ro - networks: - - pixl-net - # needed for same reason as export-api - extra_hosts: - - "host.docker.internal:host-gateway" - depends_on: - postgres: - condition: service_healthy - healthcheck: - test: - [ - 
"CMD-SHELL", "/probes/test-aliveness.py --user=$ORTHANC_USERNAME --pwd=$ORTHANC_PASSWORD" - ] - start_period: 10s - retries: 2 - interval: 3s - timeout: 2s - restart: "no" + orthanc-anon: + build: + context: . + dockerfile: ./docker/orthanc-anon/Dockerfile + args: + <<: *build-args-common + platform: linux/amd64 + command: /run/secrets + environment: + <<: [*proxy-common, *pixl-common-env, *azure-keyvault] + ORTHANC_NAME: "PIXL: Anon" + ORTHANC_USERNAME: ${ORTHANC_ANON_USERNAME} + ORTHANC_PASSWORD: ${ORTHANC_ANON_PASSWORD} + ORTHANC_ANON_AE_TITLE: ${ORTHANC_ANON_AE_TITLE} + ORTHANC_AUTOROUTE_ANON_TO_ENDPOINT: ${ORTHANC_AUTOROUTE_ANON_TO_ENDPOINT} + ORTHANC_RAW_AE_TITLE: ${ORTHANC_RAW_AE_TITLE} + ORTHANC_RAW_DICOM_PORT: "4242" + ORTHANC_RAW_HOSTNAME: "orthanc-raw" + # For the export API + ORTHANC_ANON_URL: "http://localhost:8042" + ORTHANC_ANON_USERNAME: ${ORTHANC_ANON_USERNAME} + ORTHANC_ANON_PASSWORD: ${ORTHANC_ANON_PASSWORD} + PIXL_DB_HOST: ${PIXL_DB_HOST} + PIXL_DB_PORT: ${PIXL_DB_PORT} + PIXL_DB_NAME: ${PIXL_DB_NAME} + PIXL_DB_USER: ${PIXL_DB_USER} + PIXL_DB_PASSWORD: ${PIXL_DB_PASSWORD} + DICOM_WEB_PLUGIN_ENABLED: ${ENABLE_DICOM_WEB} + HASHER_API_AZ_NAME: "hasher-api" + HASHER_API_PORT: 8000 + HTTP_TIMEOUT: ${ORTHANC_ANON_HTTP_TIMEOUT} + AZ_DICOM_ENDPOINT_NAME: ${AZ_DICOM_ENDPOINT_NAME} + AZ_DICOM_ENDPOINT_URL: ${AZ_DICOM_ENDPOINT_URL} + AZ_DICOM_ENDPOINT_TOKEN: ${AZ_DICOM_ENDPOINT_TOKEN} + AZ_DICOM_ENDPOINT_CLIENT_ID: ${AZ_DICOM_ENDPOINT_CLIENT_ID} + AZ_DICOM_ENDPOINT_CLIENT_SECRET: ${AZ_DICOM_ENDPOINT_CLIENT_SECRET} + AZ_DICOM_ENDPOINT_TENANT_ID: ${AZ_DICOM_ENDPOINT_TENANT_ID} + AZ_DICOM_TOKEN_REFRESH_SECS: "600" + TIME_OFFSET: "${STUDY_TIME_OFFSET}" + SALT_VALUE: ${SALT_VALUE}" + PROJECT_CONFIGS_DIR: /${PROJECT_CONFIGS_DIR:-/projects/configs} + ports: + - "${ORTHANC_ANON_DICOM_PORT}:4242" + - "${ORTHANC_ANON_WEB_PORT}:8042" + volumes: + - type: volume + source: orthanc-anon-data + target: /var/lib/orthanc/db + - ${PWD}/orthanc/orthanc-anon/config:/run/secrets:ro + - ${PWD}/projects/configs:/${PROJECT_CONFIGS_DIR:-/projects/configs}:ro + networks: + - pixl-net + # needed for same reason as export-api + extra_hosts: + - "host.docker.internal:host-gateway" + depends_on: + postgres: + condition: service_healthy + healthcheck: + test: + [ + "CMD-SHELL", + "/probes/test-aliveness.py --user=$ORTHANC_USERNAME --pwd=$ORTHANC_PASSWORD", + ] + start_period: 10s + retries: 2 + interval: 3s + timeout: 2s + restart: "no" - orthanc-raw: - build: - context: . 
- dockerfile: ./docker/orthanc-raw/Dockerfile - args: - <<: *build-args-common - ORTHANC_RAW_MAXIMUM_STORAGE_SIZE: ${ORTHANC_RAW_MAXIMUM_STORAGE_SIZE} - ORTHANC_RAW_JOB_HISTORY_SIZE: ${ORTHANC_RAW_JOB_HISTORY_SIZE} - ORTHANC_RAW_CONCURRENT_JOBS: ${ORTHANC_RAW_CONCURRENT_JOBS} - platform: linux/amd64 - command: /run/secrets - environment: - <<: [*pixl-db, *proxy-common, *pixl-common-env] - ORTHANC_NAME: "PIXL: Raw" - ORTHANC_USERNAME: ${ORTHANC_RAW_USERNAME} - ORTHANC_PASSWORD: ${ORTHANC_RAW_PASSWORD} - ORTHANC_RAW_AE_TITLE: ${ORTHANC_RAW_AE_TITLE} - ORTHANC_AUTOROUTE_RAW_TO_ANON: ${ORTHANC_AUTOROUTE_RAW_TO_ANON} - ORTHANC_RAW_RECORD_HEADERS: ${ORTHANC_RAW_RECORD_HEADERS} - ORTHANC_RAW_HEADER_LOG_PATH: ${ORTHANC_RAW_HEADER_LOG_PATH} - VNAQR_AE_TITLE : ${VNAQR_AE_TITLE} - VNAQR_DICOM_PORT: ${VNAQR_DICOM_PORT} - VNAQR_IP_ADDR: ${VNAQR_IP_ADDR} - ORTHANC_ANON_AE_TITLE: ${ORTHANC_ANON_AE_TITLE} - ORTHANC_ANON_DICOM_PORT: "4242" - ORTHANC_ANON_HOSTNAME: "orthanc-anon" - PROJECT_CONFIGS_DIR: /${PROJECT_CONFIGS_DIR:-/projects/configs} - ports: - - "${ORTHANC_RAW_DICOM_PORT}:4242" - - "${ORTHANC_RAW_WEB_PORT}:8042" - volumes: - - type: volume - source: orthanc-raw-data - target: /var/lib/orthanc/db - - ${PWD}/projects/configs:/${PROJECT_CONFIGS_DIR:-/projects/configs}:ro - networks: - - pixl-net - depends_on: - postgres: - condition: service_healthy - orthanc-anon: - condition: service_started - healthcheck: - test: - [ - "CMD-SHELL", "/probes/test-aliveness.py --user=$ORTHANC_USERNAME --pwd=$ORTHANC_PASSWORD" - ] - start_period: 10s - retries: 10 - interval: 3s - timeout: 2s - restart: "no" + orthanc-raw: + build: + context: . + dockerfile: ./docker/orthanc-raw/Dockerfile + args: + <<: *build-args-common + ORTHANC_RAW_MAXIMUM_STORAGE_SIZE: ${ORTHANC_RAW_MAXIMUM_STORAGE_SIZE} + ORTHANC_RAW_JOB_HISTORY_SIZE: ${ORTHANC_RAW_JOB_HISTORY_SIZE} + ORTHANC_RAW_CONCURRENT_JOBS: ${ORTHANC_RAW_CONCURRENT_JOBS} + platform: linux/amd64 + command: /run/secrets + environment: + <<: [*pixl-db, *proxy-common, *pixl-common-env] + ORTHANC_NAME: "PIXL: Raw" + ORTHANC_USERNAME: ${ORTHANC_RAW_USERNAME} + ORTHANC_PASSWORD: ${ORTHANC_RAW_PASSWORD} + ORTHANC_RAW_AE_TITLE: ${ORTHANC_RAW_AE_TITLE} + ORTHANC_AUTOROUTE_RAW_TO_ANON: ${ORTHANC_AUTOROUTE_RAW_TO_ANON} + ORTHANC_RAW_RECORD_HEADERS: ${ORTHANC_RAW_RECORD_HEADERS} + ORTHANC_RAW_HEADER_LOG_PATH: ${ORTHANC_RAW_HEADER_LOG_PATH} + VNAQR_AE_TITLE: ${VNAQR_AE_TITLE} + VNAQR_DICOM_PORT: ${VNAQR_DICOM_PORT} + VNAQR_IP_ADDR: ${VNAQR_IP_ADDR} + ORTHANC_ANON_AE_TITLE: ${ORTHANC_ANON_AE_TITLE} + ORTHANC_ANON_DICOM_PORT: "4242" + ORTHANC_ANON_HOSTNAME: "orthanc-anon" + PROJECT_CONFIGS_DIR: /${PROJECT_CONFIGS_DIR:-/projects/configs} + ports: + - "${ORTHANC_RAW_DICOM_PORT}:4242" + - "${ORTHANC_RAW_WEB_PORT}:8042" + volumes: + - type: volume + source: orthanc-raw-data + target: /var/lib/orthanc/db + - ${PWD}/projects/configs:/${PROJECT_CONFIGS_DIR:-/projects/configs}:ro + networks: + - pixl-net + depends_on: + postgres: + condition: service_healthy + orthanc-anon: + condition: service_started + healthcheck: + test: + [ + "CMD-SHELL", + "/probes/test-aliveness.py --user=$ORTHANC_USERNAME --pwd=$ORTHANC_PASSWORD", + ] + start_period: 10s + retries: 10 + interval: 3s + timeout: 2s + restart: "no" - queue: - image: rabbitmq:3.12.9-management - hostname: queue-host - environment: - RABBITMQ_DEFAULT_USER: ${RABBITMQ_USERNAME} - RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD} - RABBITMQ_NODENAME: "rabbit@queue-host" - healthcheck: - test: rabbitmq-diagnostics -q check_running - interval: 30s - 
timeout: 30s - retries: 3 - ports: - - "${RABBITMQ_PORT}:5672" - - "${RABBITMQ_ADMIN_PORT}:15672" - networks: - - pixl-net - volumes: - - rabbitmq:/var/lib/rabbitmq/mnesia + queue: + image: rabbitmq:3.12.9-management + hostname: queue-host + environment: + RABBITMQ_DEFAULT_USER: ${RABBITMQ_USERNAME} + RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD} + RABBITMQ_NODENAME: "rabbit@queue-host" + healthcheck: + test: rabbitmq-diagnostics -q check_running + interval: 30s + timeout: 30s + retries: 3 + ports: + - "${RABBITMQ_PORT}:5672" + - "${RABBITMQ_ADMIN_PORT}:15672" + networks: + - pixl-net + volumes: + - rabbitmq:/var/lib/rabbitmq/mnesia - export-api: - build: - context: . - dockerfile: ./docker/export-api/Dockerfile - args: - <<: *build-args-common - environment: - <<: - [ - *pixl-db, - *emap-db, - *proxy-common, - *pixl-common-env, - *pixl-rabbit-mq, - *azure-keyvault, - ] - ORTHANC_ANON_USERNAME: ${ORTHANC_ANON_USERNAME} - ORTHANC_ANON_PASSWORD: ${ORTHANC_ANON_PASSWORD} - PROJECT_CONFIGS_DIR: /${PROJECT_CONFIGS_DIR:-/projects/configs} - PIXL_MAX_MESSAGES_IN_FLIGHT: ${PIXL_MAX_MESSAGES_IN_FLIGHT} - env_file: - - ./docker/common.env - depends_on: - queue: - condition: service_healthy - postgres: - condition: service_healthy - hasher-api: - condition: service_healthy - ports: - - "${PIXL_EXPORT_API_PORT}:8000" - healthcheck: - interval: 10s - timeout: 30s - retries: 5 - networks: - - pixl-net - # needed for testing under GHA (linux), so this container - # can reach the test FTP server running on the docker host - extra_hosts: - - "host.docker.internal:host-gateway" - volumes: - - ${PWD}/projects/exports:/run/projects/exports - - ${PWD}/projects/configs:/${PROJECT_CONFIGS_DIR:-/projects/configs}:ro + export-api: + build: + context: . + dockerfile: ./docker/export-api/Dockerfile + args: + <<: *build-args-common + environment: + <<: + [ + *pixl-db, + *emap-db, + *proxy-common, + *pixl-common-env, + *pixl-rabbit-mq, + *azure-keyvault, + ] + ORTHANC_ANON_URL: "http://orthanc-anon:8042" + ORTHANC_ANON_USERNAME: ${ORTHANC_ANON_USERNAME} + ORTHANC_ANON_PASSWORD: ${ORTHANC_ANON_PASSWORD} + PROJECT_CONFIGS_DIR: /${PROJECT_CONFIGS_DIR:-/projects/configs} + PIXL_MAX_MESSAGES_IN_FLIGHT: ${PIXL_MAX_MESSAGES_IN_FLIGHT} + env_file: + - ./docker/common.env + depends_on: + queue: + condition: service_healthy + postgres: + condition: service_healthy + hasher-api: + condition: service_healthy + ports: + - "${PIXL_EXPORT_API_PORT}:8000" + healthcheck: + interval: 10s + timeout: 30s + retries: 5 + networks: + - pixl-net + # needed for testing under GHA (linux), so this container + # can reach the test FTP server running on the docker host + extra_hosts: + - "host.docker.internal:host-gateway" + volumes: + - ${PWD}/projects/exports:/run/projects/exports + - ${PWD}/projects/configs:/${PROJECT_CONFIGS_DIR:-/projects/configs}:ro - imaging-api: - build: - context: . 
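On the `export-api` hunk above: the uploader code now runs both inside `orthanc-anon` (where `ORTHANC_ANON_URL` is `http://localhost:8042`) and inside `export-api` (where it is `http://orthanc-anon:8042`), so only the environment differs between the two containers. A hedged connectivity sketch using those variables — the `/dicom-web/servers` listing is a convenient smoke test, not something this diff itself adds:

```python
# Sketch: the uploader needs nothing beyond the ORTHANC_ANON_* variables to reach Orthanc;
# which hostname the URL carries depends on the container this code runs in.
import requests
from decouple import config

orthanc_url = config("ORTHANC_ANON_URL", default="http://orthanc-anon:8042")
auth = (config("ORTHANC_ANON_USERNAME"), config("ORTHANC_ANON_PASSWORD"))

# List the DICOMweb servers currently registered in this Orthanc instance.
response = requests.get(f"{orthanc_url}/dicom-web/servers", auth=auth, timeout=10)
response.raise_for_status()
print(response.json())  # e.g. ["my-project-slug"] once an uploader has registered one
```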
- dockerfile: ./docker/imaging-api/Dockerfile - args: - <<: *build-args-common - depends_on: - queue: - condition: service_healthy - orthanc-raw: - condition: service_healthy - healthcheck: - test: curl -f http://0.0.0.0:8000/heart-beat - interval: 10s - timeout: 30s - retries: 5 - networks: - - pixl-net - environment: - <<: [*pixl-rabbit-mq, *proxy-common, *pixl-common-env] - ORTHANC_RAW_URL: ${ORTHANC_RAW_URL} - ORTHANC_RAW_USERNAME: ${ORTHANC_RAW_USERNAME} - ORTHANC_RAW_PASSWORD: ${ORTHANC_RAW_PASSWORD} - ORTHANC_RAW_AE_TITLE: ${ORTHANC_RAW_AE_TITLE} - VNAQR_MODALITY: ${VNAQR_MODALITY} - SKIP_ALEMBIC: ${SKIP_ALEMBIC} - PIXL_DB_HOST: ${PIXL_DB_HOST} - PIXL_DB_PORT: ${PIXL_DB_PORT} - PIXL_DB_NAME: ${PIXL_DB_NAME} - PIXL_DB_USER: ${PIXL_DB_USER} - PIXL_DB_PASSWORD: ${PIXL_DB_PASSWORD} - PIXL_DICOM_TRANSFER_TIMEOUT: ${PIXL_DICOM_TRANSFER_TIMEOUT} - PIXL_QUERY_TIMEOUT: ${PIXL_QUERY_TIMEOUT} - PIXL_MAX_MESSAGES_IN_FLIGHT: ${PIXL_MAX_MESSAGES_IN_FLIGHT} - ports: - - "${PIXL_IMAGING_API_PORT}:8000" + imaging-api: + build: + context: . + dockerfile: ./docker/imaging-api/Dockerfile + args: + <<: *build-args-common + depends_on: + queue: + condition: service_healthy + orthanc-raw: + condition: service_healthy + healthcheck: + test: curl -f http://0.0.0.0:8000/heart-beat + interval: 10s + timeout: 30s + retries: 5 + networks: + - pixl-net + environment: + <<: [*pixl-rabbit-mq, *proxy-common, *pixl-common-env] + ORTHANC_RAW_URL: ${ORTHANC_RAW_URL} + ORTHANC_RAW_USERNAME: ${ORTHANC_RAW_USERNAME} + ORTHANC_RAW_PASSWORD: ${ORTHANC_RAW_PASSWORD} + ORTHANC_RAW_AE_TITLE: ${ORTHANC_RAW_AE_TITLE} + VNAQR_MODALITY: ${VNAQR_MODALITY} + SKIP_ALEMBIC: ${SKIP_ALEMBIC} + PIXL_DB_HOST: ${PIXL_DB_HOST} + PIXL_DB_PORT: ${PIXL_DB_PORT} + PIXL_DB_NAME: ${PIXL_DB_NAME} + PIXL_DB_USER: ${PIXL_DB_USER} + PIXL_DB_PASSWORD: ${PIXL_DB_PASSWORD} + PIXL_DICOM_TRANSFER_TIMEOUT: ${PIXL_DICOM_TRANSFER_TIMEOUT} + PIXL_QUERY_TIMEOUT: ${PIXL_QUERY_TIMEOUT} + PIXL_MAX_MESSAGES_IN_FLIGHT: ${PIXL_MAX_MESSAGES_IN_FLIGHT} + ports: + - "${PIXL_IMAGING_API_PORT}:8000" - ################################################################################ - # Data Stores - postgres: - build: - context: . - dockerfile: ./docker/postgres/Dockerfile - args: - <<: *build-args-common - environment: - POSTGRES_USER: ${PIXL_DB_USER} - POSTGRES_PASSWORD: ${PIXL_DB_PASSWORD} - POSTGRES_DB: ${PIXL_DB_NAME} - PGTZ: Europe/London - env_file: - - ./docker/common.env - command: postgres -c 'config_file=/etc/postgresql/postgresql.conf' - volumes: - - type: volume - source: postgres-data - target: /var/lib/postgresql/data - ports: - - "${POSTGRES_PORT}:5432" - healthcheck: - test: ["CMD", "pg_isready", "-U", "${PIXL_DB_USER}"] - interval: 10s - timeout: 30s - retries: 5 - restart: always - networks: - - pixl-net + ################################################################################ + # Data Stores + postgres: + build: + context: . 
+ dockerfile: ./docker/postgres/Dockerfile + args: + <<: *build-args-common + environment: + POSTGRES_USER: ${PIXL_DB_USER} + POSTGRES_PASSWORD: ${PIXL_DB_PASSWORD} + POSTGRES_DB: ${PIXL_DB_NAME} + PGTZ: Europe/London + env_file: + - ./docker/common.env + command: postgres -c 'config_file=/etc/postgresql/postgresql.conf' + volumes: + - type: volume + source: postgres-data + target: /var/lib/postgresql/data + ports: + - "${POSTGRES_PORT}:5432" + healthcheck: + test: ["CMD", "pg_isready", "-U", "${PIXL_DB_USER}"] + interval: 10s + timeout: 30s + retries: 5 + restart: always + networks: + - pixl-net diff --git a/pixl_core/README.md b/pixl_core/README.md index e72705497..a346de4a3 100644 --- a/pixl_core/README.md +++ b/pixl_core/README.md @@ -150,7 +150,7 @@ to implement this. The configuration for the DICOMweb server is controlled by the following environment variables and secrets: -- `"ORTHANC_URL"`: The URL of the Orthanc server from _where_ the upload will happen, this will typically be the `orthanc-anon` instance +- `"ORTHANC_ANON_URL"`: The URL of the Orthanc server from _where_ the upload will happen, this will typically be the `orthanc-anon` instance - The `"--dicomweb--username"` and `"--dicomweb--password"` for authentication, which are fetched from the [Azure Keyvault](../docs/setup/azure-keyvault.md) - The `"--dicomweb--url"` to define the DICOMweb endpoint in Orthanc, also fetched from the Azure Keyvault diff --git a/pixl_core/src/core/exports.py b/pixl_core/src/core/exports.py index aa71500f1..c884b6d99 100644 --- a/pixl_core/src/core/exports.py +++ b/pixl_core/src/core/exports.py @@ -133,9 +133,7 @@ def upload(self) -> None: logger.info(msg) else: - uploader = get_uploader( - self.project_slug, destination, project_config.project.azure_kv_alias - ) + uploader = get_uploader(self.project_slug) msg = f"Uploading parquet files for project {self.project_slug} via '{destination}'" logger.info(msg) diff --git a/pixl_core/src/core/uploader/__init__.py b/pixl_core/src/core/uploader/__init__.py index 1b3dca2dc..6d4be3568 100644 --- a/pixl_core/src/core/uploader/__init__.py +++ b/pixl_core/src/core/uploader/__init__.py @@ -23,21 +23,26 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING -from core.uploader._dicomweb import DicomWebUploader -from core.uploader._ftps import FTPSUploader +from core.project_config import load_project_config + +from ._dicomweb import DicomWebUploader +from ._ftps import FTPSUploader if TYPE_CHECKING: from core.uploader.base import Uploader # Intenitonally defined in __init__.py to avoid circular imports -def get_uploader(project_slug: str, destination: str, keyvault_alias: Optional[str]) -> Uploader: +def get_uploader(project_slug: str) -> Uploader: """Uploader Factory, returns uploader instance based on destination.""" choices: dict[str, type[Uploader]] = {"ftps": FTPSUploader, "dicomweb": DicomWebUploader} + project_config = load_project_config(project_slug) + destination = project_config.destination.dicom + try: - return choices[destination](project_slug, keyvault_alias) + return choices[destination](project_slug, project_config.project.azure_kv_alias) except KeyError: error_msg = f"Destination '{destination}' is currently not supported" diff --git a/pixl_core/src/core/uploader/_dicomweb.py b/pixl_core/src/core/uploader/_dicomweb.py index b10545ab6..7e2d84813 100644 --- a/pixl_core/src/core/uploader/_dicomweb.py +++ b/pixl_core/src/core/uploader/_dicomweb.py @@ -12,7 +12,7 @@ # See the License 
for the specific language governing permissions and # limitations under the License. -"""Uploader subclass for FTPS uploads.""" +"""Uploader subclass for DICOMweb uploads.""" from __future__ import annotations @@ -25,6 +25,8 @@ from core.uploader.base import Uploader +from ._orthanc import get_tags_by_study + class DicomWebUploader(Uploader): """Upload strategy for a DicomWeb server.""" @@ -36,82 +38,118 @@ def __init__(self, project_slug: str, keyvault_alias: Optional[str]) -> None: def _set_config(self) -> None: # Use the Azure KV alias as prefix if it exists, otherwise use the project name az_prefix = self.keyvault_alias - az_prefix = az_prefix if az_prefix else self.project_slug - - self.user = self.keyvault.fetch_secret(f"{az_prefix}--dicomweb--username") - self.password = self.keyvault.fetch_secret(f"{az_prefix}--dicomweb--password") - self.orthanc_url = config("ORTHANC_URL") - self.endpoint_name = self.keyvault.fetch_secret(f"{az_prefix}--dicomweb--url") - self.url = self.orthanc_url + "/dicom-web/servers/" + self.endpoint_name - - def upload_dicom_image(self) -> None: - msg = "Currently not implemented. Use `send_via_stow()` instead." - raise NotImplementedError(msg) - - def send_via_stow(self, resource_id: str) -> requests.Response: + self.az_prefix = az_prefix if az_prefix else self.project_slug + + self.orthanc_user = config("ORTHANC_ANON_USERNAME") + self.orthanc_password = config("ORTHANC_ANON_PASSWORD") + self.orthanc_url = config("ORTHANC_ANON_URL") + self.endpoint_user = self.keyvault.fetch_secret(f"{self.az_prefix}--dicomweb--username") + self.endpoint_password = self.keyvault.fetch_secret(f"{self.az_prefix}--dicomweb--password") + self.endpoint_url = self.keyvault.fetch_secret(f"{self.az_prefix}--dicomweb--url") + # The DICOMweb API endpoint on the Orthanc server, used by Orthanc to interact with the + # DICOMweb server. Note that this is different from the endpoint_url, which is the URL of + # the DICOMweb server itself. 
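The comment closing the hunk above distinguishes two URLs that are easy to conflate, so here is the same distinction with made-up values (the attribute names are from the diff; the hosts are illustrative only):

```python
# endpoint_url         : the remote DICOMweb server itself (secret fetched from the key vault)
# orthanc_dicomweb_url : Orthanc's own REST resource for the *named* server entry, which
#                        Orthanc then uses to talk to that remote endpoint on our behalf
az_prefix = "my-project"                                  # key vault alias or project slug
orthanc_url = "http://orthanc-anon:8042"                  # ORTHANC_ANON_URL
endpoint_url = "https://dicomweb.example.org/dicom-web"   # <prefix>--dicomweb--url secret

orthanc_dicomweb_url = orthanc_url + "/dicom-web/servers/" + az_prefix
assert orthanc_dicomweb_url == "http://orthanc-anon:8042/dicom-web/servers/my-project"
```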
+ self.orthanc_dicomweb_url = self.orthanc_url + "/dicom-web/servers/" + self.az_prefix + self.http_timeout = int(config("HTTP_TIMEOUT", default=30)) + + def upload_dicom_image(self, study_id: str) -> None: + pseudo_anon_image_id, _ = get_tags_by_study(study_id) + logger.info("Starting DICOMweb upload of '{}'", pseudo_anon_image_id) + self.send_via_stow(study_id, pseudo_anon_image_id) + logger.info("Finished DICOMweb upload of '{}'", pseudo_anon_image_id) + + def send_via_stow(self, resource_id: str, pseudo_anon_image_id: str) -> requests.Response: """Upload a Dicom resource to the DicomWeb server from within Orthanc.""" - if not self._check_dicomweb_server(): + if not self._check_dicomweb_server_exists(): logger.info("Creating new DICOMWeb credentials") self._setup_dicomweb_credentials() - headers = {"content-type": "application/dicom", "accept": "application/dicom+json"} + self._validate_dicomweb_server() + super().check_already_exported(pseudo_anon_image_id) + + headers = {"content-type": "application/json", "accept": "application/dicom+json"} payload = {"Resources": [resource_id], "Synchronous": False} try: response = requests.post( - self.url + "/stow", - auth=(self.user, self.password), + self.orthanc_dicomweb_url + "/stow", + auth=(self.orthanc_user, self.orthanc_password), headers=headers, data=json.dumps(payload), - timeout=30, + timeout=self.http_timeout, ) response.raise_for_status() except requests.exceptions.RequestException: logger.error("Failed to send via stow") raise - else: - logger.info("Dicom resource {} sent via stow", resource_id) + super().update_exported_timestamp(pseudo_anon_image_id) + logger.info("Dicom resource {} sent via stow", resource_id) return response - def _check_dicomweb_server(self) -> bool: + def _check_dicomweb_server_exists(self) -> bool: """Checks if the dicomweb server exists.""" - response = requests.get(self.url, auth=(self.user, self.password), timeout=30) + response = requests.get( + self.orthanc_dicomweb_url, + auth=(self.orthanc_user, self.orthanc_password), + timeout=self.http_timeout, + ) success_code = 200 if response.status_code != success_code: return False return True + def _validate_dicomweb_server(self) -> None: + """Check if the DICOMweb server is reachable from within the Orthanc instance.""" + connection_error = requests.exceptions.ConnectionError("DICOMweb server not reachable") + if not self._check_dicomweb_server_exists(): + logger.error("DICOMweb server not configured") + raise connection_error + + # If dicomweb server configured, check if we can reach it + try: + response = requests.post( + self.orthanc_dicomweb_url + "/get", + auth=(self.orthanc_user, self.orthanc_password), + data='{"Uri": "/studies"}', + headers={"content-type": "application/x-www-form-urlencoded"}, + timeout=self.http_timeout, + ) + response.raise_for_status() + except requests.exceptions.RequestException as e: + logger.error("Failed to reach DICOMweb server") + raise connection_error from e + def _setup_dicomweb_credentials(self) -> None: """ Add the necessary credentials to the DicomWeb server in Orthanc. - This dyniamically creates a new endpoint in Orthanc with the necessary credentials, so we + This dynamically creates a new endpoint in Orthanc with the necessary credentials, so we can avoid hardcoding the credentials in the Orthanc configuration at build time. 
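One consequence of the `"Synchronous": False` payload in `send_via_stow()` above is that Orthanc replies before the transfer has finished, which is why the system test further down polls the receiving server rather than the response. If a caller ever needed to track the transfer itself, the job could be polled directly — a sketch that assumes Orthanc's usual asynchronous-job reply shape (`{"ID": ..., "Path": "/jobs/<id>"}`), which is not verified anywhere in this diff:

```python
# Hedged sketch: follow the asynchronous stow job that Orthanc creates.
import time

import requests


def wait_for_orthanc_job(orthanc_url: str, auth: tuple, stow_response: requests.Response) -> str:
    """Poll the job referenced by an asynchronous Orthanc response until it settles."""
    job_path = stow_response.json()["Path"]  # assumed shape, e.g. "/jobs/<job-id>"
    while True:
        state = requests.get(orthanc_url + job_path, auth=auth, timeout=10).json()["State"]
        if state in ("Success", "Failure"):
            return state
        time.sleep(1)
```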
""" - DICOM_ENDPOINT_URL = config("DICOM_ENDPOINT_URL") - HTTP_TIMEOUT = int(config("HTTP_TIMEOUT", default=30)) - dicomweb_config = { - "Url": DICOM_ENDPOINT_URL, - "Username": self.user, - "Password": self.password, + "Url": self.endpoint_url, + "Username": self.endpoint_user, + "Password": self.endpoint_password, "HasDelete": True, - "Timeout": HTTP_TIMEOUT, + "Timeout": self.http_timeout, } headers = {"content-type": "application/json"} try: - requests.put( - self.url, - auth=(self.user, self.password), + response = requests.put( + self.orthanc_dicomweb_url, + auth=(self.orthanc_user, self.orthanc_password), headers=headers, data=json.dumps(dicomweb_config), timeout=10, ) + response.raise_for_status() except requests.exceptions.RequestException: - logger.error("Failed to update DICOMweb token") + logger.error("Failed to update DICOMweb config for {}", self.orthanc_dicomweb_url) raise else: - logger.info("DICOMweb token updated") + logger.info("Set up DICOMweb config for {}", self.orthanc_dicomweb_url) + + self._validate_dicomweb_server() def upload_parquet_files(self) -> None: msg = "DICOMWeb uploader does not support parquet files" diff --git a/pixl_core/src/core/uploader/_ftps.py b/pixl_core/src/core/uploader/_ftps.py index b714ab83c..61a95827d 100644 --- a/pixl_core/src/core/uploader/_ftps.py +++ b/pixl_core/src/core/uploader/_ftps.py @@ -18,14 +18,14 @@ import ftplib import ssl -from datetime import datetime, timezone from ftplib import FTP_TLS from pathlib import Path from typing import TYPE_CHECKING, Any, BinaryIO, Optional -from core.db.queries import have_already_exported_image, update_exported_at from core.uploader.base import Uploader +from ._orthanc import get_study_zip_archive, get_tags_by_study + if TYPE_CHECKING: from socket import socket @@ -76,19 +76,20 @@ def _set_config(self) -> None: self.password = self.keyvault.fetch_secret(f"{az_prefix}--ftp--password") self.port = int(self.keyvault.fetch_secret(f"{az_prefix}--ftp--port")) - def upload_dicom_image( - self, - zip_content: BinaryIO, - pseudo_anon_image_id: str, - remote_directory: str, - ) -> None: + def upload_dicom_image(self, study_id: str) -> None: """Upload a DICOM image to the FTPS server.""" + pseudo_anon_image_id, project_slug = get_tags_by_study(study_id) logger.info("Starting FTPS upload of '{}'", pseudo_anon_image_id) - # name destination to {project-slug}/{study-pseudonymised-id}.zip - if have_already_exported_image(pseudo_anon_image_id): - msg = "Image already exported" - raise RuntimeError(msg) + zip_content = get_study_zip_archive(study_id) + self.send_via_ftps(zip_content, pseudo_anon_image_id, remote_directory=project_slug) + logger.info("Finished FTPS upload of '{}'", pseudo_anon_image_id) + + def send_via_ftps( + self, zip_content: BinaryIO, pseudo_anon_image_id: str, remote_directory: str + ) -> None: + """Send the zip content to the FTPS server.""" + super().check_already_exported(pseudo_anon_image_id) # Create the remote directory if it doesn't exist ftp = _connect_to_ftp(self.host, self.port, self.user, self.password) @@ -106,10 +107,7 @@ def upload_dicom_image( # Close the FTP connection ftp.quit() - - # Update the exported_at timestamp in the PIXL database - update_exported_at(pseudo_anon_image_id, datetime.now(tz=timezone.utc)) - logger.info("Finished FTPS upload of '{}'", pseudo_anon_image_id) + super().update_exported_timestamp(pseudo_anon_image_id) def upload_parquet_files(self, parquet_export: ParquetExport) -> None: """ diff --git a/pixl_export/src/pixl_export/_orthanc.py 
b/pixl_core/src/core/uploader/_orthanc.py similarity index 92% rename from pixl_export/src/pixl_export/_orthanc.py rename to pixl_core/src/core/uploader/_orthanc.py index e7423e5cc..05dfd5a3c 100644 --- a/pixl_export/src/pixl_export/_orthanc.py +++ b/pixl_core/src/core/uploader/_orthanc.py @@ -53,7 +53,9 @@ def get_tags_by_study(study_id: str) -> tuple[str, str]: def _query_orthanc_anon(resourceId: str, query: str, fail_msg: str) -> requests.Response: try: response = requests.get( - query, auth=(ORTHANC_ANON_USERNAME, ORTHANC_ANON_PASSWORD), timeout=10 + query, + auth=(config("ORTHANC_ANON_USERNAME"), config("ORTHANC_ANON_PASSWORD")), + timeout=10, ) response.raise_for_status() except requests.exceptions.RequestException: @@ -63,6 +65,4 @@ def _query_orthanc_anon(resourceId: str, query: str, fail_msg: str) -> requests. return response -ORTHANC_ANON_USERNAME = config("ORTHANC_ANON_USERNAME") -ORTHANC_ANON_PASSWORD = config("ORTHANC_ANON_PASSWORD") ORTHANC_ANON_URL = "http://orthanc-anon:8042" diff --git a/pixl_core/src/core/uploader/base.py b/pixl_core/src/core/uploader/base.py index 948722ee3..4a1e4fe91 100644 --- a/pixl_core/src/core/uploader/base.py +++ b/pixl_core/src/core/uploader/base.py @@ -16,8 +16,10 @@ from __future__ import annotations from abc import ABC, abstractmethod +from datetime import datetime, timezone from typing import Any, Optional +from core.db.queries import have_already_exported_image, update_exported_at from core.project_config.secrets import AzureKeyVault @@ -46,7 +48,7 @@ def _set_config(self) -> None: """Set the configuration for the uploader.""" @abstractmethod - def upload_dicom_image(self, *args: Any, **kwargs: Any) -> None: + def upload_dicom_image(self, study_id: str) -> None: """ Abstract method to upload DICOM images. To be overwritten by child classes. If an upload strategy does not support DICOM images, this method should raise a @@ -60,3 +62,13 @@ def upload_parquet_files(self, *args: Any, **kwargs: Any) -> None: If an upload strategy does not support parquet files, this method should raise a NotImplementedError. 
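On the `_orthanc.py` move above: reading the credentials with `config(...)` inside `_query_orthanc_anon()` rather than at module import time means the environment only needs to be populated by the time a query is made, which is exactly what lets the test conftests export `ORTHANC_ANON_*` before any request fires. The difference in isolation (names and values illustrative):

```python
# Sketch: a module-level lookup freezes the value at import time; a call-time lookup
# sees whatever the environment (or a test fixture) has set by the time it runs.
import os

os.environ["ORTHANC_ANON_USERNAME"] = "orthanc"

from decouple import config

FROZEN_USER = config("ORTHANC_ANON_USERNAME")   # captured once, at import


def current_user() -> str:
    return config("ORTHANC_ANON_USERNAME")      # captured on every call


os.environ["ORTHANC_ANON_USERNAME"] = "test-user"
assert FROZEN_USER == "orthanc"
assert current_user() == "test-user"
```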
""" + + def check_already_exported(self, pseudo_anon_image_id: str) -> None: + """Check if the image has already been exported.""" + if have_already_exported_image(pseudo_anon_image_id): + msg = "Image already exported" + raise RuntimeError(msg) + + def update_exported_timestamp(self, pseudo_anon_image_id: str) -> None: + """Update the exported_at timestamp in the PIXL database for the given image.""" + update_exported_at(pseudo_anon_image_id, datetime.now(tz=timezone.utc)) diff --git a/pixl_core/tests/conftest.py b/pixl_core/tests/conftest.py index 771aa2d40..c6fb50bfd 100644 --- a/pixl_core/tests/conftest.py +++ b/pixl_core/tests/conftest.py @@ -18,14 +18,12 @@ import pathlib import shlex from pathlib import Path -from typing import TYPE_CHECKING, BinaryIO +from typing import TYPE_CHECKING import pytest import requests from core.db.models import Base, Extract, Image -from core.uploader._ftps import FTPSUploader from pytest_pixl.helpers import run_subprocess -from pytest_pixl.plugin import FtpHostAddress from sqlalchemy import Engine, create_engine from sqlalchemy.orm import Session, sessionmaker @@ -50,12 +48,9 @@ os.environ["FTP_PASSWORD"] = "longpassword" # noqa: S105 Hardcoding password os.environ["FTP_PORT"] = "20021" -os.environ["ORTHANC_URL"] = "http://localhost:8043" -os.environ["ORTHANC_USERNAME"] = "orthanc" -os.environ["ORTHANC_PASSWORD"] = "orthanc" # noqa: S105, hardcoded password -os.environ["DICOM_ENDPOINT_NAME"] = "test" -# Endpoint for DICOMWeb server as seen from within Orthanc -os.environ["DICOM_ENDPOINT_URL"] = "http://localhost:8042/dicom-web/" +os.environ["ORTHANC_ANON_URL"] = "http://localhost:8043" +os.environ["ORTHANC_ANON_USERNAME"] = "orthanc" +os.environ["ORTHANC_ANON_PASSWORD"] = "orthanc" # noqa: S105, hardcoded password @pytest.fixture(scope="package") @@ -67,7 +62,7 @@ def run_containers() -> Generator[subprocess.CompletedProcess[bytes], None, None timeout=60, ) yield run_subprocess( - shlex.split("docker compose up --build --wait"), + shlex.split("docker compose up --build --wait --remove-orphans"), TEST_DIR, timeout=60, ) @@ -79,86 +74,23 @@ def run_containers() -> Generator[subprocess.CompletedProcess[bytes], None, None @pytest.fixture(scope="package") -def run_dicomweb_containers() -> Generator[subprocess.CompletedProcess[bytes], None, None]: - """ - Spins up 2 Orthanc containers, one that acts as the base storage, mimicking our orthanc-anon - or orthanc-raw servers, and the other one as a DICOMweb server to upload DICOM files to. 
- """ - run_subprocess( - shlex.split("docker compose down --volumes"), - TEST_DIR, - timeout=60, - ) - yield run_subprocess( - shlex.split("docker compose -f docker-compose.dicomweb.yml up --build --wait"), - TEST_DIR, - timeout=60, - ) - run_subprocess( - shlex.split("docker compose down --volumes"), - TEST_DIR, - timeout=60, - ) - - -@pytest.fixture(scope="package") -def study_id(run_dicomweb_containers) -> str: +def study_id(run_containers) -> str: """Uploads a DICOM file to the Orthanc server and returns the study ID.""" DCM_FILE = Path(__file__).parents[2] / "test" / "resources" / "Dicom1.dcm" - ORTHANC_URL = os.environ["ORTHANC_URL"] + ORTHANC_ANON_URL = os.environ["ORTHANC_ANON_URL"] headers = {"content-type": "application/dicom"} data = DCM_FILE.read_bytes() response = requests.post( - f"{ORTHANC_URL}/instances", + f"{ORTHANC_ANON_URL}/instances", data=data, headers=headers, - auth=(os.environ["ORTHANC_USERNAME"], os.environ["ORTHANC_PASSWORD"]), + auth=(os.environ["ORTHANC_ANON_USERNAME"], os.environ["ORTHANC_ANON_PASSWORD"]), timeout=60, ) return response.json()["ParentStudy"] -class MockFTPSUploader(FTPSUploader): - """Mock FTPSUploader for testing.""" - - def __init__(self) -> None: - """Initialise the mock uploader with hardcoded values for FTPS config.""" - self.host = os.environ["FTP_HOST"] - self.user = os.environ["FTP_USER_NAME"] - self.password = os.environ["FTP_PASSWORD"] - self.port = int(os.environ["FTP_PORT"]) - - -@pytest.fixture() -def ftps_uploader() -> MockFTPSUploader: - """Return a MockFTPSUploader object.""" - return MockFTPSUploader() - - -@pytest.fixture() -def ftps_home_dir(ftps_server) -> Path: - """ - Return the FTPS server home directory, the ftps_server fixture already uses - pytest.tmp_path_factory, so no need to clean up. - """ - return Path(ftps_server.home_dir) - - -@pytest.fixture(scope="session") -def ftp_host_address(): - """Run FTP on localhost - no docker containers need to access it""" - return FtpHostAddress.LOCALHOST - - -@pytest.fixture() -def test_zip_content() -> BinaryIO: - """Directory containing the test data for uploading to the ftp server.""" - test_zip_file = TEST_DIR / "data" / "public.zip" - with test_zip_file.open("rb") as file_content: - yield file_content - - @pytest.fixture(scope="module") def monkeymodule(): """Module level monkey patch.""" @@ -170,7 +102,7 @@ def monkeymodule(): @pytest.fixture(autouse=True, scope="module") -def db_engine(monkeymodule) -> Engine: +def db_engine(monkeymodule) -> Generator[Engine, None, None]: """ Patches the database engine with an in memory database @@ -193,7 +125,7 @@ def db_engine(monkeymodule) -> Engine: @pytest.fixture() -def db_session(db_engine) -> Session: +def db_session(db_engine) -> Generator[Session, None, None]: """ Creates a session for interacting with an in memory database. diff --git a/pixl_core/tests/docker-compose.dicomweb.yml b/pixl_core/tests/docker-compose.dicomweb.yml deleted file mode 100644 index 620a4def1..000000000 --- a/pixl_core/tests/docker-compose.dicomweb.yml +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (c) University College London Hospitals NHS Foundation Trust -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
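Stepping back to the `base.py` hunk earlier in this patch: concrete uploaders now implement `_set_config()`, `upload_dicom_image(study_id)` and `upload_parquet_files(...)`, while the already-exported check and the `exported_at` bookkeeping are inherited. A hypothetical third destination would slot in roughly like this (the class, destination and path are invented for illustration):

```python
# Hypothetical sketch of a new destination built on the shared Uploader base class.
from pathlib import Path

from core.uploader._orthanc import get_study_zip_archive, get_tags_by_study
from core.uploader.base import Uploader


class LocalDiskUploader(Uploader):
    """Illustrative only: write anonymised studies to a local directory."""

    def _set_config(self) -> None:
        self.export_root = Path("/exports")  # would normally come from config or the key vault

    def upload_dicom_image(self, study_id: str) -> None:
        pseudo_anon_image_id, project_slug = get_tags_by_study(study_id)
        self.check_already_exported(pseudo_anon_image_id)  # raises if already sent
        zip_content = get_study_zip_archive(study_id)
        dest = self.export_root / project_slug / f"{pseudo_anon_image_id}.zip"
        dest.parent.mkdir(parents=True, exist_ok=True)
        dest.write_bytes(zip_content.read())
        self.update_exported_timestamp(pseudo_anon_image_id)

    def upload_parquet_files(self, parquet_export) -> None:
        msg = "Parquet export is not supported by this illustrative uploader"
        raise NotImplementedError(msg)
```

It would also need an entry in the `choices` dict inside `get_uploader()` before the factory could return it.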
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -networks: - pixl-test: - -services: - orthanc: - image: orthancteam/orthanc:24.4.0 - platform: linux/amd64 - environment: - ORTHANC_NAME: "orthanc" - ORTHANC_USERNAME: "orthanc" - ORTHANC_PASSWORD: "orthanc" - ORTHANC_AE_TITLE: "orthanc" - RAW_AE_TITLE: ORHTANCRAW - RAW_DICOM_PORT: "4242" - RAW_IP_ADDR: "orthanc-raw" # aka. hostname - DICOM_WEB_PLUGIN_ENABLED: true - ports: - - "4243:4242" - - "8043:8042" - volumes: - - ${PWD}/dicomweb_config/:/run/secrets:ro - networks: - - pixl-test - healthcheck: - test: ["CMD-SHELL", "/probes/test-aliveness.py --user=orthanc --pwd=orthanc"] - start_period: 10s - retries: 2 - interval: 3s - timeout: 2s - - dicomweb-server: - image: orthancteam/orthanc:24.4.0 - platform: linux/amd64 - environment: - ORTHANC_NAME: "dicomweb" - ORTHANC_USERNAME: "orthanc" - ORTHANC_PASSWORD: "orthanc" - ORTHANC_AE_TITLE: "DICOMWEB" - RAW_AE_TITLE: ORHTANCRAW - RAW_DICOM_PORT: "4242" - RAW_IP_ADDR: "orthanc-raw" # aka. hostname - DICOM_WEB_PLUGIN_ENABLED: true - ports: - - "4244:4242" - - "8044:8042" - volumes: - - ${PWD}/../../test/dicomweb_config/:/run/secrets:ro - networks: - - pixl-test - healthcheck: - test: ["CMD-SHELL", "/probes/test-aliveness.py --user=orthanc --pwd=orthanc"] - start_period: 10s - retries: 2 - interval: 3s - timeout: 2s diff --git a/pixl_core/tests/docker-compose.yml b/pixl_core/tests/docker-compose.yml index 98f9ebc24..ed9270086 100644 --- a/pixl_core/tests/docker-compose.yml +++ b/pixl_core/tests/docker-compose.yml @@ -11,22 +11,75 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -version: "3.8" - +networks: + pixl-test: services: + queue: + container_name: pixl-test-queue + image: rabbitmq:3.12.9-management + environment: + RABBITMQ_DEFAULT_USER: guest + RABBITMQ_DEFAULT_PASS: guest + ports: + - "25672:5672" + - "35672:15672" + healthcheck: + test: rabbitmq-diagnostics -q check_running + interval: 10s + timeout: 5s + retries: 5 + + orthanc: + image: orthancteam/orthanc:24.4.0 + platform: linux/amd64 + environment: + ORTHANC_NAME: "orthanc" + ORTHANC_USERNAME: "orthanc" + ORTHANC_PASSWORD: "orthanc" + ORTHANC_AE_TITLE: "orthanc" + RAW_AE_TITLE: ORTHANCRAW + RAW_DICOM_PORT: "4242" + RAW_IP_ADDR: "orthanc-raw" # aka. 
hostname + DICOM_WEB_PLUGIN_ENABLED: true + ports: + - "4243:4242" + - "8043:8042" + networks: + - pixl-test + healthcheck: + test: ["CMD-SHELL", "/probes/test-aliveness.py --user=orthanc --pwd=orthanc"] + start_period: 10s + retries: 2 + interval: 3s + timeout: 2s - queue: - container_name: pixl-test-queue - image: rabbitmq:3.12.9-management - environment: - RABBITMQ_DEFAULT_USER: guest - RABBITMQ_DEFAULT_PASS: guest - ports: - - "25672:5672" - - "35672:15672" - healthcheck: - test: rabbitmq-diagnostics -q check_running - interval: 10s - timeout: 5s - retries: 5 + dicomweb-server: + image: orthancteam/orthanc:24.4.0 + platform: linux/amd64 + environment: + ORTHANC_NAME: "dicomweb" + ORTHANC_USERNAME: "orthanc_dicomweb" + ORTHANC_PASSWORD: "orthanc_dicomweb" + ORTHANC_AE_TITLE: "DICOMWEB" + RAW_AE_TITLE: ORTHANCRAW + RAW_DICOM_PORT: "4242" + RAW_IP_ADDR: "dicom-web" # aka. hostname + DICOM_WEB_PLUGIN_ENABLED: true + ports: + - "4244:4242" + - "8044:8042" + volumes: + - ../../test/dicomweb_config/:/run/secrets:ro + networks: + - pixl-test + healthcheck: + test: + [ + "CMD-SHELL", + "/probes/test-aliveness.py --user=orthanc_dicomweb --pwd=orthanc_dicomweb", + ] + start_period: 10s + retries: 2 + interval: 3s + timeout: 2s diff --git a/pixl_core/tests/uploader/test_dicomweb.py b/pixl_core/tests/uploader/test_dicomweb.py index a8c56ef21..43cbd1645 100644 --- a/pixl_core/tests/uploader/test_dicomweb.py +++ b/pixl_core/tests/uploader/test_dicomweb.py @@ -15,17 +15,21 @@ from __future__ import annotations -from typing import Optional +import time import pytest import requests from core.uploader._dicomweb import DicomWebUploader from decouple import config # type: ignore [import-untyped] -ORTHANC_URL = config("ORTHANC_URL") -DICOM_ENDPOINT_NAME = config("DICOM_ENDPOINT_NAME") -ORTHANC_USERNAME = config("ORTHANC_USERNAME") -ORTHANC_PASSWORD = config("ORTHANC_PASSWORD") +ORTHANC_ANON_URL = config("ORTHANC_ANON_URL") +ORTHANC_USERNAME = config("ORTHANC_ANON_USERNAME") +ORTHANC_PASSWORD = config("ORTHANC_ANON_PASSWORD") + +DICOMWEB_USERNAME = "orthanc_dicomweb" +DICOMWEB_PASSWORD = "orthanc_dicomweb" # noqa: S105, hardcoded password + +LOCAL_DICOMWEB_URL = "http://localhost:8044" class MockDicomWebUploader(DicomWebUploader): @@ -33,11 +37,17 @@ class MockDicomWebUploader(DicomWebUploader): def __init__(self) -> None: """Initialise the mock uploader.""" - self.user = ORTHANC_USERNAME - self.password = ORTHANC_PASSWORD - self.endpoint_name = DICOM_ENDPOINT_NAME - self.orthanc_url = ORTHANC_URL - self.url = self.orthanc_url + "/dicom-web/servers/" + self.endpoint_name + self.az_prefix = "test" + self.orthanc_user = ORTHANC_USERNAME + self.orthanc_password = ORTHANC_PASSWORD + self.orthanc_url = ORTHANC_ANON_URL + self.endpoint_user = DICOMWEB_USERNAME + self.endpoint_password = DICOMWEB_PASSWORD + # URL for DICOMWeb server as seen from within Orthanc, i.e. 
the address of the dicomweb + # server within the Docker compose network + self.endpoint_url = "http://dicomweb-server:8042/dicom-web" + self.orthanc_dicomweb_url = self.orthanc_url + "/dicom-web/servers/" + self.az_prefix + self.http_timeout = 30 @pytest.fixture() @@ -46,29 +56,77 @@ def dicomweb_uploader() -> MockDicomWebUploader: return MockDicomWebUploader() -def _do_get_request(endpoint: str, data: Optional[dict] = None) -> requests.Response: - """Perform a GET request to the specified endpoint.""" - return requests.get( - ORTHANC_URL + endpoint, +def test_dicomweb_server_config(run_containers, dicomweb_uploader) -> None: + """Tests that the DICOMWeb server is configured correctly in Orthanc""" + dicomweb_uploader._setup_dicomweb_credentials() # noqa: SLF001, private method + servers_response = requests.get( + ORTHANC_ANON_URL + "/dicom-web/servers", auth=(ORTHANC_USERNAME, ORTHANC_PASSWORD), - data=data, timeout=30, ) + servers_response.raise_for_status() + assert "test" in servers_response.json() + + +def _check_study_present_on_dicomweb(study_id: str) -> bool: + """Check if a study is present on the DICOMWeb server.""" + response = requests.get( + LOCAL_DICOMWEB_URL + "/studies", + auth=(DICOMWEB_USERNAME, DICOMWEB_PASSWORD), + timeout=30, + ) + response.raise_for_status() + return study_id in response.json() -def test_upload_dicom_image(study_id, run_dicomweb_containers, dicomweb_uploader) -> None: +def _clean_up_dicomweb(study_id: str) -> None: + """Clean up the DICOMWeb server.""" + response = requests.delete( + LOCAL_DICOMWEB_URL + "/studies/" + study_id, + auth=(DICOMWEB_USERNAME, DICOMWEB_PASSWORD), + timeout=30, + ) + response.raise_for_status() + + +def test_upload_dicom_image( + study_id, run_containers, dicomweb_uploader, not_yet_exported_dicom_image +) -> None: """Tests that DICOM image can be uploaded to a DICOMWeb server""" - # ARRANGE - - # ACT - stow_response = dicomweb_uploader.send_via_stow(study_id) - studies_response = _do_get_request("/dicom-web/studies", data={"Uri": "/instances"}) - servers_response = _do_get_request("/dicom-web/servers") - - # ASSERT - # Check if dicom-web server is set up correctly - assert DICOM_ENDPOINT_NAME in servers_response.json() - assert stow_response.status_code == 200 # succesful upload - # Taken from https://orthanc.uclouvain.be/hg/orthanc-dicomweb/file/default/Resources/Samples/Python/SendStow.py - # Check that instance has not been discarded - assert "00081190" in studies_response.json()[0] + response = dicomweb_uploader.send_via_stow( + study_id, not_yet_exported_dicom_image.hashed_identifier + ) + response.raise_for_status() + + # Check that the instance has arrived on the DICOMweb server + time.sleep(2) + assert _check_study_present_on_dicomweb(study_id) + + _clean_up_dicomweb(study_id) + + +def test_upload_dicom_image_already_exported( + study_id, run_containers, dicomweb_uploader, already_exported_dicom_image +) -> None: + """Tests that exception thrown if DICOM image already exported""" + with pytest.raises(RuntimeError, match="Image already exported"): + dicomweb_uploader.send_via_stow(study_id, already_exported_dicom_image.hashed_identifier) + + +def test_dicomweb_upload_fails_with_wrong_credentials( + study_id, run_containers, dicomweb_uploader +) -> None: + """Tests that the DICOMWeb uploader fails when given wrong credentials.""" + dicomweb_uploader.endpoint_user = "wrong" + dicomweb_uploader.endpoint_password = "wrong" # noqa: S105, hardcoded password + + with pytest.raises(requests.exceptions.ConnectionError): + 
dicomweb_uploader._setup_dicomweb_credentials() # noqa: SLF001, private method + + +def test_dicomweb_upload_fails_with_wrong_url(study_id, run_containers, dicomweb_uploader) -> None: + """Tests that the DICOMWeb uploader fails when given wrong URL.""" + dicomweb_uploader.endpoint_url = "http://wrong" + + with pytest.raises(requests.exceptions.ConnectionError): + dicomweb_uploader._setup_dicomweb_credentials() # noqa: SLF001, private method diff --git a/pixl_core/tests/uploader/test_ftps.py b/pixl_core/tests/uploader/test_ftps.py index a9c53a26b..06e7bfe66 100644 --- a/pixl_core/tests/uploader/test_ftps.py +++ b/pixl_core/tests/uploader/test_ftps.py @@ -14,20 +14,66 @@ """Test functionality to upload files to an FTPS endpoint.""" import filecmp -import pathlib +import os +from collections.abc import Generator from datetime import datetime, timezone +from pathlib import Path import pandas as pd import pytest from core.db.models import Image from core.db.queries import update_exported_at from core.exports import ParquetExport +from core.uploader._ftps import FTPSUploader +from pytest_pixl.plugin import FtpHostAddress from sqlalchemy.exc import NoResultFound +TEST_DIR = Path(__file__).parents[1] + + +class MockFTPSUploader(FTPSUploader): + """Mock FTPSUploader for testing.""" + + def __init__(self) -> None: + """Initialise the mock uploader with hardcoded values for FTPS config.""" + self.host = os.environ["FTP_HOST"] + self.user = os.environ["FTP_USER_NAME"] + self.password = os.environ["FTP_PASSWORD"] + self.port = int(os.environ["FTP_PORT"]) + + +@pytest.fixture() +def ftps_uploader() -> MockFTPSUploader: + """Return a MockFTPSUploader object.""" + return MockFTPSUploader() + + +@pytest.fixture() +def ftps_home_dir(ftps_server) -> Path: + """ + Return the FTPS server home directory, the ftps_server fixture already uses + pytest.tmp_path_factory, so no need to clean up. 
+ """ + return Path(ftps_server.home_dir) + + +@pytest.fixture(scope="session") +def ftp_host_address(): + """Run FTP on localhost - no docker containers need to access it""" + return FtpHostAddress.LOCALHOST + + +@pytest.fixture() +def zip_content() -> Generator: + """Directory containing the test data for uploading to the ftp server.""" + test_zip_file = TEST_DIR / "data" / "public.zip" + with test_zip_file.open("rb") as file_content: + yield file_content + @pytest.mark.usefixtures("ftps_server") -def test_upload_dicom_image( - test_zip_content, not_yet_exported_dicom_image, ftps_uploader, ftps_home_dir +def test_send_via_ftps( + zip_content, not_yet_exported_dicom_image, ftps_uploader, ftps_home_dir ) -> None: """Tests that DICOM image can be uploaded to the correct location""" # ARRANGE @@ -37,7 +83,7 @@ def test_upload_dicom_image( expected_output_file = ftps_home_dir / project_slug / (pseudo_anon_id + ".zip") # ACT - ftps_uploader.upload_dicom_image(test_zip_content, pseudo_anon_id, project_slug) + ftps_uploader.send_via_ftps(zip_content, pseudo_anon_id, project_slug) # ASSERT assert expected_output_file.exists() @@ -45,7 +91,7 @@ def test_upload_dicom_image( @pytest.mark.usefixtures("ftps_server") def test_upload_dicom_image_already_exported( - test_zip_content, already_exported_dicom_image, ftps_uploader + zip_content, already_exported_dicom_image, ftps_uploader ) -> None: """Tests that exception thrown if DICOM image already exported""" # ARRANGE @@ -55,11 +101,11 @@ def test_upload_dicom_image_already_exported( # ASSERT with pytest.raises(RuntimeError, match="Image already exported"): - ftps_uploader.upload_dicom_image(test_zip_content, pseudo_anon_id, project_slug) + ftps_uploader.send_via_ftps(zip_content, pseudo_anon_id, project_slug) @pytest.mark.usefixtures("ftps_server") -def test_upload_dicom_image_unknown(test_zip_content, ftps_uploader) -> None: +def test_upload_dicom_image_unknown(zip_content, ftps_uploader) -> None: """ Tests that a different exception is thrown if image is not recognised in the PIXL DB. 
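The test renames above mirror the new split in `_ftps.py`: `upload_dicom_image(study_id)` is the production path and pulls the pseudonymised tags plus the zip archive from Orthanc itself, while `send_via_ftps(zip_content, pseudo_anon_image_id, remote_directory)` stays directly testable with an arbitrary file handle. Roughly, from a caller's point of view (the slug, study ID and paths are invented; constructing the uploader reads FTP secrets via `_set_config()`):

```python
from core.uploader._ftps import FTPSUploader

uploader = FTPSUploader("my-project-slug", None)  # (project_slug, keyvault_alias)

# Production path: everything is derived from the Orthanc study ID.
uploader.upload_dicom_image("0a9b3c4d-hypothetical-study-id")

# Test path: hand the method pre-built zip content, bypassing Orthanc entirely.
with open("tests/data/public.zip", "rb") as zip_content:
    uploader.send_via_ftps(zip_content, "pseudo-anon-id", remote_directory="my-project-slug")
```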
@@ -72,7 +118,7 @@ def test_upload_dicom_image_unknown(test_zip_content, ftps_uploader) -> None: # ASSERT with pytest.raises(NoResultFound): - ftps_uploader.upload_dicom_image(test_zip_content, pseudo_anon_id, project_slug) + ftps_uploader.send_via_ftps(zip_content, pseudo_anon_id, project_slug) def test_update_exported_and_save(rows_in_session) -> None: @@ -113,9 +159,7 @@ def test_upload_parquet(parquet_export, ftps_home_dir, ftps_uploader) -> None: """Tests that parquet files are uploaded to the correct location (but ignore their contents)""" # ARRANGE - parquet_export.copy_to_exports( - pathlib.Path(__file__).parents[3] / "test" / "resources" / "omop" - ) + parquet_export.copy_to_exports(Path(__file__).parents[3] / "test" / "resources" / "omop") parquet_export.export_radiology_linker(pd.DataFrame(list("dummy"), columns=["D"])) # ACT diff --git a/pixl_export/src/pixl_export/main.py b/pixl_export/src/pixl_export/main.py index 66e8b8823..55c0f7c12 100644 --- a/pixl_export/src/pixl_export/main.py +++ b/pixl_export/src/pixl_export/main.py @@ -23,17 +23,15 @@ from pathlib import Path from core.exports import ParquetExport -from core.project_config import load_project_config from core.rest_api.router import router from core.uploader import get_uploader +from core.uploader._orthanc import get_tags_by_study from decouple import config # type: ignore [import-untyped] from fastapi import FastAPI, HTTPException from fastapi.responses import JSONResponse from loguru import logger from pydantic import BaseModel -from ._orthanc import get_study_zip_archive, get_tags_by_study - # Set up logging as main entry point logger.remove() # Remove all handlers added so far, including the default one. logging_level = config("LOG_LEVEL", default="INFO") @@ -107,11 +105,8 @@ def export_dicom_from_orthanc(study_data: StudyData) -> None: the hashed image ID (MRN + Accession number). """ study_id = study_data.study_id - hashed_image_id, project_slug = get_tags_by_study(study_id) - project_config = load_project_config(project_slug) - destination = project_config.destination.dicom - - uploader = get_uploader(project_slug, destination, project_config.project.azure_kv_alias) - logger.debug("Sending {} via '{}'", study_id, destination) - zip_content = get_study_zip_archive(study_id) - uploader.upload_dicom_image(zip_content, hashed_image_id, project_slug) + _, project_slug = get_tags_by_study(study_id) + + uploader = get_uploader(project_slug) + logger.debug("Sending {} via '{}'", study_id, type(uploader).__name__) + uploader.upload_dicom_image(study_id) diff --git a/test/docker-compose.yml b/test/docker-compose.yml index 4818e49ce..fb6d20d93 100644 --- a/test/docker-compose.yml +++ b/test/docker-compose.yml @@ -11,8 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
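The `pixl_export/main.py` hunk above is the pay-off of the uploader refactor: the endpoint no longer loads project config, zips the study or knows about destinations; it resolves the slug from the study's DICOM tags and hands the study ID to whatever the factory returns. Condensed (not a drop-in replacement for the route handler):

```python
# Condensed sketch of the new control flow in export_dicom_from_orthanc().
from core.uploader import get_uploader
from core.uploader._orthanc import get_tags_by_study


def export_study(study_id: str) -> None:
    _, project_slug = get_tags_by_study(study_id)  # slug comes from the DICOM tags
    uploader = get_uploader(project_slug)          # FTPS or DICOMweb, per project config
    uploader.upload_dicom_image(study_id)          # the uploader fetches whatever it needs
```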
-version: "3.8" - volumes: vna-qr-data: @@ -35,7 +33,7 @@ services: - "4243:4242" - "8043:8042" volumes: - - ${PWD}/vna_config/:/run/secrets:ro + - ./vna_config/:/run/secrets:ro healthcheck: test: ["CMD-SHELL", "/probes/test-aliveness.py --user=orthanc --pwd=orthanc"] start_period: 10s @@ -49,10 +47,10 @@ services: platform: linux/amd64 environment: ORTHANC_NAME: "dicomweb" - ORTHANC_USERNAME: "orthanc" - ORTHANC_PASSWORD: "orthanc" + ORTHANC_USERNAME: "orthanc_dicomweb" + ORTHANC_PASSWORD: "orthanc_dicomweb" ORTHANC_AE_TITLE: "DICOMWEB" - RAW_AE_TITLE: ORHTANCRAW + RAW_AE_TITLE: ORTHANCRAW RAW_DICOM_PORT: "4242" RAW_IP_ADDR: "dicomweb-server" # aka. hostname DICOM_WEB_PLUGIN_ENABLED: true @@ -60,11 +58,15 @@ services: - "4244:4242" - "8044:8042" volumes: - - ${PWD}/dicomweb_config/:/run/secrets:ro + - ./dicomweb_config/:/run/secrets:ro networks: pixl-net: healthcheck: - test: ["CMD-SHELL", "/probes/test-aliveness.py --user=orthanc --pwd=orthanc"] + test: + [ + "CMD-SHELL", + "/probes/test-aliveness.py --user=orthanc_dicomweb --pwd=orthanc_dicomweb", + ] start_period: 10s retries: 2 interval: 3s diff --git a/test/system_test.py b/test/system_test.py index d5d698c9b..6d7ae6171 100644 --- a/test/system_test.py +++ b/test/system_test.py @@ -181,17 +181,27 @@ def _check_dcm_tags_from_zip( @pytest.mark.usefixtures("_setup_pixl_cli_dicomweb") def test_dicomweb_upload() -> None: """Check upload to DICOMweb server was successful""" - # This should point to the orthanc-anon server - ORTHANC_URL = "http://localhost:7003" + # This should point to the dicomweb server, as seen from the local host machine + LOCAL_DICOMWEB_URL = "http://localhost:8044" - def check_dicomweb_study_present() -> bool: + dicomweb_studies: list[str] = [] + + def dicomweb_studies_list() -> str: + return f"DICOMweb studies found: {dicomweb_studies}" + + def two_studies_present_on_dicomweb() -> bool: + nonlocal dicomweb_studies response = requests.get( - ORTHANC_URL + "/dicom-web/studies", - auth=("orthanc_anon_username", "orthanc_anon_password"), - data={"Uri": "/instances"}, + LOCAL_DICOMWEB_URL + "/studies", + auth=("orthanc_dicomweb", "orthanc_dicomweb"), timeout=30, ) - # Taken from https://orthanc.uclouvain.be/hg/orthanc-dicomweb/file/default/Resources/Samples/Python/SendStow.py - return response.status_code == 200 and "00081190" in response.json()[0] - - wait_for_condition(check_dicomweb_study_present) + dicomweb_studies = response.json() + return len(dicomweb_studies) == 2 + + wait_for_condition( + two_studies_present_on_dicomweb, + seconds_max=121, + seconds_interval=10, + progress_string_fn=dicomweb_studies_list, + )
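On the system-test change above: because the DICOMweb transfer is asynchronous, the assertion becomes a poll with a 121-second budget and a 10-second interval (roughly twelve attempts), echoing the current study list on each retry. For reviewers without `pytest_pixl.helpers.wait_for_condition` to hand, a minimal stand-in with the same call shape might look like this — an assumption about its behaviour, not its actual implementation:

```python
# Hypothetical stand-in for wait_for_condition, to reason about the test's timing.
import time
from typing import Callable, Optional


def wait_for_condition(
    test_condition: Callable[[], bool],
    *,
    seconds_max: int = 1,
    seconds_interval: int = 1,
    progress_string_fn: Optional[Callable[[], str]] = None,
) -> None:
    """Poll test_condition until it returns True or the time budget runs out."""
    deadline = time.monotonic() + seconds_max
    while True:
        if test_condition():
            return
        if progress_string_fn is not None:
            print(progress_string_fn())
        if time.monotonic() >= deadline:
            msg = f"Condition not met within {seconds_max}s"
            raise TimeoutError(msg)
        time.sleep(seconds_interval)
```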