diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 19796285..deb2aeb3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -66,8 +66,8 @@ jobs: run: python -m mypy src tests - unit-tests: - name: unit tests on ${{ matrix.python-version }} + tests: + name: tests on ${{ matrix.python-version }} runs-on: ubuntu-22.04 strategy: fail-fast: false @@ -78,7 +78,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: - python-version: ${{ env.PYTHON_LATEST }} + python-version: ${{ matrix.python-version }} - uses: snok/install-poetry@v1 with: version: ${{ env.POETRY_VERSION }} @@ -87,92 +87,15 @@ jobs: poetry self add "poetry-dynamic-versioning[plugin]" poetry install - - run: python -m pytest -m 'not e2e' + - run: coverage run -m pytest env: # FoxOps test configuration - FOXOPS_GITLAB_ADDRESS: https://nonsense.com/api/v4 - FOXOPS_GITLAB_TOKEN: nonsense - - # Test runner configuration - COVERAGE_FILE: .coverage.unit.${{ matrix.python-version }} - - - name: Upload coverage data - uses: actions/upload-artifact@v3 - with: - name: coverage-data - path: .coverage.* - if-no-files-found: ignore - - - e2e-tests: - name: e2e tests on ${{ matrix.python-version }} - runs-on: ubuntu-22.04 - strategy: - fail-fast: false - matrix: - python-version: ["3.11"] - - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 - with: - python-version: ${{ env.PYTHON_LATEST }} - - uses: snok/install-poetry@v1 - with: - version: ${{ env.POETRY_VERSION }} - - name: Install dependencies - run: | - poetry self add "poetry-dynamic-versioning[plugin]" - poetry install - - - name: Start GitLab test instance - run: | - docker compose up -d - ./scripts/await-healthy.sh - - - run: python -m pytest -m 'e2e' - env: - # FoxOps test configuration - FOXOPS_GITLAB_ADDRESS: https://nonsense.com/api/v4 - FOXOPS_GITLAB_TOKEN: nonsense - - # Test runner configuration - COVERAGE_FILE: .coverage.e2e.${{ matrix.python-version }} - - - name: Upload 
coverage data - uses: actions/upload-artifact@v3 - with: - name: coverage-data - path: .coverage.* - if-no-files-found: ignore - - - coverage: - name: Combine & check coverage - runs-on: ubuntu-latest - needs: [unit-tests, e2e-tests] - - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 - with: - # Use latest Python, so it understands all syntax. - python-version: ${{env.PYTHON_LATEST}} - - run: python -m pip install --upgrade coverage[toml] - - uses: actions/download-artifact@v3 - with: - name: coverage-data - - name: Combine coverage & fail if it's <70%. - run: | - python -m coverage combine - python -m coverage html --skip-covered --skip-empty - python -m coverage report --fail-under=70 - - name: Upload HTML report - uses: actions/upload-artifact@v3 - with: - name: html-report - path: htmlcov + FOXOPS_TESTS_GITLAB_ADDRESS: https://gitlab.com + FOXOPS_TESTS_GITLAB_ROOT_GROUP_ID: 73622910 + FOXOPS_TESTS_GITLAB_TOKEN: ${{ secrets.FOXOPS_TESTS_GITLAB_TOKEN }} + - name: Check test coverage + run: coverage report package: name: Build & verify package diff --git a/.gitignore b/.gitignore index 9922178b..e068cc76 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,11 @@ .python-version __pycache__/ dist/ +run/ .coverage* docs/build/ .idea test.db **/.DS_Store .dmypy.json +.env.test diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 69a9b008..76fd4536 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -4,45 +4,80 @@ This project uses [Poetry](https://python-poetry.org/) and [poetry-dynamic-versioning](https://pypi.org/project/poetry-dynamic-versioning/) for dependency management and the package building process. -## Running tests +## Run foxops locally -The test suite uses `pytest` as a test runner and it's located under `tests/`. +### With docker-compose -The unit tests can be executed by excluding the `e2e` tests: +The easiest way to get started is to run foxops locally via docker-compose. 
Just execute this command in the root folder of the project: -``` -pytest -m 'not e2e' +```shell +docker compose up ``` -which doesn't require any external database nor GitLab instance. +It will build the docker image and run foxops with an SQlite database and a local hoster configuration. -To run the `e2e` tests a test GitLab instance needs to be available. It can be started using `docker compose`: +Now foxops can be accessed at `http://localhost:8000` with `dummy` as the token. -``` -docker compose up -d +### Directly with Python + +First, start by making sure that your virtual Python environment is up to date by running + +```shell +poetry install ``` -Be aware that an **initial startup of the Gitlab instance can take some time** (5 minutes). Check the logs with `docker-compose logs` to verify it if reached a stable state. +Then, you can run foxops with -Then, the tests can be run using `pytest`: +```shell +export FOXOPS_DATABASE_URL=sqlite+aiosqlite:///./foxops.db +export FOXOPS_HOSTER_TYPE=local +export FOXOPS_HOSTER_LOCAL_DIRECTORY= +export FOXOPS_STATIC_TOKEN=dummy -``` -pytest -m 'e2e' +# initialize sqlite database in ./foxops.db +poetry run alembic upgrade head + +# run foxops webserver +poetry run uvicorn foxops.__main__:create_app --host localhost --port 5001 --reload --factory ``` -## Running foxops locally +## Running Tests -The foxops API can be run locally using `uvicorn`: +The test suite uses `pytest` as a test runner and it's located under `tests/`. -``` -uvicorn foxops.__main__:create_app --host localhost --port 5001 --reload --factory +Simply execute the following commands to run the entire foxops test suite: + +```shell +pytest ``` -For this to work foxops needs a few configuration settings to be available. -These are at least a GitLab address and token. To use the test instance you can run the following +Tests can also run with parallelization enabled to speed up execution. 
To do so, simply add the `-n` flag with the number of parallel processes to use: +```shell +pytest -n 4 ``` -FOXOPS_STATIC_TOKEN=dummy FOXOPS_GITLAB_ADDRESS=http://localhost:5002/api/v4 FOXOPS_GITLAB_TOKEN=ACCTEST1234567890123 uvicorn foxops.__main__:create_app --host localhost --port 5001 --reload --factory + +Some tests require a Gitlab instance to be available and will be skipped automatically if it's not the case. + +### Run Tests that Require Gitlab + +To run tests that require a Gitlab instance, you can either [run one locally](https://docs.gitlab.com/ee/install/docker.html) or use the public [gitlab.com](https://gitlab.com) instance. The latter is typically recommended for ease of use. + +On that Gitlab instance, a "root group" is required, in which foxops can create temporary projects for test templates and incarnations (these will be automatically cleaned after the test execution). Also an access token is required that has access to that group. + +Once you have all this, add the following environment variables and rerun the tests: + +```shell +# defaults to "https://gitlab.com" if not specified +export FOXOPS_TESTS_GITLAB_ADDRESS=
+# defaults to 73622910, which is the "foxops-e2e" group on gitlab.com +export FOXOPS_TESTS_GITLAB_ROOT_GROUP_ID= +export FOXOPS_TESTS_GITLAB_TOKEN= + +# these variables can also be set in a file called `.env.test` in the root folder of the project + +# execute tests (parallelization is recommended) +pytest -n 4 ``` ### Documentation diff --git a/docker-compose.yml b/docker-compose.yml index f534e130..8c40ded0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,36 +1,21 @@ -# by default gitlab will run on localhost (i.e. same host where tests are run) listening on port 5002 -# this can be overriden with environment variables: GITLAB_HOST & GITLAB_PORT & GITLAB_ADDRESS -# Eg: -# env GITLAB_HOST="myserver.example.com" GITLAB_PORT="8002" docker-compose up -d -# env GITLAB_HOST="myserver.example.com" GITLAB_PORT="8002" ./script/await-healthy.sh -# env GITLAB_ADDRESS="http://myserver.example.com:8002/api/v4" poetry run pytest -m 'e2e' - version: '3' services: - gitlab-ce: - image: gitlab/gitlab-ce:${GITLAB_CE_VERSION:-15.8.3-ce.0} - restart: always + foxops: + build: + context: . 
+ dockerfile: Dockerfile ports: - - ${GITLAB_PORT:-5002}:80 - environment: - GITLAB_ROOT_PASSWORD: dvqMom4ruD9oqcErwtij - GITLAB_OMNIBUS_CONFIG: | - external_url "http://${GITLAB_HOST:-127.0.0.1}:${GITLAB_PORT:-5002}" - nginx['listen_port'] = 80 - labels: - foxops-gitlab/owned: '' + - 8000:8000 volumes: - - config-ce:/etc/gitlab - - logs-ce:/var/log/gitlab - - data-ce:/var/opt/gitlab - - ${PWD}/scripts/healthcheck-and-setup.sh:/healthcheck-and-setup.sh:Z - healthcheck: - test: /healthcheck-and-setup.sh - interval: 10s - timeout: 2m + - database:/app/database + - hoster:/app/hoster + environment: + FOXOPS_DATABASE_URL: sqlite+aiosqlite:////app/database/foxops.db + FOXOPS_HOSTER_TYPE: local + FOXOPS_HOSTER_LOCAL_DIRECTORY: /app/hoster + FOXOPS_STATIC_TOKEN: dummy volumes: - config-ce: - logs-ce: - data-ce: + database: + hoster: diff --git a/docs/source/installation.md b/docs/source/installation.md index 831dd669..09b89208 100644 --- a/docs/source/installation.md +++ b/docs/source/installation.md @@ -15,19 +15,49 @@ docker run --rm -v "$(pwd)"/foxops_db:/database \ alembic upgrade head ``` -With the database in place, we can now start the foxops API server: +With the database in place, we can now start the foxops API server. 
Two options exist for running it locally: + +### Run FoxOps Connected to an Existing Gitlab Instance + +If you already have a running Gitlab instance where you want to host your templates and incarnations, you can run foxops connected to that instance: + +```bash +export FOXOPS_HOSTER_GITLAB_ADDRESS=https://gitlab.com +export FOXOPS_HOSTER_GITLAB_TOKEN=glpat-abcdefgh123456 + +docker run --rm -p 8000:8000 -v "$(pwd)"/foxops_db:/database \ + -e FOXOPS_DATABASE_URL=sqlite+aiosqlite:////database/foxops.sqlite \ + -e FOXOPS_STATIC_TOKEN=dummy-token \ + -e FOXOPS_HOSTER_TYPE=gitlab \ + -e FOXOPS_HOSTER_GITLAB_ADDRESS=$FOXOPS_HOSTER_GITLAB_ADDRESS \ + -e FOXOPS_HOSTER_GITLAB_TOKEN=$FOXOPS_HOSTER_GITLAB_TOKEN \ + ghcr.io/roche/foxops:latest +``` + +### Run FoxOps Without Gitlab + +For the very first steps (development only), foxops can also be run without a Gitlab instance connected. This is not only useful for getting started quickly, but also when running tests in other systems that require a running foxops instance. + +Be aware that Gitlab in this is the replaced with a very basic implementation that creates all repositories/merge requests in a local folder. There is no UI for viewing or interacting with these repositories, except for your file browser. ```bash -export GITLAB_TOKEN=my-gitlab-token +# put a folder here where foxops should create the repositories +export FOXOPS_HOSTER_LOCAL_DIRECTORY=/tmp/foxops_hoster docker run --rm -p 8000:8000 -v "$(pwd)"/foxops_db:/database \ -e FOXOPS_DATABASE_URL=sqlite+aiosqlite:////database/foxops.sqlite \ -e FOXOPS_STATIC_TOKEN=dummy-token \ - -e FOXOPS_GITLAB_ADDRESS=https://gitlab.com/api/v4 \ - -e FOXOPS_GITLAB_TOKEN=$GITLAB_TOKEN \ + -e FOXOPS_HOSTER_TYPE=local \ + -e FOXOPS_HOSTER_LOCAL_DIRECTORY=$FOXOPS_HOSTER_LOCAL_DIRECTORY \ ghcr.io/roche/foxops:latest ``` +```{note} +In this setup, your template repositories must also be placed in the `/tmp/foxops_hoster` folder. 
Place the git repositories of your templates in a folder called `/tmp/foxops_hoster/<repository-name>/git` to make them usable by foxops.
`https://gitlab.com`) (if hoster type is set to `gitlab`) + * `FOXOPS_HOSTER_GITLAB_TOKEN` - Set to a GitLab access token that has access to all repositories (incarnations & templates) that foxops should manage (if hoster type is set to `gitlab`) #### Kubernetes Example @@ -109,9 +140,11 @@ spec: - name: foxops image: ghcr.io/roche/foxops:v2.0.0 env: - - name: FOXOPS_GITLAB_ADDRESS - value: https://gitlab.com/api/v4 - - name: FOXOPS_GITLAB_TOKEN + - name: FOXOPS_HOSTER_TYPE + value: gitlab + - name: FOXOPS_HOSTER_GITLAB_ADDRESS + value: https://gitlab.com + - name: FOXOPS_HOSTER_GITLAB_TOKEN value: - name: FOXOPS_STATIC_TOKEN value: diff --git a/poetry.lock b/poetry.lock index a2aa4d3d..880094e4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -220,7 +220,6 @@ files = [ ] [package.dependencies] -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" @@ -347,34 +346,14 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.7.0" +version = "23.9.0" description = "The uncompromising code formatter." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, - {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, - {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, - {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, - {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, - {file = 
"black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, - {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, - {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, - {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, - {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, - {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, + {file = "black-23.9.0-py3-none-any.whl", hash = "sha256:9366c1f898981f09eb8da076716c02fd021f5a0e63581c66501d68a2e4eab844"}, + {file = "black-23.9.0.tar.gz", hash = "sha256:3511c8a7e22ce653f89ae90dfddaf94f3bb7e2587a245246572d3b9c92adf066"}, ] [package.dependencies] @@ -384,7 +363,6 @@ mypy-extensions = ">=0.4.3" packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -547,69 +525,66 @@ files = [ [[package]] name = "coverage" -version = "7.3.0" +version = "7.3.1" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = 
"coverage-7.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db76a1bcb51f02b2007adacbed4c88b6dee75342c37b05d1822815eed19edee5"}, - {file = "coverage-7.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c02cfa6c36144ab334d556989406837336c1d05215a9bdf44c0bc1d1ac1cb637"}, - {file = "coverage-7.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477c9430ad5d1b80b07f3c12f7120eef40bfbf849e9e7859e53b9c93b922d2af"}, - {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce2ee86ca75f9f96072295c5ebb4ef2a43cecf2870b0ca5e7a1cbdd929cf67e1"}, - {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68d8a0426b49c053013e631c0cdc09b952d857efa8f68121746b339912d27a12"}, - {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3eb0c93e2ea6445b2173da48cb548364f8f65bf68f3d090404080d338e3a689"}, - {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:90b6e2f0f66750c5a1178ffa9370dec6c508a8ca5265c42fbad3ccac210a7977"}, - {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:96d7d761aea65b291a98c84e1250cd57b5b51726821a6f2f8df65db89363be51"}, - {file = "coverage-7.3.0-cp310-cp310-win32.whl", hash = "sha256:63c5b8ecbc3b3d5eb3a9d873dec60afc0cd5ff9d9f1c75981d8c31cfe4df8527"}, - {file = "coverage-7.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:97c44f4ee13bce914272589b6b41165bbb650e48fdb7bd5493a38bde8de730a1"}, - {file = "coverage-7.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74c160285f2dfe0acf0f72d425f3e970b21b6de04157fc65adc9fd07ee44177f"}, - {file = "coverage-7.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b543302a3707245d454fc49b8ecd2c2d5982b50eb63f3535244fd79a4be0c99d"}, - {file = "coverage-7.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ad0f87826c4ebd3ef484502e79b39614e9c03a5d1510cfb623f4a4a051edc6fd"}, - {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13c6cbbd5f31211d8fdb477f0f7b03438591bdd077054076eec362cf2207b4a7"}, - {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac440c43e9b479d1241fe9d768645e7ccec3fb65dc3a5f6e90675e75c3f3e3a"}, - {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3c9834d5e3df9d2aba0275c9f67989c590e05732439b3318fa37a725dff51e74"}, - {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4c8e31cf29b60859876474034a83f59a14381af50cbe8a9dbaadbf70adc4b214"}, - {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7a9baf8e230f9621f8e1d00c580394a0aa328fdac0df2b3f8384387c44083c0f"}, - {file = "coverage-7.3.0-cp311-cp311-win32.whl", hash = "sha256:ccc51713b5581e12f93ccb9c5e39e8b5d4b16776d584c0f5e9e4e63381356482"}, - {file = "coverage-7.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:887665f00ea4e488501ba755a0e3c2cfd6278e846ada3185f42d391ef95e7e70"}, - {file = "coverage-7.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d000a739f9feed900381605a12a61f7aaced6beae832719ae0d15058a1e81c1b"}, - {file = "coverage-7.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59777652e245bb1e300e620ce2bef0d341945842e4eb888c23a7f1d9e143c446"}, - {file = "coverage-7.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9737bc49a9255d78da085fa04f628a310c2332b187cd49b958b0e494c125071"}, - {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5247bab12f84a1d608213b96b8af0cbb30d090d705b6663ad794c2f2a5e5b9fe"}, - {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e2ac9a1de294773b9fa77447ab7e529cf4fe3910f6a0832816e5f3d538cfea9a"}, - {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:85b7335c22455ec12444cec0d600533a238d6439d8d709d545158c1208483873"}, - {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:36ce5d43a072a036f287029a55b5c6a0e9bd73db58961a273b6dc11a2c6eb9c2"}, - {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:211a4576e984f96d9fce61766ffaed0115d5dab1419e4f63d6992b480c2bd60b"}, - {file = "coverage-7.3.0-cp312-cp312-win32.whl", hash = "sha256:56afbf41fa4a7b27f6635bc4289050ac3ab7951b8a821bca46f5b024500e6321"}, - {file = "coverage-7.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:7f297e0c1ae55300ff688568b04ff26b01c13dfbf4c9d2b7d0cb688ac60df479"}, - {file = "coverage-7.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac0dec90e7de0087d3d95fa0533e1d2d722dcc008bc7b60e1143402a04c117c1"}, - {file = "coverage-7.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:438856d3f8f1e27f8e79b5410ae56650732a0dcfa94e756df88c7e2d24851fcd"}, - {file = "coverage-7.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1084393c6bda8875c05e04fce5cfe1301a425f758eb012f010eab586f1f3905e"}, - {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49ab200acf891e3dde19e5aa4b0f35d12d8b4bd805dc0be8792270c71bd56c54"}, - {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67e6bbe756ed458646e1ef2b0778591ed4d1fcd4b146fc3ba2feb1a7afd4254"}, - {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f39c49faf5344af36042b293ce05c0d9004270d811c7080610b3e713251c9b0"}, - {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7df91fb24c2edaabec4e0eee512ff3bc6ec20eb8dccac2e77001c1fe516c0c84"}, - {file = 
"coverage-7.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:34f9f0763d5fa3035a315b69b428fe9c34d4fc2f615262d6be3d3bf3882fb985"}, - {file = "coverage-7.3.0-cp38-cp38-win32.whl", hash = "sha256:bac329371d4c0d456e8d5f38a9b0816b446581b5f278474e416ea0c68c47dcd9"}, - {file = "coverage-7.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b859128a093f135b556b4765658d5d2e758e1fae3e7cc2f8c10f26fe7005e543"}, - {file = "coverage-7.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed8d310afe013db1eedd37176d0839dc66c96bcfcce8f6607a73ffea2d6ba"}, - {file = "coverage-7.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61260ec93f99f2c2d93d264b564ba912bec502f679793c56f678ba5251f0393"}, - {file = "coverage-7.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97af9554a799bd7c58c0179cc8dbf14aa7ab50e1fd5fa73f90b9b7215874ba28"}, - {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3558e5b574d62f9c46b76120a5c7c16c4612dc2644c3d48a9f4064a705eaee95"}, - {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37d5576d35fcb765fca05654f66aa71e2808d4237d026e64ac8b397ffa66a56a"}, - {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07ea61bcb179f8f05ffd804d2732b09d23a1238642bf7e51dad62082b5019b34"}, - {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:80501d1b2270d7e8daf1b64b895745c3e234289e00d5f0e30923e706f110334e"}, - {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4eddd3153d02204f22aef0825409091a91bf2a20bce06fe0f638f5c19a85de54"}, - {file = "coverage-7.3.0-cp39-cp39-win32.whl", hash = "sha256:2d22172f938455c156e9af2612650f26cceea47dc86ca048fa4e0b2d21646ad3"}, - {file = "coverage-7.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:60f64e2007c9144375dd0f480a54d6070f00bb1a28f65c408370544091c9bc9e"}, - {file = 
"coverage-7.3.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:5492a6ce3bdb15c6ad66cb68a0244854d9917478877a25671d70378bdc8562d0"}, - {file = "coverage-7.3.0.tar.gz", hash = "sha256:49dbb19cdcafc130f597d9e04a29d0a032ceedf729e41b181f51cd170e6ee865"}, + {file = "coverage-7.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd0f7429ecfd1ff597389907045ff209c8fdb5b013d38cfa7c60728cb484b6e3"}, + {file = "coverage-7.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:966f10df9b2b2115da87f50f6a248e313c72a668248be1b9060ce935c871f276"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0575c37e207bb9b98b6cf72fdaaa18ac909fb3d153083400c2d48e2e6d28bd8e"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:245c5a99254e83875c7fed8b8b2536f040997a9b76ac4c1da5bff398c06e860f"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c96dd7798d83b960afc6c1feb9e5af537fc4908852ef025600374ff1a017392"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:de30c1aa80f30af0f6b2058a91505ea6e36d6535d437520067f525f7df123887"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50dd1e2dd13dbbd856ffef69196781edff26c800a74f070d3b3e3389cab2600d"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9c0c19f70d30219113b18fe07e372b244fb2a773d4afde29d5a2f7930765136"}, + {file = "coverage-7.3.1-cp310-cp310-win32.whl", hash = "sha256:770f143980cc16eb601ccfd571846e89a5fe4c03b4193f2e485268f224ab602f"}, + {file = "coverage-7.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdd088c00c39a27cfa5329349cc763a48761fdc785879220d54eb785c8a38520"}, + {file = "coverage-7.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74bb470399dc1989b535cb41f5ca7ab2af561e40def22d7e188e0a445e7639e3"}, + {file = 
"coverage-7.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:025ded371f1ca280c035d91b43252adbb04d2aea4c7105252d3cbc227f03b375"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6191b3a6ad3e09b6cfd75b45c6aeeffe7e3b0ad46b268345d159b8df8d835f9"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb0b188f30e41ddd659a529e385470aa6782f3b412f860ce22b2491c89b8593"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c8f0df9dfd8ff745bccff75867d63ef336e57cc22b2908ee725cc552689ec8"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7eb3cd48d54b9bd0e73026dedce44773214064be93611deab0b6a43158c3d5a0"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ac3c5b7e75acac31e490b7851595212ed951889918d398b7afa12736c85e13ce"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b4ee7080878077af0afa7238df1b967f00dc10763f6e1b66f5cced4abebb0a3"}, + {file = "coverage-7.3.1-cp311-cp311-win32.whl", hash = "sha256:229c0dd2ccf956bf5aeede7e3131ca48b65beacde2029f0361b54bf93d36f45a"}, + {file = "coverage-7.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6f55d38818ca9596dc9019eae19a47410d5322408140d9a0076001a3dcb938c"}, + {file = "coverage-7.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5289490dd1c3bb86de4730a92261ae66ea8d44b79ed3cc26464f4c2cde581fbc"}, + {file = "coverage-7.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca833941ec701fda15414be400c3259479bfde7ae6d806b69e63b3dc423b1832"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd694e19c031733e446c8024dedd12a00cda87e1c10bd7b8539a87963685e969"}, + {file = 
"coverage-7.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aab8e9464c00da5cb9c536150b7fbcd8850d376d1151741dd0d16dfe1ba4fd26"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d38444efffd5b056fcc026c1e8d862191881143c3aa80bb11fcf9dca9ae204"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8a07b692129b8a14ad7a37941a3029c291254feb7a4237f245cfae2de78de037"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2829c65c8faaf55b868ed7af3c7477b76b1c6ebeee99a28f59a2cb5907a45760"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f111a7d85658ea52ffad7084088277135ec5f368457275fc57f11cebb15607f"}, + {file = "coverage-7.3.1-cp312-cp312-win32.whl", hash = "sha256:c397c70cd20f6df7d2a52283857af622d5f23300c4ca8e5bd8c7a543825baa5a"}, + {file = "coverage-7.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:5ae4c6da8b3d123500f9525b50bf0168023313963e0e2e814badf9000dd6ef92"}, + {file = "coverage-7.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca70466ca3a17460e8fc9cea7123c8cbef5ada4be3140a1ef8f7b63f2f37108f"}, + {file = "coverage-7.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2781fd3cabc28278dc982a352f50c81c09a1a500cc2086dc4249853ea96b981"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6407424621f40205bbe6325686417e5e552f6b2dba3535dd1f90afc88a61d465"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04312b036580ec505f2b77cbbdfb15137d5efdfade09156961f5277149f5e344"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9ad38204887349853d7c313f53a7b1c210ce138c73859e925bc4e5d8fc18e7"}, + {file = 
"coverage-7.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:53669b79f3d599da95a0afbef039ac0fadbb236532feb042c534fbb81b1a4e40"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:614f1f98b84eb256e4f35e726bfe5ca82349f8dfa576faabf8a49ca09e630086"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1a317fdf5c122ad642db8a97964733ab7c3cf6009e1a8ae8821089993f175ff"}, + {file = "coverage-7.3.1-cp38-cp38-win32.whl", hash = "sha256:defbbb51121189722420a208957e26e49809feafca6afeef325df66c39c4fdb3"}, + {file = "coverage-7.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:f4f456590eefb6e1b3c9ea6328c1e9fa0f1006e7481179d749b3376fc793478e"}, + {file = "coverage-7.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12d8b11a54f32688b165fd1a788c408f927b0960984b899be7e4c190ae758f1"}, + {file = "coverage-7.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f09195dda68d94a53123883de75bb97b0e35f5f6f9f3aa5bf6e496da718f0cb6"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6601a60318f9c3945be6ea0f2a80571f4299b6801716f8a6e4846892737ebe4"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d156269718670d00a3b06db2288b48527fc5f36859425ff7cec07c6b367745"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:636a8ac0b044cfeccae76a36f3b18264edcc810a76a49884b96dd744613ec0b7"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5d991e13ad2ed3aced177f524e4d670f304c8233edad3210e02c465351f785a0"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:586649ada7cf139445da386ab6f8ef00e6172f11a939fc3b2b7e7c9082052fa0"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4aba512a15a3e1e4fdbfed2f5392ec221434a614cc68100ca99dcad7af29f3f8"}, + 
{file = "coverage-7.3.1-cp39-cp39-win32.whl", hash = "sha256:6bc6f3f4692d806831c136c5acad5ccedd0262aa44c087c46b7101c77e139140"}, + {file = "coverage-7.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:553d7094cb27db58ea91332e8b5681bac107e7242c23f7629ab1316ee73c4981"}, + {file = "coverage-7.3.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:220eb51f5fb38dfdb7e5d54284ca4d0cd70ddac047d750111a68ab1798945194"}, + {file = "coverage-7.3.1.tar.gz", hash = "sha256:6cb7fe1581deb67b782c153136541e20901aa312ceedaf1467dcb35255787952"}, ] -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - [package.extras] toml = ["tomli"] @@ -626,19 +601,19 @@ files = [ ] [[package]] -name = "exceptiongroup" -version = "1.1.3" -description = "Backport of PEP 654 (exception groups)" -category = "main" +name = "execnet" +version = "2.0.2" +description = "execnet: rapid multi-Python deployment" +category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, + {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, + {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, ] [package.extras] -test = ["pytest (>=6)"] +testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "fastapi" @@ -770,14 +745,14 @@ files = [ [[package]] name = "furo" -version = "2023.8.19" +version = "2023.9.10" description = "A clean customisable Sphinx documentation theme." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "furo-2023.8.19-py3-none-any.whl", hash = "sha256:12f99f87a1873b6746228cfde18f77244e6c1ffb85d7fed95e638aae70d80590"}, - {file = "furo-2023.8.19.tar.gz", hash = "sha256:e671ee638ab3f1b472f4033b0167f502ab407830e0db0f843b1c1028119c9cd1"}, + {file = "furo-2023.9.10-py3-none-any.whl", hash = "sha256:513092538537dc5c596691da06e3c370714ec99bc438680edc1debffb73e5bfc"}, + {file = "furo-2023.9.10.tar.gz", hash = "sha256:5707530a476d2a63b8cad83b4f961f3739a69f4b058bcf38a03a39fa537195b2"}, ] [package.dependencies] @@ -1278,7 +1253,6 @@ files = [ [package.dependencies] mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = ">=4.1.0" [package.extras] @@ -1440,6 +1414,7 @@ files = [ ] [package.dependencies] +python-dotenv = {version = ">=0.10.4", optional = true, markers = "extra == \"dotenv\""} typing-extensions = ">=4.2.0" [package.extras] @@ -1475,23 +1450,21 @@ plugins = ["importlib-metadata"] [[package]] name = "pytest" -version = "7.4.1" +version = "7.4.2" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.1-py3-none-any.whl", hash = "sha256:460c9a59b14e27c602eb5ece2e47bec99dc5fc5f6513cf924a7d03a578991b1f"}, - {file = "pytest-7.4.1.tar.gz", hash = "sha256:2f2301e797521b23e4d2585a0a3d7b5e50fdddaaf7e7d6773ea26ddb17c213ab"}, + {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, + {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = 
"python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] @@ -1532,25 +1505,6 @@ pytest = ">=7.0.0" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] -[[package]] -name = "pytest-cov" -version = "4.1.0" -description = "Pytest plugin for measuring coverage." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, -] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - [[package]] name = "pytest-mock" version = "3.11.1" @@ -1584,6 +1538,27 @@ files = [ [package.dependencies] pytest = "*" +[[package]] +name = "pytest-xdist" +version = "3.3.1" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"}, + {file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"}, +] + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=6.2.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + [[package]] name = "python-dateutil" version = "2.8.2" @@ -1599,6 +1574,21 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-dotenv" +version = 
"1.0.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, + {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + [[package]] name = "pyyaml" version = "6.0.1" @@ -2153,18 +2143,6 @@ files = [ [package.extras] doc = ["reno", "sphinx", "tornado (>=4.5)"] -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - [[package]] name = "tornado" version = "6.3.3" @@ -2253,7 +2231,6 @@ files = [ [package.dependencies] click = ">=7.0" h11 = ">=0.8" -typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [package.extras] standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] @@ -2348,5 +2325,5 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" -python-versions = ">=3.10,<4.0" -content-hash = "7f910e328b48e0ef48eb17668e1a0f9347d613d6cff02b3ee9cab59c4b81595e" +python-versions = ">=3.11,<4.0" +content-hash = "2b1e5e46a667623e470e2e3b705aaf129494c0c46f545d93dd049d3b37c3b4b6" diff --git a/pyproject.toml b/pyproject.toml index bd067817..5cd0e33c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ fastapi = "^0.103.1" # Auxiliary httpx = "^0.24.1" tenacity = "^8.2.1" -pydantic = "^1.9.0" +pydantic = {version = 
"1.10.12", extras = ["dotenv"]} structlog = "^23.1.0" aiopath = "^0.6.10" @@ -49,7 +49,6 @@ isort = "^5.9.3" # Testing pytest = "^7.1.3" -pytest-cov = "^4.0.0" pytest-asyncio = "^0.21.0" pytest-mock = "^3.10.0" pytest-randomly = "^3.12.0" @@ -70,6 +69,8 @@ pytest-alembic = "^0.10.0" [tool.poetry.group.dev.dependencies] black = {extras = ["d"], version = "^23.3.0"} +coverage = "^7.3.1" +pytest-xdist = "^3.3.1" [tool.black] line-length = 120 @@ -78,9 +79,6 @@ line-length = 120 profile = "black" [tool.pytest.ini_options] -addopts = [ - '--cov=foxops', -] filterwarnings = [ 'ignore:directory "/var/run/secrets/foxops" does not exist' ] @@ -91,11 +89,20 @@ markers = [ "db: tests involving the database", ] asyncio_mode = "auto" -python_functions = "should_* test_*" -[[tool.mypy.overrides]] -module = "aiopath" -ignore_missing_imports = true +[tool.mypy] +plugins = [ + "pydantic.mypy", +] +overrides = [ + {module = "aiopath", ignore_missing_imports = true} +] + +[tool.coverage.run] +include = ["src/**"] + +[tool.coverage.report] +fail_under = 80 [tool.poetry-dynamic-versioning] enable = true diff --git a/src/foxops/__main__.py b/src/foxops/__main__.py index 249485eb..ecd2410e 100644 --- a/src/foxops/__main__.py +++ b/src/foxops/__main__.py @@ -3,13 +3,7 @@ from fastapi.staticfiles import StaticFiles from starlette.responses import FileResponse -from foxops import __version__ -from foxops.dependencies import ( - get_hoster, - get_hoster_settings, - get_settings, - static_token_auth_scheme, -) +from foxops.dependencies import get_settings, static_token_auth_scheme from foxops.error_handlers import __error_handlers__ from foxops.logger import get_logger, setup_logging from foxops.middlewares import request_id_middleware, request_time_middleware @@ -26,19 +20,10 @@ def create_app(): - app = FastAPI() - settings = get_settings() + setup_logging(level=settings.log_level) - @app.on_event("startup") - async def startup(): - # validate hoster - hoster = 
get_hoster(get_hoster_settings()) - await hoster.validate() - - setup_logging(level=settings.log_level) - - logger.info(f"Started foxops {__version__}") + app = FastAPI() # Add middlewares app.middleware("http")(request_id_middleware) diff --git a/src/foxops/dependencies.py b/src/foxops/dependencies.py index 0949d27b..5a255809 100644 --- a/src/foxops/dependencies.py +++ b/src/foxops/dependencies.py @@ -1,4 +1,4 @@ -from functools import lru_cache +from typing import Annotated from fastapi import Depends, HTTPException, Request, status from fastapi.openapi.models import APIKey, APIKeyIn @@ -7,42 +7,77 @@ from foxops.database.repositories.change import ChangeRepository from foxops.database.repositories.incarnation.repository import IncarnationRepository -from foxops.hosters import Hoster, HosterSettings -from foxops.hosters.gitlab import GitLab, GitLabSettings, get_gitlab_settings +from foxops.hosters import Hoster +from foxops.hosters.gitlab import GitlabHoster +from foxops.hosters.local import LocalHoster +from foxops.logger import get_logger from foxops.services.change import ChangeService from foxops.services.incarnation import IncarnationService -from foxops.settings import DatabaseSettings, Settings +from foxops.settings import ( + DatabaseSettings, + GitlabHosterSettings, + HosterType, + LocalHosterSettings, + Settings, +) -# NOTE: Yes, you may absolutely use proper dependency injection at some point. 
+logger = get_logger(__name__) -#: Holds a singleton of the database engine -async_engine: AsyncEngine | None = None - -@lru_cache def get_settings() -> Settings: return Settings() # type: ignore -@lru_cache def get_database_settings() -> DatabaseSettings: return DatabaseSettings() -@lru_cache -def get_hoster_settings() -> HosterSettings: - return GitLabSettings() # type: ignore +###### +# Global Dependencies (those that are only created once and then cached for later requests) +###### -def get_database_engine(settings: DatabaseSettings = Depends(get_database_settings)) -> AsyncEngine: - global async_engine +def get_database_engine(request: Request, settings: DatabaseSettings = Depends(get_database_settings)) -> AsyncEngine: + if hasattr(request.app.state, "database"): + return request.app.state.database - if async_engine is None: - async_engine = create_async_engine(settings.url.get_secret_value(), future=True, echo=False, pool_pre_ping=True) + async_engine = create_async_engine(settings.url.get_secret_value(), future=True, echo=False, pool_pre_ping=True) + request.app.state.database = async_engine return async_engine +def get_hoster(request: Request, settings: Annotated[Settings, Depends(get_settings)]) -> Hoster: + if hasattr(request.app.state, "hoster"): + return request.app.state.hoster + + hoster: Hoster + match settings.hoster_type: + case HosterType.LOCAL: + local_settings = LocalHosterSettings() + + logger.warning( + "Using local hoster. 
This is for DEVELOPMENT use only!", directory=str(local_settings.directory) + ) + + hoster = LocalHoster(local_settings.directory) + case HosterType.GITLAB: + gitlab_settings = GitlabHosterSettings() + logger.info("Using GitLab hoster", address=gitlab_settings.address) + + hoster = GitlabHoster(gitlab_settings.address, gitlab_settings.token.get_secret_value()) + case _: + raise NotImplementedError(f"Unknown hoster type {settings.hoster_type}") + + request.app.state.hoster = hoster + return hoster + + +###### +# Per-Request Dependencies +###### + + def get_incarnation_repository(database_engine: AsyncEngine = Depends(get_database_engine)) -> IncarnationRepository: return IncarnationRepository(database_engine) @@ -51,15 +86,6 @@ def get_change_repository(database_engine: AsyncEngine = Depends(get_database_en return ChangeRepository(database_engine) -def get_hoster(settings: HosterSettings = Depends(get_gitlab_settings)) -> Hoster: - # this assert makes mypy happy - assert isinstance(settings, GitLabSettings) - return GitLab( - address=settings.address, - token=settings.token.get_secret_value(), - ) - - def get_incarnation_service( incarnation_repository: IncarnationRepository = Depends(get_incarnation_repository), hoster: Hoster = Depends(get_hoster), diff --git a/src/foxops/external/git.py b/src/foxops/external/git.py index d39a6cac..b2d94c18 100644 --- a/src/foxops/external/git.py +++ b/src/foxops/external/git.py @@ -34,7 +34,8 @@ def __init__(self, ref: bytes): GIT_ERROR_ORACLE = { re.compile( - rb"hint: Updates were rejected because the remote contains work that you do\nhint: not have locally." 
+ rb"hint: Updates were rejected because the remote contains work that you do.*not.*have locally.", + re.DOTALL, ): RebaseRequiredError, re.compile(rb"hint: Updates were rejected because the tip of your current branch is behind"): RebaseRequiredError, re.compile(rb"error: cannot lock ref '.*': is at [a-f0-9]+ but expected [a-f0-9]+"): RebaseRequiredError, diff --git a/src/foxops/hosters/__init__.py b/src/foxops/hosters/__init__.py index db9a3aa3..e8d75928 100644 --- a/src/foxops/hosters/__init__.py +++ b/src/foxops/hosters/__init__.py @@ -1,7 +1,6 @@ from foxops.hosters.types import ( # noqa: F401 GitSha, Hoster, - HosterSettings, MergeRequestId, ReconciliationStatus, ) diff --git a/src/foxops/hosters/gitlab/gitlab.py b/src/foxops/hosters/gitlab.py similarity index 99% rename from src/foxops/hosters/gitlab/gitlab.py rename to src/foxops/hosters/gitlab.py index 221aeb14..f9593367 100644 --- a/src/foxops/hosters/gitlab/gitlab.py +++ b/src/foxops/hosters/gitlab.py @@ -67,7 +67,7 @@ def evaluate_gitlab_address(address: str) -> tuple[str, str]: return address, f"{address}/api/v4" -class GitLab(Hoster): +class GitlabHoster(Hoster): """REST API client for GitLab""" def __init__(self, address: str, token: str): diff --git a/src/foxops/hosters/gitlab/__init__.py b/src/foxops/hosters/gitlab/__init__.py deleted file mode 100644 index 45a44b3f..00000000 --- a/src/foxops/hosters/gitlab/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .gitlab import GitLab # noqa: F401 -from .settings import GitLabSettings, get_gitlab_settings # noqa: F401 diff --git a/src/foxops/hosters/gitlab/settings.py b/src/foxops/hosters/gitlab/settings.py deleted file mode 100644 index 60775ebc..00000000 --- a/src/foxops/hosters/gitlab/settings.py +++ /dev/null @@ -1,21 +0,0 @@ -from functools import cache - -from pydantic import SecretStr - -from foxops.hosters import HosterSettings - - -class GitLabSettings(HosterSettings): - """Gitlab specific settings.""" - - address: str - token: SecretStr - - class 
Config: - env_prefix: str = "foxops_gitlab_" - secrets_dir: str = "/var/run/secrets/foxops" - - -@cache -def get_gitlab_settings() -> GitLabSettings: - return GitLabSettings() # type: ignore diff --git a/src/foxops/hosters/local.py b/src/foxops/hosters/local.py index be7d697f..23341bec 100644 --- a/src/foxops/hosters/local.py +++ b/src/foxops/hosters/local.py @@ -1,10 +1,9 @@ import re import tempfile -from collections import defaultdict from contextlib import asynccontextmanager from datetime import timedelta from pathlib import Path -from typing import AsyncIterator +from typing import AsyncIterator, Iterator from pydantic import BaseModel @@ -26,11 +25,53 @@ class MergeRequest(BaseModel): status: MergeRequestStatus +class MergeRequestManager: + def __init__(self, directory: Path): + self.directory = directory + + # adding [0] to the max() call ensures that if the directory is empty, the next id will be 1 + self._next_id = 1 + max([0] + [int(p.name.removesuffix(".json")) for p in self.directory.glob("*.json")]) + + def add(self, title: str, description: str, source_branch: str) -> int: + mr = MergeRequest( + id=self._next_id, + title=title, + description=description, + source_branch=source_branch, + target_branch="main", + status=MergeRequestStatus.OPEN, + ) + + self._mr_file_path(mr.id).write_text(mr.json()) + self._next_id += 1 + + return mr.id + + def delete(self, id_: int) -> None: + self._mr_file_path(id_).unlink() + + def get(self, id_: int) -> MergeRequest: + return MergeRequest.parse_file(self._mr_file_path(id_)) + + def update_status(self, id_: int, status: MergeRequestStatus) -> None: + mr = self.get(id_) + mr.status = status + self._mr_file_path(mr.id).write_text(mr.json()) + + def __iter__(self) -> Iterator[MergeRequest]: + yield from [MergeRequest.parse_file(p) for p in self.directory.glob("*.json")] + + def _mr_file_path(self, id_: int) -> Path: + return self.directory / f"{id_}.json" + + class LocalHoster(Hoster): + GIT_PATH = "git" + 
MERGE_REQUESTS_PATH = "merge_requests" + def __init__(self, directory: Path, push_delay_seconds: int = 0): self.directory = directory - self._merge_requests: dict[str, list[MergeRequest]] = defaultdict(list) self.push_delay_seconds = push_delay_seconds async def validate(self) -> None: @@ -43,12 +84,12 @@ async def create_repository(self, repository: str) -> None: if not re.fullmatch(r"^[a-z0-9-_]+$", repository): raise ValueError("Invalid repository name, must only contain lowercase letters, numbers and dashes.") - git_directory = self.directory / repository - git_directory.mkdir(parents=False, exist_ok=False) + self._repo_path(repository).mkdir(parents=True, exist_ok=False) + self._mr_path(repository).mkdir(parents=True, exist_ok=False) # we're creating a bare repository because "clients" of this hoster will be cloning and pushing to it # git doesn't allow pushing to the checked-out branch of a repository - await git_exec("init", "--bare", cwd=git_directory) + await git_exec("init", "--bare", cwd=self._repo_path(repository)) async def get_incarnation_state( self, incarnation_repository: str, target_directory: str @@ -67,34 +108,25 @@ async def get_incarnation_state( async def merge_request( self, *, incarnation_repository: str, source_branch: str, title: str, description: str, with_automerge=False ) -> tuple[GitSha, MergeRequestId]: - existing_merge_requests = self._merge_requests[incarnation_repository] - - new_merge_request = MergeRequest( - id=len(existing_merge_requests), - title=title, - description=description, - source_branch=source_branch, - target_branch="main", - status=MergeRequestStatus.OPEN, - ) + mr_manager = self._mr_manager(incarnation_repository) commit_id = await self.has_pending_incarnation_branch(incarnation_repository, source_branch) if commit_id is None: raise ValueError("Branch does not exist") - existing_merge_requests.append(new_merge_request) + mr_id = mr_manager.add(title, description, source_branch) if with_automerge: - await 
self.merge_merge_request(incarnation_repository, str(new_merge_request.id)) + await self.merge_merge_request(incarnation_repository, str(mr_id)) - return commit_id, str(new_merge_request.id) + return commit_id, str(mr_id) def get_merge_request(self, incarnation_repository: str, merge_request_id: str) -> MergeRequest: - return self._merge_requests[incarnation_repository][int(merge_request_id)] + return self._mr_manager(incarnation_repository).get(int(merge_request_id)) def close_merge_request(self, incarnation_repository: str, merge_request_id: str) -> None: mr = self.get_merge_request(incarnation_repository, merge_request_id) - mr.status = MergeRequestStatus.CLOSED + self._mr_manager(incarnation_repository).update_status(mr.id, MergeRequestStatus.CLOSED) async def merge_merge_request(self, incarnation_repository: str, merge_request_id: str): mr = self.get_merge_request(incarnation_repository, merge_request_id) @@ -104,7 +136,7 @@ async def merge_merge_request(self, incarnation_repository: str, merge_request_i await repo.merge(f"origin/{mr.source_branch}", ff_only=False) await repo.push() - mr.status = MergeRequestStatus.MERGED + self._mr_manager(incarnation_repository).update_status(mr.id, MergeRequestStatus.MERGED) @asynccontextmanager async def cloned_repository( @@ -138,7 +170,7 @@ async def cloned_repository( async def has_pending_incarnation_branch(self, project_identifier: str, branch: str) -> GitSha | None: try: - result = await git_exec("rev-parse", f"refs/heads/{branch}", cwd=self.directory / project_identifier) + result = await git_exec("rev-parse", f"refs/heads/{branch}", cwd=self._repo_path(project_identifier)) except GitError as e: if e.message.index("unknown revision or path not in the working tree") >= 0: return None @@ -154,7 +186,7 @@ async def has_pending_incarnation_branch(self, project_identifier: str, branch: async def has_pending_incarnation_merge_request( self, project_identifier: str, branch: str ) -> MergeRequestId | None: - for mr in 
self._merge_requests[project_identifier]: + for mr in self._mr_manager(project_identifier): if mr.source_branch == branch and mr.status == MergeRequestStatus.OPEN: return str(mr.id) @@ -178,7 +210,7 @@ async def get_reconciliation_status( async def does_commit_exist(self, incarnation_repository: str, commit_sha: GitSha) -> bool: try: - result = await git_exec("cat-file", "commit", commit_sha, cwd=self.directory / incarnation_repository) + result = await git_exec("cat-file", "commit", commit_sha, cwd=self._repo_path(incarnation_repository)) except GitError as e: if e.message.index("bad file") >= 0: return False @@ -190,19 +222,19 @@ async def does_commit_exist(self, incarnation_repository: str, commit_sha: GitSh raise RuntimeError(f"Unexpected return code from git cat-file: {result.returncode}") async def get_commit_url(self, incarnation_repository: str, commit_sha: GitSha) -> str: - return f"{self.directory / incarnation_repository}:commit/{commit_sha}" + return f"file://{self._repo_path(incarnation_repository)}:commit/{commit_sha}" async def get_merge_request_url(self, incarnation_repository: str, merge_request_id: str) -> str: - return f"{self.directory / incarnation_repository}:merge_requests/{merge_request_id}" + return f"file://{self._repo_path(incarnation_repository)}:merge_requests/{merge_request_id}" async def get_merge_request_status(self, incarnation_repository: str, merge_request_id: str) -> MergeRequestStatus: - merge_request_index = int(merge_request_id) + return self.get_merge_request(incarnation_repository, merge_request_id).status - existing_merge_requests = self._merge_requests[incarnation_repository] - if merge_request_index >= len(existing_merge_requests): - raise ValueError("Merge request does not exist") + def _repo_path(self, repository: str) -> Path: + return (self.directory / repository / self.GIT_PATH).absolute() - return existing_merge_requests[merge_request_index].status + def _mr_path(self, repository: str) -> Path: + return 
(self.directory / repository / self.MERGE_REQUESTS_PATH).absolute() - def _repo_path(self, repository: str) -> Path: - return (self.directory / repository).absolute() + def _mr_manager(self, repository: str) -> MergeRequestManager: + return MergeRequestManager(self._mr_path(repository)) diff --git a/src/foxops/hosters/types.py b/src/foxops/hosters/types.py index 734a5f4f..f5397a8d 100644 --- a/src/foxops/hosters/types.py +++ b/src/foxops/hosters/types.py @@ -2,8 +2,6 @@ from enum import Enum from typing import AsyncContextManager, Protocol, TypedDict -from pydantic import BaseSettings - from foxops.engine import IncarnationState from foxops.external.git import GitRepository @@ -82,7 +80,3 @@ async def get_merge_request_url(self, incarnation_repository: str, merge_request async def get_merge_request_status(self, incarnation_repository: str, merge_request_id: str) -> MergeRequestStatus: ... - - -class HosterSettings(BaseSettings): - pass diff --git a/src/foxops/logger.py b/src/foxops/logger.py index 7bcf55d5..55daec59 100644 --- a/src/foxops/logger.py +++ b/src/foxops/logger.py @@ -10,35 +10,6 @@ ) from structlog.types import Processor -shared_processors: Sequence[Processor] = [ - merge_contextvars, - structlog.stdlib.add_log_level, - structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S"), - structlog.stdlib.add_logger_name, -] - -structlog.configure( - processors=shared_processors # type: ignore - + [ - # Prepare event dict for `ProcessorFormatter`. - structlog.stdlib.ProcessorFormatter.wrap_for_formatter, - ], - logger_factory=structlog.stdlib.LoggerFactory(), - cache_logger_on_first_use=True, -) - -formatter = structlog.stdlib.ProcessorFormatter( - # These run ONLY on `logging` entries that do NOT originate within - # structlog. - foreign_pre_chain=shared_processors, - # These run on ALL entries after the pre_chain is done. - processors=[ - # Remove _record & _from_structlog. 
- structlog.stdlib.ProcessorFormatter.remove_processors_meta, - structlog.dev.ConsoleRenderer(), - ], -) - def configure_sqlalchemy_logging(): logging.getLogger("sqlalchemy.engine.Engine").handlers.clear() @@ -63,6 +34,35 @@ def configure_uvicorn_logging(): def setup_logging(level: int | str) -> None: + shared_processors: Sequence[Processor] = [ + merge_contextvars, + structlog.stdlib.add_log_level, + structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S"), + structlog.stdlib.add_logger_name, + ] + + structlog.configure( + processors=shared_processors # type: ignore + + [ + # Prepare event dict for `ProcessorFormatter`. + structlog.stdlib.ProcessorFormatter.wrap_for_formatter, + ], + logger_factory=structlog.stdlib.LoggerFactory(), + cache_logger_on_first_use=True, + ) + + formatter = structlog.stdlib.ProcessorFormatter( + # These run ONLY on `logging` entries that do NOT originate within + # structlog. + foreign_pre_chain=shared_processors, + # These run on ALL entries after the pre_chain is done. + processors=[ + # Remove _record & _from_structlog. 
+ structlog.stdlib.ProcessorFormatter.remove_processors_meta, + structlog.dev.ConsoleRenderer(), + ], + ) + configure_sqlalchemy_logging() configure_uvicorn_logging() diff --git a/src/foxops/settings.py b/src/foxops/settings.py index 1eca72cc..653665b1 100644 --- a/src/foxops/settings.py +++ b/src/foxops/settings.py @@ -1,6 +1,7 @@ +from enum import Enum from pathlib import Path -from pydantic import BaseSettings, SecretStr +from pydantic import BaseSettings, DirectoryPath, SecretStr # extracted database-related settings into a separate class, as they are also needed by alembic @@ -13,11 +14,35 @@ class Config: secrets_dir = "/var/run/secrets/foxops" +class HosterType(Enum): + GITLAB = "gitlab" + LOCAL = "local" + + +class GitlabHosterSettings(BaseSettings): + address: str + token: SecretStr + + class Config: + env_prefix: str = "foxops_hoster_gitlab_" + secrets_dir: str = "/var/run/secrets/foxops" + + +class LocalHosterSettings(BaseSettings): + directory: DirectoryPath + + class Config: + env_prefix: str = "foxops_hoster_local_" + secrets_dir: str = "/var/run/secrets/foxops" + + class Settings(BaseSettings): static_token: SecretStr frontend_dist_dir: Path = Path("ui/dist") log_level: str = "INFO" + hoster_type: HosterType = HosterType.LOCAL + class Config: env_prefix = "foxops_" secrets_dir = "/var/run/secrets/foxops" diff --git a/tests/_plugins/__init__.py b/tests/_plugins/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/_plugins/fixtures_database.py b/tests/_plugins/fixtures_database.py new file mode 100644 index 00000000..e40ce3d0 --- /dev/null +++ b/tests/_plugins/fixtures_database.py @@ -0,0 +1,26 @@ +import pytest +from sqlalchemy import Engine, event +from sqlalchemy.ext.asyncio import create_async_engine + +from foxops.database.schema import meta + + +@pytest.fixture +async def foxops_database(tmp_path): + """Prepares a temporary database for foxops and returns the engine URL""" + + engine_url = 
f"sqlite+aiosqlite:///{str(tmp_path)}/foxops.db" + + # enforce foreign key constraints on SQLite: + # https://docs.sqlalchemy.org/en/20/dialects/sqlite.html#foreign-key-support + @event.listens_for(Engine, "connect") + def set_sqlite_pragma(dbapi_connection, connection_record): + cursor = dbapi_connection.cursor() + cursor.execute("PRAGMA foreign_keys=ON") + cursor.close() + + async_engine = create_async_engine(engine_url, future=True, echo=False, pool_pre_ping=True) + async with async_engine.begin() as conn: + await conn.run_sync(meta.create_all) + + yield engine_url diff --git a/tests/_plugins/fixtures_gitlab.py b/tests/_plugins/fixtures_gitlab.py new file mode 100644 index 00000000..0102f76c --- /dev/null +++ b/tests/_plugins/fixtures_gitlab.py @@ -0,0 +1,68 @@ +import uuid + +import pytest +from httpx import Client, HTTPStatusError, Timeout +from pydantic import BaseSettings, SecretStr + + +class GitlabTestSettings(BaseSettings): + address: str = "https://gitlab.com" + token: SecretStr | None = None + + # "foxops-e2e" group on gitlab.com + root_group_id: int = 73622910 + + class Config: + env_prefix = "FOXOPS_TESTS_GITLAB_" + env_file = ".env.test" + + +@pytest.fixture(scope="session") +def gitlab_settings(): + gitlab_settings = GitlabTestSettings() + if gitlab_settings.token is None: + pytest.skip( + f"No Gitlab token provided (via FOXOPS_TESTS_GITLAB_TOKEN environment variable) " + f"to access group '{gitlab_settings.root_group_id}' on '{gitlab_settings.address}'" + ) + + return gitlab_settings + + +@pytest.fixture(scope="session") +def gitlab_client(gitlab_settings: GitlabTestSettings) -> Client: + assert gitlab_settings.token is not None + + return Client( + base_url=gitlab_settings.address + "/api/v4", + headers={"PRIVATE-TOKEN": gitlab_settings.token.get_secret_value()}, + timeout=Timeout(120), + ) + + +@pytest.fixture(scope="session") +def gitlab_project_factory(gitlab_client: Client, gitlab_settings: GitlabTestSettings): + def _factory(name: str): + 
suffix = str(uuid.uuid4())[:8] + response = gitlab_client.post( + "/projects", json={"name": f"{name}-{suffix}", "namespace_id": gitlab_settings.root_group_id} + ) + response.raise_for_status() + project = response.json() + + created_project_ids.append(project["id"]) + + return project + + created_project_ids: list[int] = [] + + yield _factory + + # cleanup all projects that were created during the test, ignoring those that were already remove in the test + for project_id in created_project_ids: + response = gitlab_client.delete(f"/projects/{project_id}") + try: + response.raise_for_status() + except HTTPStatusError as e: + if e.response.status_code != 404: + raise diff --git a/tests/conftest.py b/tests/conftest.py index 7d6ff4be..256e8fe2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -16,9 +16,19 @@ from foxops.database.repositories.change import ChangeRepository from foxops.database.repositories.incarnation.repository import IncarnationRepository from foxops.database.schema import meta -from foxops.dependencies import get_change_repository, get_incarnation_repository +from foxops.dependencies import ( + get_change_repository, + get_hoster, + get_incarnation_repository, +) +from foxops.hosters.local import LocalHoster from foxops.logger import setup_logging +pytest_plugins = [ + "tests._plugins.fixtures_database", + "tests._plugins.fixtures_gitlab", +] + @pytest.fixture(scope="session", autouse=True) def setup_logging_for_tests(): @@ -65,7 +75,7 @@ def set_sqlite_pragma(dbapi_connection, connection_record): yield async_engine -@pytest.fixture(name="frontend", scope="module", autouse=True) +@pytest.fixture(name="frontend", scope="module") def create_dummy_frontend(tmp_path_factory: pytest.TempPathFactory): frontend_dir = tmp_path_factory.mktemp("frontend") for frontend_subdir in FRONTEND_SUBDIRS: @@ -75,15 +85,10 @@ def create_dummy_frontend(tmp_path_factory: pytest.TempPathFactory): return frontend_dir -@pytest.fixture(scope="module", autouse=True) 
-def set_settings_env(static_api_token: str): - os.environ["FOXOPS_GITLAB_ADDRESS"] = "https://nonsense.com/api/v4" - os.environ["FOXOPS_GITLAB_TOKEN"] = "nonsense" - os.environ["FOXOPS_STATIC_TOKEN"] = static_api_token - - @pytest.fixture(name="app") -def create_foxops_app() -> FastAPI: +def create_foxops_app(static_api_token: str, monkeypatch) -> FastAPI: + monkeypatch.setenv("FOXOPS_STATIC_TOKEN", static_api_token) + return create_app() @@ -97,6 +102,11 @@ async def change_repository(test_async_engine: AsyncEngine) -> ChangeRepository: return ChangeRepository(test_async_engine) +@pytest.fixture +def local_hoster(tmp_path: Path) -> LocalHoster: + return LocalHoster(tmp_path) + + @pytest.fixture(name="static_api_token", scope="session") def get_static_api_token() -> str: return "test-token" @@ -107,9 +117,11 @@ async def create_unauthenticated_client( app: FastAPI, incarnation_repository: IncarnationRepository, change_repository: ChangeRepository, + local_hoster: LocalHoster, ) -> AsyncGenerator[AsyncClient, None]: app.dependency_overrides[get_incarnation_repository] = lambda: incarnation_repository app.dependency_overrides[get_change_repository] = lambda: change_repository + app.dependency_overrides[get_hoster] = lambda: local_hoster async with AsyncClient( app=app, diff --git a/tests/e2e/assertions.py b/tests/e2e/assertions.py index 6f2304db..d4d0a2e2 100644 --- a/tests/e2e/assertions.py +++ b/tests/e2e/assertions.py @@ -1,10 +1,10 @@ from urllib.parse import quote_plus -from httpx import AsyncClient +from httpx import AsyncClient, Client -async def assert_file_in_repository( - gitlab_test_client: AsyncClient, +def assert_file_in_repository( + gitlab_client: Client, repository: str, file_path: str, file_content: str, @@ -14,7 +14,7 @@ async def assert_file_in_repository( if branch is not None: params["ref"] = branch - response = await gitlab_test_client.get( + response = gitlab_client.get( 
f"/projects/{quote_plus(repository)}/repository/files/{quote_plus(file_path)}/raw", params=params, ) @@ -43,12 +43,12 @@ async def assert_initialization_merge_request_exists( return merge_request["source_branch"] -async def assert_update_merge_request_exists( - gitlab_test_client: AsyncClient, +def assert_update_merge_request_exists( + gitlab_client: Client, repository: str, ): params = {"state": "opened", "target_branch": "main"} - response = await gitlab_test_client.get(f"/projects/{quote_plus(repository)}/merge_requests", params=params) + response = gitlab_client.get(f"/projects/{quote_plus(repository)}/merge_requests", params=params) response.raise_for_status() merge_requests = response.json() @@ -62,11 +62,11 @@ async def assert_update_merge_request_exists( return merge_request["source_branch"] -async def assert_update_merge_request_with_conflicts_exists( - gitlab_test_client: AsyncClient, repository: str, files_with_conflicts: list[str] +def assert_update_merge_request_with_conflicts_exists( + gitlab_client: Client, repository: str, files_with_conflicts: list[str] ): params = {"state": "opened", "target_branch": "main"} - response = await gitlab_test_client.get(f"/projects/{quote_plus(repository)}/merge_requests", params=params) + response = gitlab_client.get(f"/projects/{quote_plus(repository)}/merge_requests", params=params) response.raise_for_status() merge_requests = response.json() @@ -79,9 +79,7 @@ async def assert_update_merge_request_with_conflicts_exists( assert merge_request["title"].startswith("🚧 - CONFLICT: Update to") # Assert that there is a rejection file in the Merge Request changes - response = await gitlab_test_client.get( - f"/projects/{quote_plus(repository)}/merge_requests/{merge_request['iid']}/changes" - ) + response = gitlab_client.get(f"/projects/{quote_plus(repository)}/merge_requests/{merge_request['iid']}/changes") response.raise_for_status() changes = response.json()["changes"] diff --git a/tests/e2e/conftest.py 
b/tests/e2e/conftest.py index de289f1a..d2842a1f 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -1,128 +1,117 @@ import base64 -import os -import uuid +from typing import Callable from urllib.parse import quote_plus -import httpx import pytest -from httpx import AsyncClient, Client, Timeout +from httpx import AsyncClient, Client -#: Holds default settings for the GitLab test instance -#: can be overriden by environment variable -GITLAB_ADDRESS = "http://127.0.0.1:5002/api/v4" -GITLAB_ADMIN_TOKEN = "ACCTEST1234567890123" +from foxops.__main__ import create_app +from tests._plugins.fixtures_gitlab import GitlabTestSettings @pytest.fixture(scope="session") -def gitlab_test_address() -> str: - return os.environ.get("GITLAB_ADDRESS", GITLAB_ADDRESS) +def gitlab_template_repository(gitlab_client: Client, gitlab_project_factory: Callable[[str], dict]) -> str: + project = gitlab_project_factory("template") + ( + gitlab_client.post( + f"/projects/{project['id']}/repository/files/{quote_plus('fengine.yaml')}", + json={ + "encoding": "base64", + "content": base64.b64encode( + b""" +variables: + name: + type: str + description: The name of the person -@pytest.fixture(scope="session") -def gitlab_test_admin_token() -> str: - return os.environ.get("GITLAB_ADMIN_TOKEN", GITLAB_ADMIN_TOKEN) - - -@pytest.fixture(scope="session", name="gitlab_test_user_token") -def create_gitlab_test_user(test_run_id: str, gitlab_test_address: str, gitlab_test_admin_token: str): - client = Client( - base_url=gitlab_test_address, headers={"PRIVATE-TOKEN": gitlab_test_admin_token}, timeout=Timeout(120) - ) - - test_user_name = f"foxops-test-{test_run_id}" - response = client.post( - "/users", - json={ - "name": test_user_name, - "username": test_user_name, - "password": str(uuid.uuid4()), - "email": f"{test_user_name}@foxops.io", - "skip_confirmation": True, - }, - ) - response.raise_for_status() - user_id = response.json()["id"] + age: + type: int + description: The age of the 
person +""" + ).decode("utf-8"), + "commit_message": "Initial commit", + "branch": project["default_branch"], + }, + ) + ).raise_for_status() - try: - response = client.post( - f"/users/{user_id}/personal_access_tokens", + # VERSION v1.0.0 + ( + gitlab_client.post( + f"/projects/{project['id']}/repository/files/{quote_plus('template/README.md')}", json={ - "name": test_user_name, - "scopes": ["api", "read_repository", "write_repository"], + "encoding": "base64", + "content": base64.b64encode(b"{{ name }} is of age {{ age }}").decode("utf-8"), + "commit_message": "Add template README", + "branch": project["default_branch"], }, ) - response.raise_for_status() - test_user_token = response.json()["token"] - - yield test_user_token - finally: - response = client.delete(f"/users/{user_id}") - response.raise_for_status() - - -@pytest.fixture(scope="session", autouse=True) -def set_settings_env(gitlab_test_address: str, gitlab_test_user_token: str, static_api_token: str): - os.environ["FOXOPS_GITLAB_ADDRESS"] = gitlab_test_address - os.environ["FOXOPS_GITLAB_TOKEN"] = gitlab_test_user_token - os.environ["FOXOPS_STATIC_TOKEN"] = static_api_token - + ).raise_for_status() + ( + gitlab_client.post( + f"/projects/{project['id']}/repository/tags", + json={"tag_name": "v1.0.0", "ref": project["default_branch"]}, + ) + ).raise_for_status() -@pytest.fixture(name="gitlab_test_client") -async def create_test_gitlab_client(gitlab_test_address: str, gitlab_test_user_token: str) -> AsyncClient: - return AsyncClient( - base_url=gitlab_test_address, headers={"PRIVATE-TOKEN": gitlab_test_user_token}, timeout=Timeout(120) - ) + # VERSION: v2.0.0 + ( + gitlab_client.put( + f"/projects/{project['id']}/repository/files/{quote_plus('template/README.md')}", + json={ + "encoding": "base64", + "content": base64.b64encode(b"Hello {{ name }}, age: {{ age }}").decode("utf-8"), + "commit_message": "Change template README", + "branch": project["default_branch"], + }, + ) + ).raise_for_status() + ( + 
gitlab_client.post( + f"/projects/{project['id']}/repository/tags", + json={"tag_name": "v2.0.0", "ref": project["default_branch"]}, + ) + ).raise_for_status() + return project["path_with_namespace"] -@pytest.fixture(scope="function") -async def gitlab_project_factory(gitlab_test_client: AsyncClient): - async def _factory(name: str): - response = await gitlab_test_client.post("/projects", json={"name": name}) - response.raise_for_status() - project = response.json() - created_project_ids.append(project["id"]) +@pytest.fixture +async def foxops_client(gitlab_settings: GitlabTestSettings, foxops_database: str, monkeypatch): + static_token = "test-token" - return project + assert gitlab_settings.token is not None - created_project_ids: list[int] = [] + monkeypatch.setenv("FOXOPS_DATABASE_URL", foxops_database) + monkeypatch.setenv("FOXOPS_HOSTER_TYPE", "gitlab") + monkeypatch.setenv("FOXOPS_HOSTER_GITLAB_ADDRESS", gitlab_settings.address) + monkeypatch.setenv("FOXOPS_HOSTER_GITLAB_TOKEN", gitlab_settings.token.get_secret_value()) + monkeypatch.setenv("FOXOPS_STATIC_TOKEN", static_token) + monkeypatch.setenv("FOXOPS_LOG_LEVEL", "DEBUG") - yield _factory + async with AsyncClient( + app=create_app(), + base_url="http://test", + ) as client: + client.headers["Authorization"] = f"Bearer {static_token}" - # cleanup all projects that were created during the test, ignoring those that were already remove in the test - for project_id in created_project_ids: - response = await gitlab_test_client.delete(f"/projects/{project_id}") - try: - response.raise_for_status() - except httpx.HTTPStatusError as e: - if e.response.status_code != 404: - raise + yield client -@pytest.fixture(name="empty_incarnation_gitlab_repository") -async def create_empty_incarnation_gitlab_repository(gitlab_test_client: AsyncClient): - response = await gitlab_test_client.post("/projects", json={"name": f"incarnation-{str(uuid.uuid4())}"}) - response.raise_for_status() - project = response.json() - try: - # 
TODO: considering returning `project`, annotated with a `TypedDict` for the fields - # required for the tests. - yield project["path_with_namespace"] - finally: - (await gitlab_test_client.delete(f"/projects/{project['id']}")).raise_for_status() - - -@pytest.fixture(name="incarnation_gitlab_repository_in_v1") -async def create_incarnation_gitlab_repository_in_v1( - api_client: AsyncClient, - empty_incarnation_gitlab_repository: str, - template_repository: str, +@pytest.fixture +async def gitlab_incarnation_repository_in_v1( + foxops_client: AsyncClient, + gitlab_project_factory: Callable[[str], dict], + gitlab_template_repository: str, ): - response = await api_client.post( - "/incarnations", + incarnation_repo = gitlab_project_factory("incarnation")["path_with_namespace"] + + response = await foxops_client.post( + "/api/incarnations", json={ - "incarnation_repository": empty_incarnation_gitlab_repository, - "template_repository": template_repository, + "incarnation_repository": incarnation_repo, + "template_repository": gitlab_template_repository, "template_repository_version": "v1.0.0", "template_data": {"name": "Jon", "age": 18}, }, @@ -130,78 +119,4 @@ async def create_incarnation_gitlab_repository_in_v1( response.raise_for_status() incarnation = response.json() - return empty_incarnation_gitlab_repository, str(incarnation["id"]) - - -@pytest.fixture(name="template_repository") -async def create_template_gitlab_repository(gitlab_test_client: AsyncClient): - response = await gitlab_test_client.post("/projects", json={"name": f"template-{str(uuid.uuid4())}"}) - response.raise_for_status() - project = response.json() - try: - # TODO: considering returning `project`, annotated with a `TypedDict` for the fields - # required for the tests. 
- ( - await gitlab_test_client.post( - f"/projects/{project['id']}/repository/files/{quote_plus('fengine.yaml')}", - json={ - "encoding": "base64", - "content": base64.b64encode( - b""" -variables: - name: - type: str - description: The name of the person - - age: - type: int - description: The age of the person -""" - ).decode("utf-8"), - "commit_message": "Initial commit", - "branch": project["default_branch"], - }, - ) - ).raise_for_status() - - # VERSION v1.0.0 - ( - await gitlab_test_client.post( - f"/projects/{project['id']}/repository/files/{quote_plus('template/README.md')}", - json={ - "encoding": "base64", - "content": base64.b64encode(b"{{ name }} is of age {{ age }}").decode("utf-8"), - "commit_message": "Add template README", - "branch": project["default_branch"], - }, - ) - ).raise_for_status() - ( - await gitlab_test_client.post( - f"/projects/{project['id']}/repository/tags", - json={"tag_name": "v1.0.0", "ref": project["default_branch"]}, - ) - ).raise_for_status() - - # VERSION: v2.0.0 - ( - await gitlab_test_client.put( - f"/projects/{project['id']}/repository/files/{quote_plus('template/README.md')}", - json={ - "encoding": "base64", - "content": base64.b64encode(b"Hello {{ name }}, age: {{ age }}").decode("utf-8"), - "commit_message": "Change template README", - "branch": project["default_branch"], - }, - ) - ).raise_for_status() - ( - await gitlab_test_client.post( - f"/projects/{project['id']}/repository/tags", - json={"tag_name": "v2.0.0", "ref": project["default_branch"]}, - ) - ).raise_for_status() - - yield project["path_with_namespace"] - finally: - (await gitlab_test_client.delete(f"/projects/{project['id']}")).raise_for_status() + return incarnation_repo, str(incarnation["id"]) diff --git a/tests/e2e/hosters/test_gitlab.py b/tests/e2e/hosters/test_gitlab.py index ceff7b71..9cb5a649 100644 --- a/tests/e2e/hosters/test_gitlab.py +++ b/tests/e2e/hosters/test_gitlab.py @@ -11,7 +11,7 @@ from tenacity.wait import wait_fixed from 
foxops.hosters import Hoster, ReconciliationStatus -from foxops.hosters.gitlab import GitLab +from foxops.hosters.gitlab import GitlabHoster from foxops.hosters.types import MergeRequestStatus # mark all tests in this module as e2e @@ -77,7 +77,7 @@ async def create_test_repository(gitlab_test_client: AsyncClient) -> RepositoryT @pytest.fixture(name="test_gitlab_hoster") async def create_test_gitlab_hoster(gitlab_test_address: str, gitlab_test_user_token: str) -> Hoster: - return GitLab(gitlab_test_address, gitlab_test_user_token) + return GitlabHoster(gitlab_test_address, gitlab_test_user_token) async def should_return_success_reconciliation_status_for_default_branch_commit_without_pipeline( diff --git a/tests/e2e/test_incarnations_api.py b/tests/e2e/test_incarnations_api.py index 422f1562..9a66ba1f 100644 --- a/tests/e2e/test_incarnations_api.py +++ b/tests/e2e/test_incarnations_api.py @@ -1,9 +1,10 @@ import base64 from http import HTTPStatus +from typing import Callable from urllib.parse import quote_plus import pytest -from httpx import AsyncClient +from httpx import AsyncClient, Client from pytest_mock import MockFixture from tests.e2e.assertions import ( @@ -16,11 +17,16 @@ pytestmark = [pytest.mark.e2e, pytest.mark.api] -async def should_initialize_incarnation_in_root_of_empty_repository_when_creating_incarnation( - api_client: AsyncClient, - gitlab_test_client: AsyncClient, - template_repository: str, - empty_incarnation_gitlab_repository: str, +@pytest.fixture +def gitlab_incarnation_repository(gitlab_project_factory: Callable[[str], dict]) -> str: + return gitlab_project_factory("incarnation")["path_with_namespace"] + + +async def test_post_incarnations_creates_incarnation_in_root_of_empty_repository( + foxops_client: AsyncClient, + gitlab_client: Client, + gitlab_template_repository: str, + gitlab_incarnation_repository: str, mocker: MockFixture, ): # GIVEN @@ -28,11 +34,11 @@ async def 
should_initialize_incarnation_in_root_of_empty_repository_when_creatin template_data = {"name": "Jon", "age": "18"} # WHEN - response = await api_client.post( - "/incarnations", + response = await foxops_client.post( + "/api/incarnations", json={ - "incarnation_repository": empty_incarnation_gitlab_repository, - "template_repository": template_repository, + "incarnation_repository": gitlab_incarnation_repository, + "template_repository": gitlab_template_repository, "template_repository_version": template_repository_version, "template_data": template_data, }, @@ -41,10 +47,10 @@ async def should_initialize_incarnation_in_root_of_empty_repository_when_creatin incarnation = response.json() # THEN - assert incarnation["incarnation_repository"] == empty_incarnation_gitlab_repository + assert incarnation["incarnation_repository"] == gitlab_incarnation_repository assert incarnation["target_directory"] == "." assert incarnation["status"] == mocker.ANY - assert incarnation["template_repository"] == template_repository + assert incarnation["template_repository"] == gitlab_template_repository assert incarnation["template_repository_version"] == template_repository_version assert incarnation["template_data"] == template_data @@ -56,25 +62,25 @@ async def should_initialize_incarnation_in_root_of_empty_repository_when_creatin assert incarnation["merge_request_status"] is None assert incarnation["template_repository_version_hash"] == mocker.ANY - await assert_file_in_repository( - gitlab_test_client, - empty_incarnation_gitlab_repository, + assert_file_in_repository( + gitlab_client, + gitlab_incarnation_repository, "README.md", "Jon is of age 18", ) -async def should_initialize_incarnation_in_root_of_repository_with_fvars_file_when_creating_incarnation( - api_client: AsyncClient, - gitlab_test_client: AsyncClient, - template_repository: str, - empty_incarnation_gitlab_repository: str, +async def test_post_incarnations_creates_incarnation_in_root_of_repository_with_fvars_file( + 
foxops_client: AsyncClient, + gitlab_client: Client, + gitlab_template_repository: str, + gitlab_incarnation_repository: str, mocker: MockFixture, ): # GIVEN ( - await gitlab_test_client.post( - f"/projects/{quote_plus(empty_incarnation_gitlab_repository)}/repository/files/{quote_plus('default.fvars')}", + gitlab_client.post( + f"/projects/{quote_plus(gitlab_incarnation_repository)}/repository/files/{quote_plus('default.fvars')}", json={ "encoding": "base64", "content": base64.b64encode(b"name=Jon").decode("utf-8"), @@ -85,11 +91,11 @@ async def should_initialize_incarnation_in_root_of_repository_with_fvars_file_wh ).raise_for_status() # WHEN - response = await api_client.post( - "/incarnations", + response = await foxops_client.post( + "/api/incarnations", json={ - "incarnation_repository": empty_incarnation_gitlab_repository, - "template_repository": template_repository, + "incarnation_repository": gitlab_incarnation_repository, + "template_repository": gitlab_template_repository, "template_repository_version": "v1.0.0", "template_data": {"age": 18}, }, @@ -98,31 +104,31 @@ async def should_initialize_incarnation_in_root_of_repository_with_fvars_file_wh incarnation = response.json() # THEN - assert incarnation["incarnation_repository"] == empty_incarnation_gitlab_repository + assert incarnation["incarnation_repository"] == gitlab_incarnation_repository assert incarnation["target_directory"] == "." 
assert incarnation["status"] == mocker.ANY assert incarnation["commit_url"] == mocker.ANY assert incarnation["merge_request_id"] is None - await assert_file_in_repository( - gitlab_test_client, - empty_incarnation_gitlab_repository, + assert_file_in_repository( + gitlab_client, + gitlab_incarnation_repository, "README.md", "Jon is of age 18", ) -async def should_initialize_incarnation_in_root_of_nonempty_incarnation_with_a_direct_commit( - api_client: AsyncClient, - gitlab_test_client: AsyncClient, - template_repository: str, - empty_incarnation_gitlab_repository: str, +async def test_post_incarnations_creates_incarnation_in_root_of_nonempty_repository_with_a_direct_commit( + foxops_client: AsyncClient, + gitlab_client: Client, + gitlab_template_repository: str, + gitlab_incarnation_repository: str, mocker: MockFixture, ): # GIVEN ( - await gitlab_test_client.post( - f"/projects/{quote_plus(empty_incarnation_gitlab_repository)}/repository/files/{quote_plus('test.md')}", + gitlab_client.post( + f"/projects/{quote_plus(gitlab_incarnation_repository)}/repository/files/{quote_plus('test.md')}", json={ "encoding": "base64", "content": base64.b64encode(b"Hello World").decode("utf-8"), @@ -133,11 +139,11 @@ async def should_initialize_incarnation_in_root_of_nonempty_incarnation_with_a_d ).raise_for_status() # WHEN - response = await api_client.post( - "/incarnations", + response = await foxops_client.post( + "/api/incarnations", json={ - "incarnation_repository": empty_incarnation_gitlab_repository, - "template_repository": template_repository, + "incarnation_repository": gitlab_incarnation_repository, + "template_repository": gitlab_template_repository, "template_repository_version": "v1.0.0", "template_data": {"name": "Jon", "age": 18}, }, @@ -146,32 +152,32 @@ async def should_initialize_incarnation_in_root_of_nonempty_incarnation_with_a_d incarnation = response.json() # THEN - assert incarnation["incarnation_repository"] == empty_incarnation_gitlab_repository + assert 
incarnation["incarnation_repository"] == gitlab_incarnation_repository assert incarnation["target_directory"] == "." assert incarnation["status"] == mocker.ANY assert incarnation["commit_url"] == mocker.ANY assert incarnation["merge_request_id"] is None assert incarnation["merge_request_status"] is None - await assert_file_in_repository( - gitlab_test_client, - empty_incarnation_gitlab_repository, + assert_file_in_repository( + gitlab_client, + gitlab_incarnation_repository, "README.md", "Jon is of age 18", ) -async def should_initialize_incarnation_in_root_of_nonempty_repository_with_fvars_file_when_creating_incarnation( - api_client: AsyncClient, - gitlab_test_client: AsyncClient, - template_repository: str, - empty_incarnation_gitlab_repository: str, +async def test_post_incarnations_creates_incarnation_in_root_of_nonempty_repository_with_fvars_file( + foxops_client: AsyncClient, + gitlab_client: Client, + gitlab_template_repository: str, + gitlab_incarnation_repository: str, mocker: MockFixture, ): # GIVEN ( - await gitlab_test_client.post( - f"/projects/{quote_plus(empty_incarnation_gitlab_repository)}/repository/files/{quote_plus('default.fvars')}", + gitlab_client.post( + f"/projects/{quote_plus(gitlab_incarnation_repository)}/repository/files/{quote_plus('default.fvars')}", json={ "encoding": "base64", "content": base64.b64encode(b"name=Jon").decode("utf-8"), @@ -181,8 +187,8 @@ async def should_initialize_incarnation_in_root_of_nonempty_repository_with_fvar ) ).raise_for_status() ( - await gitlab_test_client.post( - f"/projects/{quote_plus(empty_incarnation_gitlab_repository)}/repository/files/{quote_plus('test.md')}", + gitlab_client.post( + f"/projects/{quote_plus(gitlab_incarnation_repository)}/repository/files/{quote_plus('test.md')}", json={ "encoding": "base64", "content": base64.b64encode(b"Hello World").decode("utf-8"), @@ -193,11 +199,11 @@ async def should_initialize_incarnation_in_root_of_nonempty_repository_with_fvar ).raise_for_status() # WHEN 
- response = await api_client.post( - "/incarnations", + response = await foxops_client.post( + "/api/incarnations", json={ - "incarnation_repository": empty_incarnation_gitlab_repository, - "template_repository": template_repository, + "incarnation_repository": gitlab_incarnation_repository, + "template_repository": gitlab_template_repository, "template_repository_version": "v1.0.0", "template_data": {"age": 18}, }, @@ -206,58 +212,33 @@ async def should_initialize_incarnation_in_root_of_nonempty_repository_with_fvar incarnation = response.json() # THEN - assert incarnation["incarnation_repository"] == empty_incarnation_gitlab_repository + assert incarnation["incarnation_repository"] == gitlab_incarnation_repository assert incarnation["target_directory"] == "." assert incarnation["status"] == "success" assert incarnation["commit_url"] == mocker.ANY assert incarnation["merge_request_url"] == mocker.ANY - await assert_file_in_repository( - gitlab_test_client, - empty_incarnation_gitlab_repository, + assert_file_in_repository( + gitlab_client, + gitlab_incarnation_repository, "README.md", "Jon is of age 18", ) -async def should_err_in_initialization_if_variable_is_missing( - api_client: AsyncClient, - template_repository: str, - empty_incarnation_gitlab_repository: str, - mocker: MockFixture, -): - # WHEN - response = await api_client.post( - "/incarnations", - json={ - "incarnation_repository": empty_incarnation_gitlab_repository, - "template_repository": template_repository, - "template_repository_version": "v1.0.0", - "template_data": {"name": "Jon"}, # missing `age` variable - }, - ) - - # THEN - assert response.status_code == HTTPStatus.BAD_REQUEST - assert ( - "the template required the variables ['age', 'name'] but the provided template data for the incarnation where ['name']." 
- in response.json()["message"] - ) - - -async def should_initialize_incarnation_in_subdir_of_empty_repository_when_creating_incarnation( - api_client: AsyncClient, - gitlab_test_client: AsyncClient, - template_repository: str, - empty_incarnation_gitlab_repository: str, +async def test_post_incarnations_creates_incarnation_in_subdir_of_empty_repository( + foxops_client: AsyncClient, + gitlab_client: Client, + gitlab_template_repository: str, + gitlab_incarnation_repository: str, mocker: MockFixture, ): # WHEN - response = await api_client.post( - "/incarnations", + response = await foxops_client.post( + "/api/incarnations", json={ - "incarnation_repository": empty_incarnation_gitlab_repository, + "incarnation_repository": gitlab_incarnation_repository, "target_directory": "subdir", - "template_repository": template_repository, + "template_repository": gitlab_template_repository, "template_repository_version": "v1.0.0", "template_data": {"name": "Jon", "age": 18}, }, @@ -266,35 +247,84 @@ async def should_initialize_incarnation_in_subdir_of_empty_repository_when_creat incarnation = response.json() # THEN - assert incarnation["incarnation_repository"] == empty_incarnation_gitlab_repository + assert incarnation["incarnation_repository"] == gitlab_incarnation_repository assert incarnation["target_directory"] == "subdir" assert incarnation["status"] == mocker.ANY assert incarnation["commit_url"] == mocker.ANY assert incarnation["merge_request_url"] == mocker.ANY assert incarnation["merge_request_status"] is None - await assert_file_in_repository( - gitlab_test_client, - empty_incarnation_gitlab_repository, + assert_file_in_repository( + gitlab_client, + gitlab_incarnation_repository, "subdir/README.md", "Jon is of age 18", ) -async def should_initialize_incarnations_in_subdirs_of_empty_repository_when_creating_incarnation( - api_client: AsyncClient, - gitlab_test_client: AsyncClient, - template_repository: str, - empty_incarnation_gitlab_repository: str, +async def 
test_post_incarnations_returns_error_if_variable_is_missing( + foxops_client: AsyncClient, + gitlab_template_repository: str, + gitlab_incarnation_repository: str, +): + # WHEN + response = await foxops_client.post( + "/api/incarnations", + json={ + "incarnation_repository": gitlab_incarnation_repository, + "template_repository": gitlab_template_repository, + "template_repository_version": "v1.0.0", + "template_data": {"name": "Jon"}, # missing `age` variable + }, + ) + + # THEN + assert response.status_code == HTTPStatus.BAD_REQUEST + assert ( + "the template required the variables ['age', 'name'] but the provided template data " + "for the incarnation where ['name']." in response.json()["message"] + ) + + +async def test_post_incarnations_returns_error_if_template_repository_version_does_not_exist( + foxops_client: AsyncClient, + gitlab_template_repository: str, + gitlab_incarnation_repository: str, +): + # GIVEN + template_repository_version = "vNon-existing" + template_data = {"name": "Jon", "age": "18"} + + # WHEN + response = await foxops_client.post( + "/api/incarnations", + json={ + "incarnation_repository": gitlab_incarnation_repository, + "template_repository": gitlab_template_repository, + "template_repository_version": template_repository_version, + "template_data": template_data, + }, + ) + + # THEN + assert response.status_code == HTTPStatus.BAD_REQUEST + assert "Revision 'vNon-existing' not found" in response.json()["message"] + + +async def test_multiple_post_incarnations_create_incarnations_in_subdirs_of_empty_repository( + foxops_client: AsyncClient, + gitlab_client: Client, + gitlab_template_repository: str, + gitlab_incarnation_repository: str, mocker: MockFixture, ): # WHEN - subdir1_response = await api_client.post( - "/incarnations", + subdir1_response = await foxops_client.post( + "/api/incarnations", json={ - "incarnation_repository": empty_incarnation_gitlab_repository, + "incarnation_repository": gitlab_incarnation_repository, 
"target_directory": "subdir1", - "template_repository": template_repository, + "template_repository": gitlab_template_repository, "template_repository_version": "v1.0.0", "template_data": {"name": "Jon", "age": 18}, }, @@ -302,12 +332,12 @@ async def should_initialize_incarnations_in_subdirs_of_empty_repository_when_cre subdir1_response.raise_for_status() subdir1_incarnation = subdir1_response.json() - subdir2_response = await api_client.post( - "/incarnations", + subdir2_response = await foxops_client.post( + "/api/incarnations", json={ - "incarnation_repository": empty_incarnation_gitlab_repository, + "incarnation_repository": gitlab_incarnation_repository, "target_directory": "subdir2", - "template_repository": template_repository, + "template_repository": gitlab_template_repository, "template_repository_version": "v1.0.0", "template_data": {"name": "Ygritte", "age": 17}, }, @@ -317,43 +347,43 @@ async def should_initialize_incarnations_in_subdirs_of_empty_repository_when_cre # THEN assert subdir1_incarnation["id"] == 1 - assert subdir1_incarnation["incarnation_repository"] == empty_incarnation_gitlab_repository + assert subdir1_incarnation["incarnation_repository"] == gitlab_incarnation_repository assert subdir1_incarnation["target_directory"] == "subdir1" assert subdir1_incarnation["commit_url"] == mocker.ANY assert subdir1_incarnation["merge_request_url"] == mocker.ANY assert subdir2_incarnation["id"] == 2 - assert subdir2_incarnation["incarnation_repository"] == empty_incarnation_gitlab_repository + assert subdir2_incarnation["incarnation_repository"] == gitlab_incarnation_repository assert subdir2_incarnation["target_directory"] == "subdir2" assert subdir2_incarnation["commit_url"] == mocker.ANY assert subdir2_incarnation["merge_request_url"] == mocker.ANY - await assert_file_in_repository( - gitlab_test_client, - empty_incarnation_gitlab_repository, + assert_file_in_repository( + gitlab_client, + gitlab_incarnation_repository, "subdir1/README.md", "Jon is 
of age 18", ) - await assert_file_in_repository( - gitlab_test_client, - empty_incarnation_gitlab_repository, + assert_file_in_repository( + gitlab_client, + gitlab_incarnation_repository, "subdir2/README.md", "Ygritte is of age 17", ) -async def should_create_merge_request_when_file_changed_during_update( - api_client: AsyncClient, - gitlab_test_client: AsyncClient, - incarnation_gitlab_repository_in_v1: tuple[str, str], +async def test_put_incarnation_updates_incarnation_with_merge_request( + foxops_client: AsyncClient, + gitlab_client: Client, + gitlab_incarnation_repository_in_v1: tuple[str, str], mocker: MockFixture, ): # GIVEN - incarnation_repository, incarnation_id = incarnation_gitlab_repository_in_v1 + incarnation_repository, incarnation_id = gitlab_incarnation_repository_in_v1 # WHEN - response = await api_client.put( - f"/incarnations/{incarnation_id}", + response = await foxops_client.put( + f"/api/incarnations/{incarnation_id}", json={ "template_repository_version": "v2.0.0", "template_data": {"name": "Jon", "age": 18}, @@ -370,9 +400,9 @@ async def should_create_merge_request_when_file_changed_during_update( assert incarnation["commit_url"] == mocker.ANY assert incarnation["merge_request_url"] == mocker.ANY - update_branch_name = await assert_update_merge_request_exists(gitlab_test_client, incarnation_repository) - await assert_file_in_repository( - gitlab_test_client, + update_branch_name = assert_update_merge_request_exists(gitlab_client, incarnation_repository) + assert_file_in_repository( + gitlab_client, incarnation_repository, "README.md", "Hello Jon, age: 18", @@ -380,16 +410,16 @@ async def should_create_merge_request_when_file_changed_during_update( ) -async def should_create_merge_request_when_file_changed_with_fvars_during_update( - api_client: AsyncClient, - gitlab_test_client: AsyncClient, - incarnation_gitlab_repository_in_v1: tuple[str, str], +async def test_put_incarnation_updates_incarnation_with_merge_request_when_fvars_changed( + 
foxops_client: AsyncClient, + gitlab_client: Client, + gitlab_incarnation_repository_in_v1: tuple[str, str], mocker: MockFixture, ): # GIVEN - incarnation_repository, incarnation_id = incarnation_gitlab_repository_in_v1 + incarnation_repository, incarnation_id = gitlab_incarnation_repository_in_v1 ( - await gitlab_test_client.post( + gitlab_client.post( f"/projects/{quote_plus(incarnation_repository)}/repository/files/{quote_plus('default.fvars')}", json={ "encoding": "base64", @@ -401,8 +431,8 @@ async def should_create_merge_request_when_file_changed_with_fvars_during_update ).raise_for_status() # WHEN - response = await api_client.put( - f"/incarnations/{incarnation_id}", + response = await foxops_client.put( + f"/api/incarnations/{incarnation_id}", json={ "template_repository_version": "v2.0.0", "template_data": {"age": 18}, @@ -419,9 +449,9 @@ async def should_create_merge_request_when_file_changed_with_fvars_during_update assert incarnation["commit_url"] == mocker.ANY assert incarnation["merge_request_url"] == mocker.ANY - update_branch_name = await assert_update_merge_request_exists(gitlab_test_client, incarnation_repository) - await assert_file_in_repository( - gitlab_test_client, + update_branch_name = assert_update_merge_request_exists(gitlab_client, incarnation_repository) + assert_file_in_repository( + gitlab_client, incarnation_repository, "README.md", "Hello Jon, age: 18", @@ -429,21 +459,58 @@ async def should_create_merge_request_when_file_changed_with_fvars_during_update ) +async def test_put_incarnation_updates_incarnation_with_merge_request_and_automerge( + foxops_client: AsyncClient, + gitlab_client: Client, + gitlab_incarnation_repository_in_v1: tuple[str, str], + mocker: MockFixture, +): + # GIVEN + incarnation_repository, incarnation_id = gitlab_incarnation_repository_in_v1 + + # WHEN + response = await foxops_client.put( + f"/api/incarnations/{incarnation_id}", + json={ + "template_repository_version": "v2.0.0", + "template_data": {"name": 
"Jon", "age": 18}, + "automerge": True, + }, + ) + response.raise_for_status() + incarnation = response.json() + + # THEN + assert incarnation["incarnation_repository"] == incarnation_repository + assert incarnation["target_directory"] == "." + assert incarnation["status"] == "success" + assert incarnation["commit_url"] == mocker.ANY + assert incarnation["merge_request_url"] == mocker.ANY + assert incarnation["merge_request_status"] == "merged" + + assert_file_in_repository( + gitlab_client, + incarnation_repository, + "README.md", + "Hello Jon, age: 18", + ) + + @pytest.mark.parametrize( "automerge", [True, False], ) -async def should_present_conflict_in_merge_request_when_updating( +async def test_put_incarnation_creates_merge_request_with_conflicts( automerge: bool, - api_client: AsyncClient, - gitlab_test_client: AsyncClient, - incarnation_gitlab_repository_in_v1: tuple[str, str], + foxops_client: AsyncClient, + gitlab_client: Client, + gitlab_incarnation_repository_in_v1: tuple[str, str], mocker: MockFixture, ): # GIVEN - incarnation_repository, incarnation_id = incarnation_gitlab_repository_in_v1 + incarnation_repository, incarnation_id = gitlab_incarnation_repository_in_v1 ( - await gitlab_test_client.put( + gitlab_client.put( f"/projects/{quote_plus(incarnation_repository)}/repository/files/{quote_plus('README.md')}", json={ "encoding": "base64", @@ -457,8 +524,8 @@ async def should_present_conflict_in_merge_request_when_updating( ).raise_for_status() # WHEN - response = await api_client.put( - f"/incarnations/{incarnation_id}", + response = await foxops_client.put( + f"/api/incarnations/{incarnation_id}", json={ "template_repository_version": "v2.0.0", "template_data": {"name": "Jon", "age": 18}, @@ -476,88 +543,54 @@ async def should_present_conflict_in_merge_request_when_updating( assert incarnation["merge_request_url"] == mocker.ANY assert incarnation["merge_request_status"] == "open" - await assert_update_merge_request_with_conflicts_exists( - 
gitlab_test_client, + assert_update_merge_request_with_conflicts_exists( + gitlab_client, incarnation_repository, files_with_conflicts=["README.md"], ) -async def should_automerge_merge_request_when_flag_is_true( - api_client: AsyncClient, - gitlab_test_client: AsyncClient, - incarnation_gitlab_repository_in_v1: tuple[str, str], - mocker: MockFixture, +async def test_put_incarnation_returns_error_if_the_previous_one_has_not_been_merged( + foxops_client: AsyncClient, + gitlab_incarnation_repository_in_v1: tuple[str, str], ): # GIVEN - incarnation_repository, incarnation_id = incarnation_gitlab_repository_in_v1 - - # WHEN - response = await api_client.put( - f"/incarnations/{incarnation_id}", + incarnation_repository, incarnation_id = gitlab_incarnation_repository_in_v1 + response = await foxops_client.put( + f"/api/incarnations/{incarnation_id}", json={ - "template_repository_version": "v2.0.0", - "template_data": {"name": "Jon", "age": 18}, - "automerge": True, + "template_data": {"name": "Jon", "age": 19}, + "automerge": False, }, ) response.raise_for_status() - incarnation = response.json() - - # THEN - assert incarnation["incarnation_repository"] == incarnation_repository - assert incarnation["target_directory"] == "." 
- assert incarnation["status"] == "success" - assert incarnation["commit_url"] == mocker.ANY - assert incarnation["merge_request_url"] == mocker.ANY - assert incarnation["merge_request_status"] == "merged" - - await assert_file_in_repository( - gitlab_test_client, - incarnation_repository, - "README.md", - "Hello Jon, age: 18", - ) - - -async def should_err_initialize_incarnation_if_template_repository_version_does_not_exist( - api_client: AsyncClient, - template_repository: str, - empty_incarnation_gitlab_repository: str, -): - # GIVEN - template_repository_version = "vNon-existing" - template_data = {"name": "Jon", "age": "18"} # WHEN - response = await api_client.post( - "/incarnations", + response = await foxops_client.put( + f"/api/incarnations/{incarnation_id}", json={ - "incarnation_repository": empty_incarnation_gitlab_repository, - "template_repository": template_repository, - "template_repository_version": template_repository_version, - "template_data": template_data, + "template_repository_version": "v2.0.0", + "automerge": False, }, ) # THEN - assert response.status_code == HTTPStatus.BAD_REQUEST - assert "Revision 'vNon-existing' not found" in response.json()["message"] + assert response.status_code == HTTPStatus.CONFLICT -async def test_list_incarnations_should_return_all_incarnations( - api_client: AsyncClient, - empty_incarnation_gitlab_repository: str, - template_repository: str, +async def test_get_incarnations_returns_all_incarnations( + foxops_client: AsyncClient, + gitlab_incarnation_repository: str, + gitlab_template_repository: str, ): # GIVEN for i in range(2): - response = await api_client.post( - "/incarnations", + response = await foxops_client.post( + "/api/incarnations", json={ - "incarnation_repository": empty_incarnation_gitlab_repository, + "incarnation_repository": gitlab_incarnation_repository, "target_directory": f"subdir{i}", - "template_repository": template_repository, + "template_repository": gitlab_template_repository, 
"template_repository_version": "v1.0.0", "template_data": {"name": "Jon", "age": 18}, }, @@ -565,7 +598,7 @@ async def test_list_incarnations_should_return_all_incarnations( response.raise_for_status() # WHEN - response = await api_client.get("/incarnations") + response = await foxops_client.get("/api/incarnations") response.raise_for_status() incarnations = response.json() @@ -576,7 +609,7 @@ async def test_list_incarnations_should_return_all_incarnations( inc1 = [inc for inc in incarnations if inc["target_directory"] == "subdir1"][0] assert inc0["id"] is not None - assert inc0["incarnation_repository"] == empty_incarnation_gitlab_repository + assert inc0["incarnation_repository"] == gitlab_incarnation_repository assert inc0["target_directory"] == "subdir0" assert inc0["revision"] == 1 @@ -592,19 +625,19 @@ async def test_list_incarnations_should_return_all_incarnations( assert inc1 -async def test_list_incarnations_returns_single_incarnation_when_queried( - api_client: AsyncClient, - empty_incarnation_gitlab_repository: str, - template_repository: str, +async def test_get_incarnations_returns_single_incarnation_when_queried( + foxops_client: AsyncClient, + gitlab_incarnation_repository: str, + gitlab_template_repository: str, ): # GIVEN for i in range(2): - response = await api_client.post( - "/incarnations", + response = await foxops_client.post( + "/api/incarnations", json={ - "incarnation_repository": empty_incarnation_gitlab_repository, + "incarnation_repository": gitlab_incarnation_repository, "target_directory": f"subdir{i}", - "template_repository": template_repository, + "template_repository": gitlab_template_repository, "template_repository_version": "v1.0.0", "template_data": {"name": "Jon", "age": 18}, }, @@ -612,9 +645,9 @@ async def test_list_incarnations_returns_single_incarnation_when_queried( response.raise_for_status() # WHEN - response = await api_client.get( - "/incarnations", - params={"incarnation_repository": 
empty_incarnation_gitlab_repository, "target_directory": "subdir1"}, + response = await foxops_client.get( + "/api/incarnations", + params={"incarnation_repository": gitlab_incarnation_repository, "target_directory": "subdir1"}, ) response.raise_for_status() @@ -625,56 +658,28 @@ async def test_list_incarnations_returns_single_incarnation_when_queried( assert incarnations[0]["target_directory"] == "subdir1" -async def test_list_incarnations_returns_not_found_if_given_incarnation_does_not_exist( - api_client: AsyncClient, +async def test_get_incarnations_returns_error_if_given_incarnation_does_not_exist( + foxops_client: AsyncClient, ): # GIVEN incarnation_repo = "non-existing" # WHEN - response = await api_client.get("/incarnations", params={"incarnation_repository": incarnation_repo}) + response = await foxops_client.get("/api/incarnations", params={"incarnation_repository": incarnation_repo}) # THEN assert response.status_code == HTTPStatus.NOT_FOUND -async def test_update_incarnation_should_fail_if_the_previous_one_has_not_been_merged( - api_client: AsyncClient, - incarnation_gitlab_repository_in_v1: tuple[str, str], -): - # GIVEN - incarnation_repository, incarnation_id = incarnation_gitlab_repository_in_v1 - response = await api_client.put( - f"/incarnations/{incarnation_id}", - json={ - "template_data": {"name": "Jon", "age": 19}, - "automerge": False, - }, - ) - response.raise_for_status() - - # WHEN - response = await api_client.put( - f"/incarnations/{incarnation_id}", - json={ - "template_repository_version": "v2.0.0", - "automerge": False, - }, - ) - - # THEN - assert response.status_code == HTTPStatus.CONFLICT - - -async def test_delete_incarnation_succeeds_when_there_are_changes( - api_client: AsyncClient, - gitlab_test_client: AsyncClient, - incarnation_gitlab_repository_in_v1: tuple[str, str], +async def test_delete_incarnation_removes_incarnation_when_there_are_changes( + foxops_client: AsyncClient, + gitlab_client: Client, + 
gitlab_incarnation_repository_in_v1: tuple[str, str], ): # GIVEN - incarnation_repository, incarnation_id = incarnation_gitlab_repository_in_v1 - response = await api_client.put( - f"/incarnations/{incarnation_id}", + incarnation_repository, incarnation_id = gitlab_incarnation_repository_in_v1 + response = await foxops_client.put( + f"/api/incarnations/{incarnation_id}", json={ "template_repository_version": "v2.0.0", "template_data": {"name": "Jon", "age": 18}, @@ -684,21 +689,22 @@ async def test_delete_incarnation_succeeds_when_there_are_changes( response.raise_for_status() # WHEN - response = await api_client.delete(f"/incarnations/{incarnation_id}") + response = await foxops_client.delete(f"/api/incarnations/{incarnation_id}") response.raise_for_status() # THEN assert response.status_code == HTTPStatus.NO_CONTENT + assert (await foxops_client.get(f"/api/incarnations/{incarnation_id}")).status_code == HTTPStatus.NOT_FOUND -async def test_reset_incarnation_succeeds( - api_client: AsyncClient, - gitlab_test_client: AsyncClient, - incarnation_gitlab_repository_in_v1: tuple[str, str], +async def test_post_incarnation_reset_creates_merge_request_that_resets_incarnation( + foxops_client: AsyncClient, + gitlab_client: Client, + gitlab_incarnation_repository_in_v1: tuple[str, str], ): # GIVEN - incarnation_repository, incarnation_id = incarnation_gitlab_repository_in_v1 - response = await gitlab_test_client.put( + incarnation_repository, incarnation_id = gitlab_incarnation_repository_in_v1 + response = gitlab_client.put( f"/projects/{quote_plus(incarnation_repository)}/repository/files/{quote_plus('README.md')}", json={ "encoding": "base64", @@ -712,7 +718,7 @@ async def test_reset_incarnation_succeeds( response.raise_for_status() # WHEN - response = await api_client.post(f"/incarnations/{incarnation_id}/reset") + response = await foxops_client.post(f"/api/incarnations/{incarnation_id}/reset") response.raise_for_status() response_data = response.json() @@ -723,7 +729,7 
@@ async def test_reset_incarnation_succeeds( assert response_data["merge_request_id"] is not None assert response_data["merge_request_url"].startswith("http") - response = await gitlab_test_client.get( + response = gitlab_client.get( f"/projects/{quote_plus(incarnation_repository)}/merge_requests/{response_data['merge_request_id']}" ) response.raise_for_status() diff --git a/tests/hosters/test_local.py b/tests/hosters/test_local.py index e4d6ad06..4b2cff8a 100644 --- a/tests/hosters/test_local.py +++ b/tests/hosters/test_local.py @@ -161,7 +161,7 @@ async def test_merge_request_returns_commit_id_of_source_branch(local_hoster): # THEN assert mr_commit_sha == commit_sha - assert mr_id == "0" + assert mr_id == "1" assert await local_hoster.get_merge_request_status(repo_name, mr_id) == MergeRequestStatus.OPEN @@ -195,7 +195,7 @@ async def test_merge_request_supports_automerge(local_hoster): # THEN assert mr_commit_sha == commit_sha - assert mr_id == "0" + assert mr_id == "1" assert await local_hoster.get_merge_request_status(repo_name, mr_id) == MergeRequestStatus.MERGED async with local_hoster.cloned_repository(repo_name) as repo: diff --git a/tests/routers/test_incarnations.py b/tests/routers/test_incarnations.py index f65a9193..5331f221 100644 --- a/tests/routers/test_incarnations.py +++ b/tests/routers/test_incarnations.py @@ -88,7 +88,7 @@ async def test_api_get_incarnations_returns_incarnations_from_inventory( "incarnation_repository": "test", "target_directory": "test", "commit_sha": "commit_sha", - "commit_url": "https://nonsense.com/test/-/commit/commit_sha", + "commit_url": mocker.ANY, "merge_request_id": None, "merge_request_url": None, "created_at": mocker.ANY, diff --git a/tests/services/test_change.py b/tests/services/test_change.py index b2a97cf2..88234da7 100644 --- a/tests/services/test_change.py +++ b/tests/services/test_change.py @@ -546,7 +546,7 @@ async def test_update_incomplete_change_raises_exception_for_pushed_merge_reques await 
change_service._change_repository.update_merge_request_id(change.id, None) # remove merge request - local_hoster._merge_requests[initialized_incarnation.incarnation_repository] = [] + local_hoster._mr_manager(initialized_incarnation.incarnation_repository).delete(int(change.merge_request_id)) # THEN with pytest.raises(CannotRepairChangeException): diff --git a/tests/test_settings.py b/tests/test_settings.py deleted file mode 100644 index 03eb684e..00000000 --- a/tests/test_settings.py +++ /dev/null @@ -1,22 +0,0 @@ -import pytest -from _pytest.monkeypatch import MonkeyPatch - -from foxops.hosters.gitlab import GitLabSettings -from foxops.settings import Settings - - -@pytest.mark.filterwarnings('ignore:directory "/var/run/secrets/foxops" does not exist') -def test_settings_can_load_config_from_env(monkeypatch: MonkeyPatch): - # GIVEN - monkeypatch.setenv("FOXOPS_GITLAB_ADDRESS", "dummy") - monkeypatch.setenv("FOXOPS_GITLAB_TOKEN", "dummy") - monkeypatch.setenv("FOXOPS_STATIC_TOKEN", "dummy") - - # WHEN - gsettings: GitLabSettings = GitLabSettings() # type: ignore - settings = Settings() # type: ignore - - # THEN - assert gsettings.address == "dummy" - assert gsettings.token.get_secret_value() == "dummy" - assert settings.static_token.get_secret_value() == "dummy"