From ccdec9b9f6f7ba75758217dd8e7c4a79b3998b70 Mon Sep 17 00:00:00 2001 From: Dragomir Penev <6687393+dragomirp@users.noreply.github.com> Date: Tue, 28 Nov 2023 23:34:16 +0200 Subject: [PATCH] [DPE-2904] Switch to self hosted runners (#285) * DO NOT MERGE test self-hsoted runners * Remove groups * Add group markers * fix pythonpath * Switch to relative imports * Switch to binary psycopg2 for test deps * Conditionally skip tests * Bump timeouts * Skipping fixtures and bump in timeouts * Juju matrix * Revert "Juju matrix" This reverts commit 24c9f8c1d8754f426833594096dc5b35c5ad6cdc. * Juju2 job * Revert backup changes * Bump timeouts * Switch to dpw 6 * More timeout bumps * Increase timeouts * Bump dpw * Depend on unit tests * Increase timeouts * Skip additional backup test when secrets are not accessible * Even more timouts * Retry getting password if no primary was found * Timeouts * Bump dpw * Try to preenable plpython * Add markers and cleanup tox ini * Marks * Switch to mainline workflow * Bump actions * Matrix integration * Update contributing * Use the right workflow * Bump action --------- Co-authored-by: Carl Csaposs --- .github/workflows/ci.yaml | 111 ++------ .github/workflows/release.yaml | 4 +- .github/workflows/sync_issue_to_jira.yaml | 2 +- CONTRIBUTING.md | 4 +- poetry.lock | 99 +++++-- pyproject.toml | 10 +- tests/integration/__init__.py | 2 + tests/integration/ha_tests/__init__.py | 2 + tests/integration/ha_tests/conftest.py | 4 +- tests/integration/ha_tests/helpers.py | 4 +- .../integration/ha_tests/test_replication.py | 14 +- .../ha_tests/test_restore_cluster.py | 18 +- .../integration/ha_tests/test_self_healing.py | 33 ++- tests/integration/ha_tests/test_upgrade.py | 24 +- .../ha_tests/test_upgrade_from_stable.py | 19 +- tests/integration/helpers.py | 15 +- tests/integration/juju_.py | 10 + tests/integration/markers.py | 10 + tests/integration/new_relations/__init__.py | 2 + .../new_relations/test_new_relations.py | 22 +- tests/integration/test_backups.py | 7 +- tests/integration/test_charm.py | 27 +- tests/integration/test_db.py | 15 +- tests/integration/test_db_admin.py | 9 +- tests/integration/test_password_rotation.py | 8 +- tests/integration/test_plugins.py | 5 +- tests/integration/test_tls.py | 7 +- tox.ini | 250 +----------------- 28 files changed, 288 insertions(+), 449 deletions(-) create mode 100644 tests/integration/__init__.py create mode 100644 tests/integration/ha_tests/__init__.py create mode 100644 tests/integration/juju_.py create mode 100644 tests/integration/markers.py create mode 100644 tests/integration/new_relations/__init__.py diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index bc00363a32..81f869c3ae 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -22,7 +22,7 @@ on: jobs: lint: name: Lint - uses: canonical/data-platform-workflows/.github/workflows/lint.yaml@v5.1.2 + uses: canonical/data-platform-workflows/.github/workflows/lint.yaml@v6.3.2 unit-test: name: Unit test charm @@ -42,100 +42,35 @@ jobs: build: name: Build charm - uses: canonical/data-platform-workflows/.github/workflows/build_charms_with_cache.yaml@v5.1.2 + uses: canonical/data-platform-workflows/.github/workflows/build_charms_with_cache.yaml@v6.3.2 permissions: actions: write # Needed to manage GitHub Actions cache - gh-hosted-integration-test: + + integration-test: strategy: fail-fast: false matrix: - tox-environment: - - backup-integration - - charm-integration - - database-relation-integration - - db-relation-integration - - 
db-admin-relation-integration - - ha-replication-integration - - ha-self-healing-integration - - ha-restore-cluster-integration - - password-rotation-integration - - plugins-integration - - tls-integration - - upgrade-integration - - upgrade-from-stable-integration - juju-snap-channel: ["2.9/stable", "3.1/stable"] - exclude: - # Admin tests need landscape on juju 2 - - tox-environment: db-admin-relation-integration - juju-snap-channel: "3.1/stable" - include: - - juju-snap-channel: "3.1/stable" - agent-version: "3.1.6" - libjuju-version: "3.2.2" - exclude-mark: "juju2" - - juju-snap-channel: "2.9/stable" - agent-version: "2.9.45" - libjuju-version: "2.9.45.0" - exclude-mark: "juju3" - name: ${{ matrix.juju-snap-channel }} - (GH hosted) ${{ matrix.tox-environment }} + juju: + - agent: 2.9.45 + libjuju: ^2 + - agent: 3.1.6 + name: Integration test charm | ${{ matrix.juju.agent }} needs: - lint - unit-test - build - runs-on: ubuntu-latest - timeout-minutes: 120 - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Install tox & poetry - run: | - pipx install tox - pipx install poetry - - name: Free up disk space - run: | - # From https://github.com/actions/runner-images/issues/2840#issuecomment-790492173 - sudo rm -rf /usr/share/dotnet - sudo rm -rf /opt/ghc - sudo rm -rf /usr/local/share/boost - sudo rm -rf "$AGENT_TOOLSDIRECTORY" - - name: Setup operator environment - uses: charmed-kubernetes/actions-operator@main - with: - provider: lxd - bootstrap-options: "--agent-version ${{ matrix.agent-version }}" - juju-channel: ${{ matrix.juju-snap-channel }} - - name: Update python-libjuju version - if: ${{ matrix.juju-snap-channel == '2.9/stable' }} - run: poetry add --lock --group integration juju@'${{ matrix.libjuju-version }}' - - name: Download packed charm(s) - uses: actions/download-artifact@v3 - with: - name: ${{ needs.build.outputs.artifact-name }} - - name: Select test stability level - id: select-test-stability - run: | - if [[ "${{ github.event_name }}" == "schedule" ]] - then - echo Running unstable and stable tests - echo "mark_expression=" >> "$GITHUB_OUTPUT" - else - echo Skipping unstable tests - echo "mark_expression=and not unstable" >> "$GITHUB_OUTPUT" - fi - - name: Select test secret usage - id: select-test-secrets - if: ${{ github.event.pull_request.head.repo.full_name != 'canonical/postgresql-operator' }} - run: | - echo Skipping tests using secrets - echo "mark_secrets=and not uses_secrets" >> "$GITHUB_OUTPUT" - - name: Run integration tests - run: tox run -e ${{ matrix.tox-environment }} -- -m 'not ${{ matrix.exclude-mark }} ${{ steps.select-test-secrets.outputs.mark_secrets }} ${{ steps.select-test-stability.outputs.mark_expression }}' --keep-models - env: - SECRETS_FROM_GITHUB: | - { - "AWS_ACCESS_KEY": "${{ secrets.AWS_ACCESS_KEY }}", - "AWS_SECRET_KEY": "${{ secrets.AWS_SECRET_KEY }}", - "GCP_ACCESS_KEY": "${{ secrets.GCP_ACCESS_KEY }}", - "GCP_SECRET_KEY": "${{ secrets.GCP_SECRET_KEY }}", - } + uses: canonical/data-platform-workflows/.github/workflows/integration_test_charm_self_hosted.yaml@v6.3.2 + with: + artifact-name: ${{ needs.build.outputs.artifact-name }} + cloud: lxd + juju-agent-version: ${{ matrix.juju.agent }} + libjuju-version-constraint: ${{ matrix.juju.libjuju }} + secrets: + integration-test: | + { + "AWS_ACCESS_KEY": "${{ secrets.AWS_ACCESS_KEY }}", + "AWS_SECRET_KEY": "${{ secrets.AWS_SECRET_KEY }}", + "GCP_ACCESS_KEY": "${{ secrets.GCP_ACCESS_KEY }}", + "GCP_SECRET_KEY": "${{ secrets.GCP_SECRET_KEY }}", + } diff --git 
a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 4d1f3bb997..a87f7abb69 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -24,14 +24,14 @@ jobs: build: name: Build charm - uses: canonical/data-platform-workflows/.github/workflows/build_charm_without_cache.yaml@v5.1.2 + uses: canonical/data-platform-workflows/.github/workflows/build_charm_without_cache.yaml@v6.3.2 release: name: Release charm needs: - ci-tests - build - uses: canonical/data-platform-workflows/.github/workflows/release_charm.yaml@v5.1.2 + uses: canonical/data-platform-workflows/.github/workflows/release_charm.yaml@v6.3.2 with: channel: 14/edge artifact-name: ${{ needs.build.outputs.artifact-name }} diff --git a/.github/workflows/sync_issue_to_jira.yaml b/.github/workflows/sync_issue_to_jira.yaml index 3ce4c6289e..806ac65b3e 100644 --- a/.github/workflows/sync_issue_to_jira.yaml +++ b/.github/workflows/sync_issue_to_jira.yaml @@ -9,7 +9,7 @@ on: jobs: sync: name: Sync GitHub issue to Jira - uses: canonical/data-platform-workflows/.github/workflows/sync_issue_to_jira.yaml@v5.1.2 + uses: canonical/data-platform-workflows/.github/workflows/sync_issue_to_jira.yaml@v6.3.2 with: jira-base-url: https://warthogs.atlassian.net jira-project-key: DPE diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fbaae51d01..d36e38089b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -43,9 +43,11 @@ tox # runs 'lint' and 'unit' environments Build the charm in this git repository using: ```shell -charmcraft pack +tox -e build ``` +The tox build environment assumes that there is a preinstalled [poetry](https://python-poetry.org/) on the system. + ### Deploy ```bash diff --git a/poetry.lock b/poetry.lock index 68daf50180..eb43369890 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1137,23 +1137,84 @@ files = [ ] [[package]] -name = "psycopg2" +name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.7" files = [ - {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, - {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, - {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, - {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, - {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, - {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, - {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, - {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, - {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, - {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, - {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, + {file = "psycopg2-binary-2.9.9.tar.gz", hash = 
"sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, ] [[package]] @@ -1408,8 +1469,8 @@ develop = false [package.source] type = "git" url = "https://github.com/canonical/data-platform-workflows" -reference = "v5.1.2" -resolved_reference = "3cc668dc10fa7316da9600c296ca7640d7d83222" +reference = "v6.3.2" +resolved_reference = "a8c3b5db3bf4c00eb2d8f1b01b7a0d3912ed356d" subdirectory = "python/pytest_plugins/github_secrets" [[package]] @@ -1446,8 +1507,8 @@ pyyaml = "*" [package.source] type = "git" url = "https://github.com/canonical/data-platform-workflows" -reference = "v5.1.2" -resolved_reference = "3cc668dc10fa7316da9600c296ca7640d7d83222" +reference = "v6.3.2" +resolved_reference = "a8c3b5db3bf4c00eb2d8f1b01b7a0d3912ed356d" subdirectory = "python/pytest_plugins/pytest_operator_cache" [[package]] @@ -1465,8 +1526,8 @@ pytest = "*" [package.source] type = "git" url = "https://github.com/canonical/data-platform-workflows" -reference = "v5.1.2" -resolved_reference = "3cc668dc10fa7316da9600c296ca7640d7d83222" +reference = "v6.3.2" +resolved_reference = "a8c3b5db3bf4c00eb2d8f1b01b7a0d3912ed356d" subdirectory = "python/pytest_plugins/pytest_operator_groups" [[package]] @@ -1997,4 +2058,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "49fde8e1037762b6d4de090e3d39a755086a75a406dc40c7ddedbca918f8b491" +content-hash = "6ee822d4d89cdd3bdb139fc3789226ed9759d35c1987a556bf0a7eaa59e27cb9" diff --git a/pyproject.toml b/pyproject.toml index f1258e4d5b..71d88759b4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,7 +57,7 @@ coverage = {extras = ["toml"], version = "^7.3.2"} pytest = "^7.4.0" pytest-asyncio = "^0.21.1" jsonschema = "^4.19.1" -psycopg2 = {version = "^2.9.9", extras = ["binary"]} +psycopg2-binary = "^2.9.9" jinja2 = "^3.1.2" [tool.poetry.group.integration] @@ -65,16 +65,16 @@ optional = true [tool.poetry.group.integration.dependencies] pytest = "^7.4.0" -pytest-github-secrets = {git = "https://github.com/canonical/data-platform-workflows", tag = "v5.1.2", subdirectory = "python/pytest_plugins/github_secrets"} +pytest-github-secrets = {git = "https://github.com/canonical/data-platform-workflows", tag = "v6.3.2", subdirectory = "python/pytest_plugins/github_secrets"} pytest-operator = "^0.29.0" -pytest-operator-cache = {git = "https://github.com/canonical/data-platform-workflows", tag = "v5.1.2", subdirectory = "python/pytest_plugins/pytest_operator_cache"} -pytest-operator-groups = {git = "https://github.com/canonical/data-platform-workflows", tag = "v5.1.2", subdirectory = "python/pytest_plugins/pytest_operator_groups"} +pytest-operator-cache = {git = "https://github.com/canonical/data-platform-workflows", tag = "v6.3.2", subdirectory = "python/pytest_plugins/pytest_operator_cache"} 
+pytest-operator-groups = {git = "https://github.com/canonical/data-platform-workflows", tag = "v6.3.2", subdirectory = "python/pytest_plugins/pytest_operator_groups"} juju = "^3.2.2" boto3 = "^1.28.70" tenacity = "^8.2.3" landscape-api-py3 = "^0.9.0" mailmanclient = "^3.3.5" -psycopg2 = {version = "^2.9.9", extras = ["binary"]} +psycopg2-binary = "^2.9.9" # Testing tools configuration [tool.coverage.run] diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 0000000000..db3bfe1a65 --- /dev/null +++ b/tests/integration/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. diff --git a/tests/integration/ha_tests/__init__.py b/tests/integration/ha_tests/__init__.py new file mode 100644 index 0000000000..db3bfe1a65 --- /dev/null +++ b/tests/integration/ha_tests/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. diff --git a/tests/integration/ha_tests/conftest.py b/tests/integration/ha_tests/conftest.py index 1a8d8f1192..83836743da 100644 --- a/tests/integration/ha_tests/conftest.py +++ b/tests/integration/ha_tests/conftest.py @@ -7,7 +7,8 @@ from pytest_operator.plugin import OpsTest from tenacity import Retrying, stop_after_delay, wait_fixed -from tests.integration.ha_tests.helpers import ( +from ..helpers import run_command_on_unit +from .helpers import ( APPLICATION_NAME, ORIGINAL_RESTART_CONDITION, RESTART_CONDITION, @@ -18,7 +19,6 @@ get_postgresql_parameter, update_restart_condition, ) -from tests.integration.helpers import run_command_on_unit @pytest.fixture() diff --git a/tests/integration/ha_tests/helpers.py b/tests/integration/ha_tests/helpers.py index e4b4010309..276e0a1b2d 100644 --- a/tests/integration/ha_tests/helpers.py +++ b/tests/integration/ha_tests/helpers.py @@ -20,7 +20,7 @@ wait_fixed, ) -from tests.integration.helpers import db_connect, get_unit_address, run_command_on_unit +from ..helpers import db_connect, get_unit_address, run_command_on_unit APPLICATION_NAME = "postgresql-test-app" METADATA = yaml.safe_load(Path("./metadata.yaml").read_text()) @@ -753,7 +753,7 @@ async def add_unit_with_storage(ops_test, app, storage): return_code, _, _ = await ops_test.juju(*add_unit_cmd) assert return_code == 0, "Failed to add unit with storage" async with ops_test.fast_forward(): - await ops_test.model.wait_for_idle(apps=[app], status="active", timeout=1000) + await ops_test.model.wait_for_idle(apps=[app], status="active", timeout=1500) assert ( len(ops_test.model.applications[app].units) == expected_units ), "New unit not added to model" diff --git a/tests/integration/ha_tests/test_replication.py b/tests/integration/ha_tests/test_replication.py index a579aad58b..f4a985b670 100644 --- a/tests/integration/ha_tests/test_replication.py +++ b/tests/integration/ha_tests/test_replication.py @@ -6,7 +6,8 @@ from pytest_operator.plugin import OpsTest from tenacity import Retrying, stop_after_delay, wait_fixed -from tests.integration.ha_tests.helpers import ( +from ..helpers import CHARM_SERIES, db_connect, scale_application +from .helpers import ( APPLICATION_NAME, app_name, are_writes_increasing, @@ -16,9 +17,9 @@ get_primary, start_continuous_writes, ) -from tests.integration.helpers import CHARM_SERIES, db_connect, scale_application +@pytest.mark.group(1) @pytest.mark.abort_on_fail async def test_build_and_deploy(ops_test: OpsTest) -> None: """Build and deploy three unit of PostgreSQL.""" @@ -48,9 +49,10 @@ async def 
test_build_and_deploy(ops_test: OpsTest) -> None: if wait_for_apps: async with ops_test.fast_forward(): - await ops_test.model.wait_for_idle(status="active", timeout=1000) + await ops_test.model.wait_for_idle(status="active", timeout=1500) +@pytest.mark.group(1) async def test_reelection(ops_test: OpsTest, continuous_writes, primary_start_timeout) -> None: """Kill primary unit, check reelection.""" app = await app_name(ops_test) @@ -88,6 +90,7 @@ async def test_reelection(ops_test: OpsTest, continuous_writes, primary_start_ti await check_writes(ops_test) +@pytest.mark.group(1) async def test_consistency(ops_test: OpsTest, continuous_writes) -> None: """Write to primary, read data from secondaries (check consistency).""" # Locate primary unit. @@ -104,6 +107,7 @@ async def test_consistency(ops_test: OpsTest, continuous_writes) -> None: await check_writes(ops_test) +@pytest.mark.group(1) async def test_no_data_replicated_between_clusters(ops_test: OpsTest, continuous_writes) -> None: """Check that writes in one cluster are not replicated to another cluster.""" # Locate primary unit. @@ -122,7 +126,9 @@ async def test_no_data_replicated_between_clusters(ops_test: OpsTest, continuous series=CHARM_SERIES, config={"profile": "testing"}, ) - await ops_test.model.wait_for_idle(apps=[new_cluster_app], status="active") + await ops_test.model.wait_for_idle( + apps=[new_cluster_app], status="active", timeout=1500 + ) # Start an application that continuously writes data to the database. await start_continuous_writes(ops_test, app) diff --git a/tests/integration/ha_tests/test_restore_cluster.py b/tests/integration/ha_tests/test_restore_cluster.py index ce9fd99f86..1d11d4d5e5 100644 --- a/tests/integration/ha_tests/test_restore_cluster.py +++ b/tests/integration/ha_tests/test_restore_cluster.py @@ -6,13 +6,7 @@ import pytest from pytest_operator.plugin import OpsTest -from tests.integration.ha_tests.helpers import ( - add_unit_with_storage, - get_patroni_cluster, - reused_full_cluster_recovery_storage, - storage_id, -) -from tests.integration.helpers import ( +from ..helpers import ( CHARM_SERIES, db_connect, get_password, @@ -20,6 +14,12 @@ get_unit_address, set_password, ) +from .helpers import ( + add_unit_with_storage, + get_patroni_cluster, + reused_full_cluster_recovery_storage, + storage_id, +) FIRST_APPLICATION = "first-cluster" SECOND_APPLICATION = "second-cluster" @@ -29,6 +29,7 @@ charm = None +@pytest.mark.group(1) @pytest.mark.abort_on_fail async def test_build_and_deploy(ops_test: OpsTest) -> None: """Build and deploy two PostgreSQL clusters.""" @@ -54,7 +55,7 @@ async def test_build_and_deploy(ops_test: OpsTest) -> None: config={"profile": "testing"}, ) - await ops_test.model.wait_for_idle(status="active", timeout=1000) + await ops_test.model.wait_for_idle(status="active", timeout=1500) # TODO have a better way to bootstrap clusters with existing storage primary = await get_primary( @@ -66,6 +67,7 @@ async def test_build_and_deploy(ops_test: OpsTest) -> None: await ops_test.model.destroy_unit(second_primary) +@pytest.mark.group(1) async def test_cluster_restore(ops_test): """Recreates the cluster from storage volumes.""" # Write some data. 
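The restore-cluster test above rebuilds a cluster from the storage volumes of destroyed units. Below is a minimal sketch of the flow that `add_unit_with_storage` and `storage_id` rely on; the helper internals and the `--attach-storage` invocation are assumptions for illustration, not the repository's actual implementation.

```python
# Illustrative sketch only, not part of the patch: re-attach an existing
# storage volume (e.g. "pgdata/0") to a freshly added unit, then wait for the
# application to settle, mirroring the assertions in add_unit_with_storage.
from pytest_operator.plugin import OpsTest


async def add_unit_reusing_storage(ops_test: OpsTest, app: str, storage: str) -> None:
    # ops_test.juju() returns (return_code, stdout, stderr), as used in the helpers above.
    return_code, _, _ = await ops_test.juju("add-unit", app, "--attach-storage", storage)
    assert return_code == 0, "Failed to add unit with storage"
    async with ops_test.fast_forward():
        await ops_test.model.wait_for_idle(apps=[app], status="active", timeout=1500)
```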
diff --git a/tests/integration/ha_tests/test_self_healing.py b/tests/integration/ha_tests/test_self_healing.py index bb68fa8541..dc9c4745f5 100644 --- a/tests/integration/ha_tests/test_self_healing.py +++ b/tests/integration/ha_tests/test_self_healing.py @@ -8,8 +8,16 @@ from pytest_operator.plugin import OpsTest from tenacity import Retrying, stop_after_delay, wait_fixed -from tests.integration.ha_tests.conftest import APPLICATION_NAME -from tests.integration.ha_tests.helpers import ( +from ..helpers import ( + CHARM_SERIES, + db_connect, + get_machine_from_unit, + get_password, + get_unit_address, + run_command_on_unit, +) +from .conftest import APPLICATION_NAME +from .helpers import ( METADATA, ORIGINAL_RESTART_CONDITION, add_unit_with_storage, @@ -43,14 +51,6 @@ update_restart_condition, wait_network_restore, ) -from tests.integration.helpers import ( - CHARM_SERIES, - db_connect, - get_machine_from_unit, - get_password, - get_unit_address, - run_command_on_unit, -) logger = logging.getLogger(__name__) @@ -60,6 +60,7 @@ DB_PROCESSES = [POSTGRESQL_PROCESS, PATRONI_PROCESS] +@pytest.mark.group(1) @pytest.mark.abort_on_fail async def test_build_and_deploy(ops_test: OpsTest) -> None: """Build and deploy three unit of PostgreSQL.""" @@ -90,9 +91,10 @@ async def test_build_and_deploy(ops_test: OpsTest) -> None: if wait_for_apps: async with ops_test.fast_forward(): - await ops_test.model.wait_for_idle(status="active", timeout=1000) + await ops_test.model.wait_for_idle(status="active", timeout=1500) +@pytest.mark.group(1) async def test_storage_re_use(ops_test, continuous_writes): """Verifies that database units with attached storage correctly repurpose storage. @@ -109,7 +111,7 @@ async def test_storage_re_use(ops_test, continuous_writes): # removing the only replica can be disastrous if len(ops_test.model.applications[app].units) < 2: await ops_test.model.applications[app].add_unit(count=1) - await ops_test.model.wait_for_idle(apps=[app], status="active", timeout=1000) + await ops_test.model.wait_for_idle(apps=[app], status="active", timeout=1500) # Start an application that continuously writes data to the database. await start_continuous_writes(ops_test, app) @@ -139,6 +141,7 @@ async def test_storage_re_use(ops_test, continuous_writes): ), "new instance not up to date." 
+@pytest.mark.group(1) @pytest.mark.parametrize("process", DB_PROCESSES) async def test_kill_db_process( ops_test: OpsTest, process: str, continuous_writes, primary_start_timeout @@ -166,6 +169,7 @@ async def test_kill_db_process( await is_cluster_updated(ops_test, primary_name) +@pytest.mark.group(1) @pytest.mark.parametrize("process", DB_PROCESSES) async def test_freeze_db_process( ops_test: OpsTest, process: str, continuous_writes, primary_start_timeout @@ -203,6 +207,7 @@ async def test_freeze_db_process( await is_cluster_updated(ops_test, primary_name) +@pytest.mark.group(1) @pytest.mark.parametrize("process", DB_PROCESSES) async def test_restart_db_process( ops_test: OpsTest, process: str, continuous_writes, primary_start_timeout @@ -230,6 +235,7 @@ async def test_restart_db_process( await is_cluster_updated(ops_test, primary_name) +@pytest.mark.group(1) @pytest.mark.parametrize("process", DB_PROCESSES) @pytest.mark.parametrize("signal", ["SIGTERM", "SIGKILL"]) async def test_full_cluster_restart( @@ -299,6 +305,7 @@ async def test_full_cluster_restart( await check_writes(ops_test) +@pytest.mark.group(1) @pytest.mark.unstable async def test_forceful_restart_without_data_and_transaction_logs( ops_test: OpsTest, @@ -374,6 +381,7 @@ async def test_forceful_restart_without_data_and_transaction_logs( await is_cluster_updated(ops_test, primary_name) +@pytest.mark.group(1) @pytest.mark.unstable async def test_network_cut(ops_test: OpsTest, continuous_writes, primary_start_timeout): """Completely cut and restore network.""" @@ -458,6 +466,7 @@ async def test_network_cut(ops_test: OpsTest, continuous_writes, primary_start_t await is_cluster_updated(ops_test, primary_name) +@pytest.mark.group(1) @pytest.mark.unstable async def test_network_cut_without_ip_change( ops_test: OpsTest, continuous_writes, primary_start_timeout diff --git a/tests/integration/ha_tests/test_upgrade.py b/tests/integration/ha_tests/test_upgrade.py index c58572428f..26ef390e4a 100644 --- a/tests/integration/ha_tests/test_upgrade.py +++ b/tests/integration/ha_tests/test_upgrade.py @@ -11,25 +11,26 @@ import pytest from pytest_operator.plugin import OpsTest -from tests.integration.ha_tests.helpers import ( - APPLICATION_NAME, - are_writes_increasing, - check_writes, - start_continuous_writes, -) -from tests.integration.helpers import ( +from ..helpers import ( DATABASE_APP_NAME, count_switchovers, get_leader_unit, get_primary, ) -from tests.integration.new_relations.helpers import get_application_relation_data +from ..new_relations.helpers import get_application_relation_data +from .helpers import ( + APPLICATION_NAME, + are_writes_increasing, + check_writes, + start_continuous_writes, +) logger = logging.getLogger(__name__) -TIMEOUT = 5 * 60 +TIMEOUT = 600 +@pytest.mark.group(1) @pytest.mark.abort_on_fail async def test_deploy_latest(ops_test: OpsTest) -> None: """Simple test to ensure that the PostgreSQL and application charms get deployed.""" @@ -47,11 +48,12 @@ async def test_deploy_latest(ops_test: OpsTest) -> None: logger.info("Wait for applications to become active") async with ops_test.fast_forward(): await ops_test.model.wait_for_idle( - apps=[DATABASE_APP_NAME, APPLICATION_NAME], status="active", timeout=1000 + apps=[DATABASE_APP_NAME, APPLICATION_NAME], status="active", timeout=1500 ) assert len(ops_test.model.applications[DATABASE_APP_NAME].units) == 3 +@pytest.mark.group(1) @pytest.mark.abort_on_fail async def test_pre_upgrade_check(ops_test: OpsTest) -> None: """Test that the pre-upgrade-check action runs 
successfully.""" @@ -64,6 +66,7 @@ async def test_pre_upgrade_check(ops_test: OpsTest) -> None: await action.wait() +@pytest.mark.group(1) @pytest.mark.abort_on_fail async def test_upgrade_from_edge(ops_test: OpsTest, continuous_writes) -> None: # Start an application that continuously writes data to the database. @@ -113,6 +116,7 @@ async def test_upgrade_from_edge(ops_test: OpsTest, continuous_writes) -> None: ) <= 2, "Number of switchovers is greater than 2" +@pytest.mark.group(1) @pytest.mark.abort_on_fail async def test_fail_and_rollback(ops_test, continuous_writes) -> None: # Start an application that continuously writes data to the database. diff --git a/tests/integration/ha_tests/test_upgrade_from_stable.py b/tests/integration/ha_tests/test_upgrade_from_stable.py index 5d20fcfa50..c0978fb7aa 100644 --- a/tests/integration/ha_tests/test_upgrade_from_stable.py +++ b/tests/integration/ha_tests/test_upgrade_from_stable.py @@ -6,25 +6,26 @@ import pytest from pytest_operator.plugin import OpsTest -from tests.integration.ha_tests.helpers import ( - APPLICATION_NAME, - are_writes_increasing, - check_writes, - start_continuous_writes, -) -from tests.integration.helpers import ( +from ..helpers import ( DATABASE_APP_NAME, count_switchovers, get_leader_unit, get_primary, remove_chown_workaround, ) +from .helpers import ( + APPLICATION_NAME, + are_writes_increasing, + check_writes, + start_continuous_writes, +) logger = logging.getLogger(__name__) -TIMEOUT = 5 * 60 +TIMEOUT = 600 +@pytest.mark.group(1) @pytest.mark.abort_on_fail async def test_deploy_stable(ops_test: OpsTest) -> None: """Simple test to ensure that the PostgreSQL and application charms get deployed.""" @@ -75,6 +76,7 @@ async def test_deploy_stable(ops_test: OpsTest) -> None: assert len(ops_test.model.applications[DATABASE_APP_NAME].units) == 3 +@pytest.mark.group(1) @pytest.mark.abort_on_fail async def test_pre_upgrade_check(ops_test: OpsTest) -> None: """Test that the pre-upgrade-check action runs successfully.""" @@ -92,6 +94,7 @@ async def test_pre_upgrade_check(ops_test: OpsTest) -> None: await action.wait() +@pytest.mark.group(1) @pytest.mark.abort_on_fail async def test_upgrade_from_stable(ops_test: OpsTest): """Test updating from stable channel.""" diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py index 24e7a6c0a9..1ebedf68a4 100644 --- a/tests/integration/helpers.py +++ b/tests/integration/helpers.py @@ -33,6 +33,7 @@ CHARM_SERIES = "jammy" METADATA = yaml.safe_load(Path("./metadata.yaml").read_text()) DATABASE_APP_NAME = METADATA["name"] +STORAGE_PATH = METADATA["storage"]["pgdata"]["location"] async def build_connection_string( @@ -299,7 +300,7 @@ async def deploy_and_relate_application_with_postgresql( apps=[application_name], status="active", raise_on_blocked=False, - timeout=1000, + timeout=1500, ) # Relate application to PostgreSQL. @@ -310,7 +311,7 @@ async def deploy_and_relate_application_with_postgresql( apps=[application_name], status="active", raise_on_blocked=False, # Application that needs a relation is blocked initially. - timeout=1000, + timeout=1500, ) return relation.id @@ -325,6 +326,7 @@ async def deploy_and_relate_bundle_with_postgresql( status: str = "active", status_message: str = None, overlay: Dict = None, + timeout: int = 2000, ) -> str: """Helper function to deploy and relate a bundle with PostgreSQL. @@ -342,6 +344,7 @@ async def deploy_and_relate_bundle_with_postgresql( status_message: Status message to wait for in the application after relating it to PostgreSQL. 
overlay: Optional overlay to be used when deploying the bundle. + timeout: Timeout to wait for the deployment to idle. """ # Deploy the bundle. with tempfile.NamedTemporaryFile(dir=os.getcwd()) as original: @@ -403,19 +406,19 @@ async def deploy_and_relate_bundle_with_postgresql( ops_test.model.wait_for_idle( apps=[DATABASE_APP_NAME], status="active", - timeout=1500, + timeout=timeout, ), ops_test.model.wait_for_idle( apps=[main_application_name], raise_on_blocked=False, status=status, - timeout=1500, + timeout=timeout, ), ] if status_message: awaits.append( ops_test.model.block_until( - lambda: unit.workload_status_message == status_message, timeout=1500 + lambda: unit.workload_status_message == status_message, timeout=timeout ) ) await asyncio.gather(*awaits) @@ -864,7 +867,7 @@ async def scale_application(ops_test: OpsTest, application_name: str, count: int ] await ops_test.model.applications[application_name].destroy_units(*units) await ops_test.model.wait_for_idle( - apps=[application_name], status="active", timeout=1500, wait_for_exact_units=count + apps=[application_name], status="active", timeout=2000, wait_for_exact_units=count ) diff --git a/tests/integration/juju_.py b/tests/integration/juju_.py new file mode 100644 index 0000000000..a2d250e4fc --- /dev/null +++ b/tests/integration/juju_.py @@ -0,0 +1,10 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. + +import importlib.metadata + +# libjuju version != juju agent version, but the major version should be identical—which is good +# enough to check for secrets +_libjuju_version = importlib.metadata.version("juju") + +juju_major_version = int(_libjuju_version.split(".")[0]) diff --git a/tests/integration/markers.py b/tests/integration/markers.py new file mode 100644 index 0000000000..834cfac03e --- /dev/null +++ b/tests/integration/markers.py @@ -0,0 +1,10 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. + + +import pytest + +from .juju_ import juju_major_version + +juju2 = pytest.mark.skipif(juju_major_version != 2, reason="Requires juju 2") +juju3 = pytest.mark.skipif(juju_major_version != 3, reason="Requires juju 3") diff --git a/tests/integration/new_relations/__init__.py b/tests/integration/new_relations/__init__.py new file mode 100644 index 0000000000..db3bfe1a65 --- /dev/null +++ b/tests/integration/new_relations/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. 
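The new `juju_.py` and `markers.py` modules above let integration tests be gated on the Juju major version inferred from the installed libjuju, which is what the CI matrix varies. A hypothetical test module (not part of this patch) showing how the markers and `juju_major_version` are intended to be used:

```python
# Hypothetical usage example: a test module under tests/integration/ that runs
# different tests depending on which Juju major version the CI job targets.
import pytest

from . import markers
from .juju_ import juju_major_version


@pytest.mark.group(1)
@markers.juju3  # skipped on the Juju 2.9 matrix entry
async def test_runs_only_on_juju3():
    assert juju_major_version == 3


@pytest.mark.group(1)
@markers.juju2  # skipped on the Juju 3.1 matrix entry
async def test_runs_only_on_juju2():
    assert juju_major_version == 2
```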
diff --git a/tests/integration/new_relations/test_new_relations.py b/tests/integration/new_relations/test_new_relations.py index 5362a40904..f982e0547d 100644 --- a/tests/integration/new_relations/test_new_relations.py +++ b/tests/integration/new_relations/test_new_relations.py @@ -12,8 +12,9 @@ import yaml from pytest_operator.plugin import OpsTest -from tests.integration.helpers import CHARM_SERIES, scale_application -from tests.integration.new_relations.helpers import ( +from ..helpers import CHARM_SERIES, scale_application +from ..juju_ import juju_major_version +from .helpers import ( build_connection_string, check_relation_data_existence, get_application_relation_data, @@ -35,6 +36,7 @@ INVALID_EXTRA_USER_ROLE_BLOCKING_MESSAGE = "invalid role(s) for extra user roles" +@pytest.mark.group(1) @pytest.mark.abort_on_fail async def test_deploy_charms(ops_test: OpsTest, charm): """Deploy both charms (application and database) to use in the tests.""" @@ -68,6 +70,7 @@ async def test_deploy_charms(ops_test: OpsTest, charm): await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active", timeout=3000) +@pytest.mark.group(1) async def test_no_read_only_endpoint_in_standalone_cluster(ops_test: OpsTest): """Test that there is no read-only endpoint in a standalone cluster.""" async with ops_test.fast_forward(): @@ -84,7 +87,7 @@ async def test_no_read_only_endpoint_in_standalone_cluster(ops_test: OpsTest): await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") # Check that on juju 3 we have secrets and no username and password in the rel databag - if hasattr(ops_test.model, "list_secrets"): + if juju_major_version > 2: logger.info("checking for secrets") secret_uri, password = await asyncio.gather( get_application_relation_data( @@ -114,6 +117,7 @@ async def test_no_read_only_endpoint_in_standalone_cluster(ops_test: OpsTest): ) +@pytest.mark.group(1) async def test_read_only_endpoint_in_scaled_up_cluster(ops_test: OpsTest): """Test that there is read-only endpoint in a scaled up cluster.""" async with ops_test.fast_forward(): @@ -131,6 +135,7 @@ async def test_read_only_endpoint_in_scaled_up_cluster(ops_test: OpsTest): ) +@pytest.mark.group(1) async def test_database_relation_with_charm_libraries(ops_test: OpsTest): """Test basic functionality of database relation interface.""" # Get the connection string to connect to the database using the read/write endpoint. @@ -178,6 +183,7 @@ async def test_database_relation_with_charm_libraries(ops_test: OpsTest): cursor.execute("DROP TABLE test;") +@pytest.mark.group(1) async def test_user_with_extra_roles(ops_test: OpsTest): """Test superuser actions and the request for more permissions.""" # Get the connection string to connect to the database. @@ -198,6 +204,7 @@ async def test_user_with_extra_roles(ops_test: OpsTest): connection.close() +@pytest.mark.group(1) async def test_two_applications_doesnt_share_the_same_relation_data(ops_test: OpsTest): """Test that two different application connect to the database with different credentials.""" # Set some variables to use in this test. 
@@ -250,6 +257,7 @@ async def test_two_applications_doesnt_share_the_same_relation_data(ops_test: Op psycopg2.connect(connection_string) +@pytest.mark.group(1) async def test_an_application_can_connect_to_multiple_database_clusters(ops_test: OpsTest): """Test that an application can connect to different clusters of the same database.""" # Relate the application with both database clusters @@ -280,6 +288,7 @@ async def test_an_application_can_connect_to_multiple_database_clusters(ops_test assert application_connection_string != another_application_connection_string +@pytest.mark.group(1) async def test_an_application_can_connect_to_multiple_aliased_database_clusters(ops_test: OpsTest): """Test that an application can connect to different clusters of the same database.""" # Relate the application with both database clusters @@ -313,6 +322,7 @@ async def test_an_application_can_connect_to_multiple_aliased_database_clusters( assert application_connection_string != another_application_connection_string +@pytest.mark.group(1) async def test_an_application_can_request_multiple_databases(ops_test: OpsTest): """Test that an application can request additional databases using the same interface.""" # Relate the charms using another relation and wait for them exchanging some connection data. @@ -333,6 +343,7 @@ async def test_an_application_can_request_multiple_databases(ops_test: OpsTest): assert first_database_connection_string != second_database_connection_string +@pytest.mark.group(1) async def test_relation_data_is_updated_correctly_when_scaling(ops_test: OpsTest): """Test that relation data, like connection data, is updated correctly when scaling.""" # Retrieve the list of current database unit names. @@ -342,7 +353,7 @@ async def test_relation_data_is_updated_correctly_when_scaling(ops_test: OpsTest # Add two more units. await ops_test.model.applications[DATABASE_APP_NAME].add_units(2) await ops_test.model.wait_for_idle( - apps=[DATABASE_APP_NAME], status="active", timeout=1000, wait_for_exact_units=4 + apps=[DATABASE_APP_NAME], status="active", timeout=1500, wait_for_exact_units=4 ) # Remove the original units. @@ -399,6 +410,7 @@ async def test_relation_data_is_updated_correctly_when_scaling(ops_test: OpsTest psycopg2.connect(primary_connection_string) +@pytest.mark.group(1) async def test_relation_with_no_database_name(ops_test: OpsTest): """Test that a relation with no database name doesn't block the charm.""" async with ops_test.fast_forward(): @@ -415,6 +427,7 @@ async def test_relation_with_no_database_name(ops_test: OpsTest): await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active", raise_on_blocked=True) +@pytest.mark.group(1) async def test_admin_role(ops_test: OpsTest): """Test that the admin role gives access to all the databases.""" all_app_names = [DATA_INTEGRATOR_APP_NAME] @@ -499,6 +512,7 @@ async def test_admin_role(ops_test: OpsTest): connection.close() +@pytest.mark.group(1) async def test_invalid_extra_user_roles(ops_test: OpsTest): async with ops_test.fast_forward(): # Remove the relation between the database and the first data integrator. 
diff --git a/tests/integration/test_backups.py b/tests/integration/test_backups.py index e028b481dd..254ceabcb4 100644 --- a/tests/integration/test_backups.py +++ b/tests/integration/test_backups.py @@ -10,7 +10,7 @@ from pytest_operator.plugin import OpsTest from tenacity import Retrying, stop_after_attempt, wait_exponential -from tests.integration.helpers import ( +from .helpers import ( CHARM_SERIES, DATABASE_APP_NAME, construct_endpoint, @@ -84,7 +84,6 @@ async def test_none() -> None: pass -@pytest.mark.uses_secrets @pytest.mark.abort_on_fail async def test_backup(ops_test: OpsTest, cloud_configs: Tuple[Dict, Dict]) -> None: """Build and deploy two units of PostgreSQL and then test the backup and restore actions.""" @@ -222,8 +221,7 @@ async def test_backup(ops_test: OpsTest, cloud_configs: Tuple[Dict, Dict]) -> No await ops_test.model.remove_application(TLS_CERTIFICATES_APP_NAME, block_until_done=True) -@pytest.mark.uses_secrets -async def test_restore_on_new_cluster(ops_test: OpsTest) -> None: +async def test_restore_on_new_cluster(ops_test: OpsTest, github_secrets) -> None: """Test that is possible to restore a backup to another PostgreSQL cluster.""" charm = await ops_test.build_charm(".") database_app_name = f"new-{DATABASE_APP_NAME}" @@ -300,7 +298,6 @@ async def test_restore_on_new_cluster(ops_test: OpsTest) -> None: connection.close() -@pytest.mark.uses_secrets async def test_invalid_config_and_recovery_after_fixing_it( ops_test: OpsTest, cloud_configs: Tuple[Dict, Dict] ) -> None: diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py index 011c0b46a8..d2a3026a82 100644 --- a/tests/integration/test_charm.py +++ b/tests/integration/test_charm.py @@ -10,12 +10,12 @@ import requests from psycopg2 import sql from pytest_operator.plugin import OpsTest -from tenacity import Retrying, stop_after_attempt, wait_exponential +from tenacity import Retrying, stop_after_attempt, wait_exponential, wait_fixed -from tests.helpers import STORAGE_PATH -from tests.integration.helpers import ( +from .helpers import ( CHARM_SERIES, DATABASE_APP_NAME, + STORAGE_PATH, check_cluster_members, convert_records_to_dict, db_connect, @@ -32,6 +32,7 @@ UNIT_IDS = [0, 1, 2] +@pytest.mark.group(1) @pytest.mark.abort_on_fail @pytest.mark.skip_if_deployed async def test_deploy(ops_test: OpsTest, charm: str): @@ -51,10 +52,11 @@ async def test_deploy(ops_test: OpsTest, charm: str): # Reducing the update status frequency to speed up the triggering of deferred events. 
await ops_test.model.set_config({"update-status-hook-interval": "10s"}) - await ops_test.model.wait_for_idle(apps=[DATABASE_APP_NAME], status="active", timeout=1000) + await ops_test.model.wait_for_idle(apps=[DATABASE_APP_NAME], status="active", timeout=1500) assert ops_test.model.applications[DATABASE_APP_NAME].units[0].workload_status == "active" +@pytest.mark.group(1) @pytest.mark.abort_on_fail @pytest.mark.parametrize("unit_id", UNIT_IDS) async def test_database_is_up(ops_test: OpsTest, unit_id: int): @@ -65,6 +67,7 @@ async def test_database_is_up(ops_test: OpsTest, unit_id: int): assert result.status_code == 200 +@pytest.mark.group(1) @pytest.mark.parametrize("unit_id", UNIT_IDS) async def test_exporter_is_up(ops_test: OpsTest, unit_id: int): # Query Patroni REST API and check the status that indicates @@ -77,6 +80,7 @@ async def test_exporter_is_up(ops_test: OpsTest, unit_id: int): ), "Scrape error in postgresql_prometheus_exporter" +@pytest.mark.group(1) @pytest.mark.parametrize("unit_id", UNIT_IDS) async def test_settings_are_correct(ops_test: OpsTest, unit_id: int): # Connect to the PostgreSQL instance. @@ -162,6 +166,7 @@ async def test_settings_are_correct(ops_test: OpsTest, unit_id: int): assert unit.data["port-ranges"][0]["protocol"] == "tcp" +@pytest.mark.group(1) async def test_postgresql_parameters_change(ops_test: OpsTest) -> None: """Test that's possible to change PostgreSQL parameters.""" await ops_test.model.applications[DATABASE_APP_NAME].set_config( @@ -198,6 +203,7 @@ async def test_postgresql_parameters_change(ops_test: OpsTest) -> None: assert settings["lc_monetary"] == "en_GB.utf8" +@pytest.mark.group(1) async def test_scale_down_and_up(ops_test: OpsTest): """Test data is replicated to new units after a scale up.""" # Ensure the initial number of units in the application. @@ -268,7 +274,7 @@ async def test_scale_down_and_up(ops_test: OpsTest): await ops_test.model.wait_for_idle( apps=[DATABASE_APP_NAME], status="active", - timeout=1000, + timeout=2000, wait_for_exact_units=initial_scale, ) @@ -279,12 +285,15 @@ async def test_scale_down_and_up(ops_test: OpsTest): await scale_application(ops_test, DATABASE_APP_NAME, initial_scale) +@pytest.mark.group(1) async def test_persist_data_through_primary_deletion(ops_test: OpsTest): """Test data persists through a primary deletion.""" # Set a composite application name in order to test in more than one series at the same time. any_unit_name = ops_test.model.applications[DATABASE_APP_NAME].units[0].name - primary = await get_primary(ops_test, any_unit_name) - password = await get_password(ops_test, primary) + for attempt in Retrying(stop=stop_after_attempt(3), wait=wait_fixed(5), reraise=True): + with attempt: + primary = await get_primary(ops_test, any_unit_name) + password = await get_password(ops_test, primary) # Write data to primary IP. host = get_unit_address(ops_test, primary) @@ -299,11 +308,11 @@ async def test_persist_data_through_primary_deletion(ops_test: OpsTest): await ops_test.model.destroy_units( primary, ) - await ops_test.model.wait_for_idle(apps=[DATABASE_APP_NAME], status="active", timeout=1000) + await ops_test.model.wait_for_idle(apps=[DATABASE_APP_NAME], status="active", timeout=1500) # Add the unit again. 
await ops_test.model.applications[DATABASE_APP_NAME].add_unit(count=1) - await ops_test.model.wait_for_idle(apps=[DATABASE_APP_NAME], status="active", timeout=1000) + await ops_test.model.wait_for_idle(apps=[DATABASE_APP_NAME], status="active", timeout=1500) # Testing write occurred to every postgres instance by reading from them for unit in ops_test.model.applications[DATABASE_APP_NAME].units: diff --git a/tests/integration/test_db.py b/tests/integration/test_db.py index 6002f39020..1d5aee7d46 100644 --- a/tests/integration/test_db.py +++ b/tests/integration/test_db.py @@ -11,7 +11,8 @@ from pytest_operator.plugin import OpsTest from tenacity import Retrying, stop_after_delay, wait_fixed -from tests.integration.helpers import ( +from . import markers +from .helpers import ( CHARM_SERIES, DATABASE_APP_NAME, build_connection_string, @@ -32,6 +33,7 @@ RELATION_NAME = "db" +@pytest.mark.group(1) async def test_mailman3_core_db(ops_test: OpsTest, charm: str) -> None: """Deploy Mailman3 Core to test the 'db' relation.""" async with ops_test.fast_forward(): @@ -47,7 +49,7 @@ async def test_mailman3_core_db(ops_test: OpsTest, charm: str) -> None: await ops_test.model.wait_for_idle( apps=[DATABASE_APP_NAME], status="active", - timeout=1000, + timeout=1500, wait_for_exact_units=DATABASE_UNITS, ) @@ -97,6 +99,7 @@ async def test_mailman3_core_db(ops_test: OpsTest, charm: str) -> None: assert domain_name not in [domain.mail_host for domain in client.domains] +@pytest.mark.group(1) async def test_relation_data_is_updated_correctly_when_scaling(ops_test: OpsTest): """Test that relation data, like connection data, is updated correctly when scaling.""" # Retrieve the list of current database unit names. @@ -106,7 +109,7 @@ async def test_relation_data_is_updated_correctly_when_scaling(ops_test: OpsTest # Add two more units. await ops_test.model.applications[DATABASE_APP_NAME].add_units(2) await ops_test.model.wait_for_idle( - apps=[DATABASE_APP_NAME], status="active", timeout=1000, wait_for_exact_units=4 + apps=[DATABASE_APP_NAME], status="active", timeout=1500, wait_for_exact_units=4 ) # Remove the original units. @@ -169,6 +172,7 @@ async def test_relation_data_is_updated_correctly_when_scaling(ops_test: OpsTest psycopg2.connect(primary_connection_string) +@pytest.mark.group(1) @pytest.mark.unstable async def test_nextcloud_db_blocked(ops_test: OpsTest, charm: str) -> None: async with ops_test.fast_forward(): @@ -210,6 +214,7 @@ async def test_nextcloud_db_blocked(ops_test: OpsTest, charm: str) -> None: await ops_test.model.remove_application("nextcloud", block_until_done=True) +@pytest.mark.group(1) async def test_sentry_db_blocked(ops_test: OpsTest, charm: str) -> None: async with ops_test.fast_forward(): # Deploy Sentry and its dependencies. @@ -289,6 +294,7 @@ async def test_sentry_db_blocked(ops_test: OpsTest, charm: str) -> None: ) +@pytest.mark.group(1) @pytest.mark.unstable async def test_weebl_db(ops_test: OpsTest, charm: str) -> None: async with ops_test.fast_forward(): @@ -316,7 +322,8 @@ async def test_weebl_db(ops_test: OpsTest, charm: str) -> None: await ops_test.model.remove_application("weebl", block_until_done=True) -@pytest.mark.juju2 +@markers.juju2 +@pytest.mark.group(1) async def test_canonical_livepatch_onprem_bundle_db(ops_test: OpsTest) -> None: # Deploy and test the Livepatch onprem bundle (using this PostgreSQL charm # and an overlay to make the Ubuntu Advantage charm work with PostgreSQL). 
diff --git a/tests/integration/test_db_admin.py b/tests/integration/test_db_admin.py
index fc588ba3c7..4c7868035b 100644
--- a/tests/integration/test_db_admin.py
+++ b/tests/integration/test_db_admin.py
@@ -4,10 +4,11 @@
 import json
 import logging

+import pytest
 from landscape_api.base import HTTPError, run_query
 from pytest_operator.plugin import OpsTest

-from tests.integration.helpers import (
+from .helpers import (
     CHARM_SERIES,
     DATABASE_APP_NAME,
     check_database_users_existence,
@@ -32,6 +33,7 @@
 RELATION_NAME = "db-admin"


+@pytest.mark.group(1)
 async def test_landscape_scalable_bundle_db(ops_test: OpsTest, charm: str) -> None:
     """Deploy Landscape Scalable Bundle to test the 'db-admin' relation."""
     await ops_test.model.deploy(
@@ -39,7 +41,7 @@ async def test_landscape_scalable_bundle_db(ops_test: OpsTest, charm: str) -> No
         application_name=DATABASE_APP_NAME,
         num_units=DATABASE_UNITS,
         series=CHARM_SERIES,
-        config={"profile": "testing"},
+        config={"profile": "testing", "plugin_plpython3u_enable": "True"},
     )

     # Deploy and test the Landscape Scalable bundle (using this PostgreSQL charm).
@@ -49,6 +51,7 @@ async def test_landscape_scalable_bundle_db(ops_test: OpsTest, charm: str) -> No
         LANDSCAPE_APP_NAME,
         main_application_num_units=2,
         relation_name=RELATION_NAME,
+        timeout=3000,
     )
     await check_databases_creation(
         ops_test,
@@ -117,7 +120,7 @@ async def test_landscape_scalable_bundle_db(ops_test: OpsTest, charm: str) -> No
     # rebooting the unit machine in the middle of a hook (what is needed when the issue from
     # https://bugs.launchpad.net/juju/+bug/1999758 happens).
     await ops_test.model.wait_for_idle(
-        apps=[DATABASE_APP_NAME], status="active", timeout=600, raise_on_error=False
+        apps=[DATABASE_APP_NAME], status="active", timeout=1500, raise_on_error=False
     )

     await ensure_correct_relation_data(ops_test, DATABASE_UNITS, LANDSCAPE_APP_NAME, RELATION_NAME)
diff --git a/tests/integration/test_password_rotation.py b/tests/integration/test_password_rotation.py
index d8de9080f3..7690390e14 100644
--- a/tests/integration/test_password_rotation.py
+++ b/tests/integration/test_password_rotation.py
@@ -7,9 +7,9 @@
 import pytest
 from pytest_operator.plugin import OpsTest

-from tests.helpers import METADATA
-from tests.integration.helpers import (
+from .helpers import (
     CHARM_SERIES,
+    METADATA,
     check_patroni,
     get_leader_unit,
     get_password,
@@ -20,6 +20,7 @@
 APP_NAME = METADATA["name"]


+@pytest.mark.group(1)
 @pytest.mark.abort_on_fail
 @pytest.mark.skip_if_deployed
 async def test_deploy_active(ops_test: OpsTest):
@@ -33,9 +34,10 @@ async def test_deploy_active(ops_test: OpsTest):
             series=CHARM_SERIES,
             config={"profile": "testing"},
         )
-        await ops_test.model.wait_for_idle(apps=[APP_NAME], status="active", timeout=1000)
+        await ops_test.model.wait_for_idle(apps=[APP_NAME], status="active", timeout=1500)


+@pytest.mark.group(1)
 async def test_password_rotation(ops_test: OpsTest):
     """Test password rotation action."""
     # Get the initial passwords set for the system users.
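test_password_rotation.py (and test_tls.py below) now import `METADATA` from the integration `helpers` module instead of the top-level `tests.helpers`, which keeps the suite importable once `PYTHONPATH` no longer includes the repository root (see the tox.ini change later in this patch). A sketch of how such a constant is typically defined; the actual helpers.py is not shown in this excerpt:

```python
# Typical definition of a METADATA constant like the one re-exported from
# tests/integration/helpers.py (a sketch; the real helper module is not shown
# here): parse the charm's metadata.yaml once and reuse it across tests.
from pathlib import Path

import yaml

METADATA = yaml.safe_load(Path("./metadata.yaml").read_text())
APP_NAME = METADATA["name"]  # e.g. the charm's application name
```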
diff --git a/tests/integration/test_plugins.py b/tests/integration/test_plugins.py
index 4a00e9f7d3..9a04432549 100644
--- a/tests/integration/test_plugins.py
+++ b/tests/integration/test_plugins.py
@@ -7,7 +7,7 @@
 import pytest as pytest
 from pytest_operator.plugin import OpsTest

-from tests.integration.helpers import (
+from .helpers import (
     CHARM_SERIES,
     DATABASE_APP_NAME,
     db_connect,
@@ -59,6 +59,7 @@
 MODDATETIME_EXTENSION_STATEMENT = "CREATE TABLE mdt (moddate timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL);CREATE TRIGGER mdt_moddatetime BEFORE UPDATE ON mdt FOR EACH ROW EXECUTE PROCEDURE moddatetime (moddate);"


+@pytest.mark.group(1)
 @pytest.mark.abort_on_fail
 async def test_plugins(ops_test: OpsTest) -> None:
     """Build and deploy one unit of PostgreSQL and then test the available plugins."""
@@ -71,7 +72,7 @@ async def test_plugins(ops_test: OpsTest) -> None:
         series=CHARM_SERIES,
         config={"profile": "testing"},
     )
-    await ops_test.model.wait_for_idle(apps=[DATABASE_APP_NAME], status="active", timeout=1000)
+    await ops_test.model.wait_for_idle(apps=[DATABASE_APP_NAME], status="active", timeout=1500)

     sql_tests = {
         "plugin_citext_enable": CITEXT_EXTENSION_STATEMENT,
diff --git a/tests/integration/test_tls.py b/tests/integration/test_tls.py
index 963afe13f1..6d82a6b720 100644
--- a/tests/integration/test_tls.py
+++ b/tests/integration/test_tls.py
@@ -8,10 +8,10 @@
 from pytest_operator.plugin import OpsTest
 from tenacity import Retrying, stop_after_attempt, stop_after_delay, wait_exponential

-from tests.helpers import METADATA
-from tests.integration.helpers import (
+from .helpers import (
     CHARM_SERIES,
     DATABASE_APP_NAME,
+    METADATA,
     change_primary_start_timeout,
     check_tls,
     check_tls_patroni_api,
@@ -30,6 +30,7 @@
 TLS_CERTIFICATES_APP_NAME = "tls-certificates-operator"


+@pytest.mark.group(1)
 @pytest.mark.abort_on_fail
 @pytest.mark.skip_if_deployed
 async def test_deploy_active(ops_test: OpsTest):
@@ -47,6 +48,7 @@ async def test_deploy_active(ops_test: OpsTest):
     # bundles don't wait between deploying charms.


+@pytest.mark.group(1)
 async def test_tls_enabled(ops_test: OpsTest) -> None:
     """Test that TLS is enabled when relating to the TLS Certificates Operator."""
     async with ops_test.fast_forward():
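test_plugins.py builds a `sql_tests` mapping from charm config keys (such as `plugin_citext_enable`) to SQL statements like `MODDATETIME_EXTENSION_STATEMENT`, so each plugin is verified by enabling its config option and running a statement that only succeeds once the extension is usable. A rough sketch of that enable-then-exercise shape, assuming a psycopg2 connection (`connect_to_unit` below is hypothetical and not the suite's `db_connect` helper):

```python
# Rough sketch of the pattern used by test_plugins.py. `connect_to_unit` is a
# hypothetical psycopg2 wrapper; the statements passed in would be probes such
# as CITEXT_EXTENSION_STATEMENT that fail unless the extension is enabled.
import psycopg2


def connect_to_unit(host: str, password: str) -> psycopg2.extensions.connection:
    return psycopg2.connect(
        f"dbname='postgres' user='operator' host='{host}' "
        f"password='{password}' connect_timeout=10"
    )


def check_plugins(host: str, password: str, sql_tests: dict[str, str]) -> None:
    connection = connect_to_unit(host, password)
    connection.autocommit = True
    try:
        with connection.cursor() as cursor:
            for config_key, statement in sql_tests.items():
                # In the real test the matching charm config option (e.g.
                # plugin_citext_enable) is set to "True" before this probe runs.
                cursor.execute(statement)
    finally:
        connection.close()
```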
@@ -156,6 +158,7 @@ async def test_tls_enabled(ops_test: OpsTest) -> None:
     assert await check_tls_patroni_api(ops_test, unit.name, enabled=False)


+@pytest.mark.group(1)
 @pytest.mark.skipif(
     not os.environ.get("RESTART_MACHINE_TEST"),
     reason="RESTART_MACHINE_TEST environment variable not set",
diff --git a/tox.ini b/tox.ini
index 81a657e469..8f90888e84 100644
--- a/tox.ini
+++ b/tox.ini
@@ -12,7 +12,7 @@ all_path = {[vars]src_path} {[vars]tests_path}

 [testenv]
 set_env =
-    PYTHONPATH = {tox_root}:{tox_root}/lib:{[vars]src_path}
+    PYTHONPATH = {tox_root}/lib:{[vars]src_path}
     PY_COLORS = 1
 allowlist_externals =
     poetry
@@ -64,254 +64,6 @@ commands =
         -m pytest -v --tb native -s {posargs} {[vars]tests_path}/unit
     poetry run coverage report

-[testenv:backup-integration]
-description = Run backup integration tests
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    CI
-    GITHUB_OUTPUT
-    SECRETS_FROM_GITHUB
-allowlist_externals =
-    {[testenv:pack-wrapper]allowlist_externals}
-commands_pre =
-    poetry install --only integration --no-root
-    {[testenv:pack-wrapper]commands_pre}
-commands =
-    poetry run pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_backups.py
-commands_post =
-    {[testenv:pack-wrapper]commands_post}
-
-[testenv:charm-integration]
-description = Run charm integration tests
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    CI
-    GITHUB_OUTPUT
-allowlist_externals =
-    {[testenv:pack-wrapper]allowlist_externals}
-commands_pre =
-    poetry install --only integration --no-root
-    {[testenv:pack-wrapper]commands_pre}
-commands =
-    poetry run pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_charm.py
-commands_post =
-    {[testenv:pack-wrapper]commands_post}
-
-[testenv:database-relation-integration]
-description = Run database relation integration tests
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    CI
-    GITHUB_OUTPUT
-allowlist_externals =
-    {[testenv:pack-wrapper]allowlist_externals}
-commands_pre =
-    poetry install --only integration --no-root
-    {[testenv:pack-wrapper]commands_pre}
-commands =
-    poetry run pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/new_relations/test_new_relations.py
-commands_post =
-    {[testenv:pack-wrapper]commands_post}
-
-[testenv:db-relation-integration]
-description = Run db relation integration tests
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    CI
-    GITHUB_OUTPUT
-allowlist_externals =
-    {[testenv:pack-wrapper]allowlist_externals}
-commands_pre =
-    poetry install --only integration --no-root
-    {[testenv:pack-wrapper]commands_pre}
-commands =
-    poetry run pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_db.py
-commands_post =
-    {[testenv:pack-wrapper]commands_post}
-
-[testenv:db-admin-relation-integration]
-description = Run db-admin relation integration tests
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    CI
-    GITHUB_OUTPUT
-allowlist_externals =
-    {[testenv:pack-wrapper]allowlist_externals}
-commands_pre =
-    poetry install --only integration --no-root
-    {[testenv:pack-wrapper]commands_pre}
-commands =
-    poetry run pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_db_admin.py
-commands_post =
-    {[testenv:pack-wrapper]commands_post}
-
-[testenv:ha-replication-integration]
-description = Run high availability replication integration tests
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    CI
-    GITHUB_OUTPUT
-allowlist_externals =
-    {[testenv:pack-wrapper]allowlist_externals}
-commands_pre =
-    poetry install --only integration --no-root
-    {[testenv:pack-wrapper]commands_pre}
-commands =
-    poetry run pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/ha_tests/test_replication.py
-commands_post =
-    {[testenv:pack-wrapper]commands_post}
-
-[testenv:ha-self-healing-integration]
-description = Run high availability self healing integration tests
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    CI
-    GITHUB_OUTPUT
-allowlist_externals =
-    {[testenv:pack-wrapper]allowlist_externals}
-commands_pre =
-    poetry install --only integration --no-root
-    {[testenv:pack-wrapper]commands_pre}
-commands =
-    poetry run pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/ha_tests/test_self_healing.py
-commands_post =
-    {[testenv:pack-wrapper]commands_post}
-
-[testenv:ha-restore-cluster-integration]
-description = Run high availability cluster restore integration tests
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    CI
-    GITHUB_OUTPUT
-allowlist_externals =
-    {[testenv:pack-wrapper]allowlist_externals}
-commands_pre =
-    poetry install --only integration --no-root
-    {[testenv:pack-wrapper]commands_pre}
-commands =
-    poetry run pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/ha_tests/test_restore_cluster.py
-commands_post =
-    {[testenv:pack-wrapper]commands_post}
-
-[testenv:password-rotation-integration]
-description = Run password rotation integration tests
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    CI
-    GITHUB_OUTPUT
-allowlist_externals =
-    {[testenv:pack-wrapper]allowlist_externals}
-commands_pre =
-    poetry install --only integration --no-root
-    {[testenv:pack-wrapper]commands_pre}
-commands =
-    poetry run pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_password_rotation.py
-commands_post =
-    {[testenv:pack-wrapper]commands_post}
-
-[testenv:plugins-integration]
-description = Run plugins integration tests
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    CI
-    GITHUB_OUTPUT
-allowlist_externals =
-    {[testenv:pack-wrapper]allowlist_externals}
-commands_pre =
-    poetry install --only integration --no-root
-    {[testenv:pack-wrapper]commands_pre}
-commands =
-    poetry run pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_plugins.py
-commands_post =
-    {[testenv:pack-wrapper]commands_post}
-
-[testenv:tls-integration]
-description = Run TLS integration tests
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    CI
-    GITHUB_OUTPUT
-allowlist_externals =
-    {[testenv:pack-wrapper]allowlist_externals}
-commands_pre =
-    poetry install --only integration --no-root
-    {[testenv:pack-wrapper]commands_pre}
-commands =
-    poetry run pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_tls.py
-commands_post =
-    {[testenv:pack-wrapper]commands_post}
-
-[testenv:upgrade-integration]
-description = Run upgrade integration tests
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    CI
-    GITHUB_OUTPUT
-allowlist_externals =
-    {[testenv:pack-wrapper]allowlist_externals}
-commands_pre =
-    poetry install --only integration --no-root
-    {[testenv:pack-wrapper]commands_pre}
-commands =
-    poetry run pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/ha_tests/test_upgrade.py
-commands_post =
-    {[testenv:pack-wrapper]commands_post}
-
-[testenv:upgrade-from-stable-integration]
-description = Run upgrade from stable integration tests
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    CI
-    GITHUB_OUTPUT
-allowlist_externals =
-    {[testenv:pack-wrapper]allowlist_externals}
-commands_pre =
-    poetry install --only integration --no-root
-    {[testenv:pack-wrapper]commands_pre}
-commands =
-    poetry run pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/ha_tests/test_upgrade_from_stable.py
-commands_post =
-    {[testenv:pack-wrapper]commands_post}
-
 [testenv:integration]
 description = Run integration tests
 set_env =