diff --git a/.github/workflows/_reusable-package-release.yml b/.github/workflows/_reusable-package-release.yml index 6cc9043a..edc3685a 100644 --- a/.github/workflows/_reusable-package-release.yml +++ b/.github/workflows/_reusable-package-release.yml @@ -94,13 +94,13 @@ jobs: echo "The `python-versions-array` input is required when `build-and-publish-python-package` is set to `true`." exit 1 - if: ${{ endsWith(github.repository, '/python-package-ci-cd') }} # Run the local action when this is run in the python-package-ci-cd repository - uses: ./actions/find-unreleased-changelog-items + uses: ./actions/find_unreleased_changelog_items with: release-level: ${{ inputs.release-level }} previous-changelog-filename: ${{ inputs.previous-changelog-filename }} previous-release-notes-filename: ${{ inputs.previous-release-notes-filename }} - if: ${{ !endsWith(github.repository, '/python-package-ci-cd') }} # Run the public action when this is run outside the python-package-ci-cd repository - uses: tektronix/python-package-ci-cd/actions/find-unreleased-changelog-items@main # TODO: update branch to tag + uses: tektronix/python-package-ci-cd/actions/find_unreleased_changelog_items@v0.0.0 with: release-level: ${{ inputs.release-level }} previous-changelog-filename: ${{ inputs.previous-changelog-filename }} @@ -125,12 +125,12 @@ jobs: fetch-depth: 0 token: ${{ secrets.checkout-token }} - if: ${{ endsWith(github.repository, '/python-package-ci-cd') }} # Run the local action when this is run in the python-package-ci-cd repository - uses: ./actions/find-unreleased-changelog-items + uses: ./actions/find_unreleased_changelog_items with: previous-changelog-filename: ${{ inputs.previous-changelog-filename }} previous-release-notes-filename: ${{ inputs.previous-release-notes-filename }} - if: ${{ !endsWith(github.repository, '/python-package-ci-cd') }} # Run the public action when this is run outside the python-package-ci-cd repository - uses: tektronix/python-package-ci-cd/actions/find-unreleased-changelog-items@main # TODO: update branch to tag + uses: tektronix/python-package-ci-cd/actions/find_unreleased_changelog_items@v0.0.0 with: previous-changelog-filename: ${{ inputs.previous-changelog-filename }} previous-release-notes-filename: ${{ inputs.previous-release-notes-filename }} @@ -225,7 +225,7 @@ jobs: name: Packages path: dist - name: Publish package distributions to GitHub Releases - uses: python-semantic-release/upload-to-gh-release@main + uses: python-semantic-release/upload-to-gh-release@v9.8.7 with: root_options: -v --strict github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/_reusable-package-testpypi.yml b/.github/workflows/_reusable-package-testpypi.yml index 774d17cf..3197caf2 100644 --- a/.github/workflows/_reusable-package-testpypi.yml +++ b/.github/workflows/_reusable-package-testpypi.yml @@ -28,12 +28,12 @@ jobs: with: fetch-depth: 0 - if: ${{ endsWith(github.repository, '/python-package-ci-cd') }} # Run the local action when this is run in the python-package-ci-cd repository - uses: ./actions/create-unique-testpypi-version + uses: ./actions/create_unique_testpypi_version id: create-version with: package-name: ${{ inputs.package-name }} - if: ${{ !endsWith(github.repository, '/python-package-ci-cd') }} # Run the public action when this is run outside the python-package-ci-cd repository - uses: tektronix/python-package-ci-cd/actions/create-unique-testpypi-version@main # TODO: update branch to tag + uses: 
tektronix/python-package-ci-cd/actions/create_unique_testpypi_version@v0.0.0 id: create-version with: package-name: ${{ inputs.package-name }} diff --git a/.github/workflows/_reusable-update-python-and-pre-commit-dependencies.yml b/.github/workflows/_reusable-update-python-and-pre-commit-dependencies.yml index 6dfa005f..bd75d04d 100644 --- a/.github/workflows/_reusable-update-python-and-pre-commit-dependencies.yml +++ b/.github/workflows/_reusable-update-python-and-pre-commit-dependencies.yml @@ -78,7 +78,7 @@ jobs: git_user_signingkey: true git_commit_gpgsign: true - if: ${{ endsWith(github.repository, '/python-package-ci-cd') }} # Run the local action when this is run in the python-package-ci-cd repository - uses: ./actions/update-development-dependencies + uses: ./actions/update_development_dependencies with: dependency-dict: ${{ inputs.dependency-dict }} update-pre-commit: ${{ inputs.update-pre-commit }} @@ -86,7 +86,7 @@ jobs: pre-commit-hook-skip-list: ${{ inputs.pre-commit-hook-skip-list }} export-dependency-groups: ${{ inputs.export-dependency-groups }} - if: ${{ !endsWith(github.repository, '/python-package-ci-cd') }} # Run the public action when this is run outside the python-package-ci-cd repository - uses: tektronix/python-package-ci-cd/actions/update-development-dependencies@main # TODO: update branch to tag + uses: tektronix/python-package-ci-cd/actions/update_development_dependencies@v0.0.0 with: dependency-dict: ${{ inputs.dependency-dict }} update-pre-commit: ${{ inputs.update-pre-commit }} diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml new file mode 100644 index 00000000..dab66770 --- /dev/null +++ b/.github/workflows/dependency-review.yml @@ -0,0 +1,17 @@ +--- +name: Pull Request Dependency Review +on: + pull_request: + branches: [main] +permissions: + contents: read + pull-requests: write +jobs: + dependency-review: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/dependency-review-action@v4 + with: + fail-on-severity: low + comment-summary-in-pr: on-failure diff --git a/.github/workflows/package-release.yml b/.github/workflows/package-release.yml index e2dbf2b5..f6d7348e 100644 --- a/.github/workflows/package-release.yml +++ b/.github/workflows/package-release.yml @@ -1,5 +1,5 @@ --- -name: Package Release +name: Publish to GitHub on: workflow_dispatch: inputs: @@ -16,7 +16,7 @@ concurrency: group: pypi jobs: package-release: - uses: tektronix/tm_devices/.github/workflows/package-release.yml@main + uses: ./.github/workflows/_reusable-package-release.yml with: package-name: python-package-ci-cd repo-name: tektronix/python-package-ci-cd diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml deleted file mode 100644 index b6ed79d7..00000000 --- a/.github/workflows/pre-commit.yml +++ /dev/null @@ -1,30 +0,0 @@ ---- -name: Run pre-commit -on: - push: - branches: [main] - pull_request: - branches: [main] -# Cancel running jobs for the same workflow and branch. 
-concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} -jobs: - pre-commit: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - with: - python-version-file: pyproject.toml - - name: Install dependencies - run: | - pip install poetry - poetry install - - uses: actions/cache@v4 - with: - path: ~/.cache/pre-commit - key: pre-commit|${{ env.pythonLocation }}|${{ hashFiles('.pre-commit-config.yaml') - }} - - name: Run pre-commit - run: poetry run pre-commit run --all-files diff --git a/.github/workflows/publish-test-results.yml b/.github/workflows/publish-test-results.yml new file mode 100644 index 00000000..6471f1a3 --- /dev/null +++ b/.github/workflows/publish-test-results.yml @@ -0,0 +1,14 @@ +--- +name: Publish Test Results +on: + workflow_run: + workflows: [Test code] + types: [completed] +jobs: + publish-test-results: + uses: ./.github/workflows/_reusable-publish-test-results.yml + with: + operating-systems-array: '["ubuntu"]' # this needs to match the operating-systems-array in the test-code.yml file + permissions: + checks: write + pull-requests: write diff --git a/.github/workflows/test-actions.yml b/.github/workflows/test-actions.yml index bcf8bc05..6ec24dc6 100644 @@ -11,11 +11,11 @@ concurrency: cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} # IMPORTANT: Any new jobs need to be added to the check-tests-passed job to ensure they correctly gate code changes jobs: - test-create-unique-testpypi-version: + test-create_unique_testpypi_version: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: ./actions/create-unique-testpypi-version + - uses: ./actions/create_unique_testpypi_version id: create-version with: package-name: example-package # this is an example package that is never updated @@ -27,7 +27,7 @@ jobs: echo "Actual: ${{ steps.create-version.outputs.new-version }}" exit 1 fi - test-find-unreleased-changelog-items: + test-find_unreleased_changelog_items: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -46,7 +46,7 @@ jobs: - Something was done here in the past EOF cp temp_changelog.md CHANGELOG.md - - uses: ./actions/find-unreleased-changelog-items + - uses: ./actions/find_unreleased_changelog_items with: release-level: patch previous-changelog-filename: .testing_previous_changelog_for_template.md @@ -115,24 +115,24 @@ jobs: diff <(echo "$MULTILINE_STRING") "$FILE_PATH" exit 1 fi - test-update-development-dependencies: + test-update_development_dependencies: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: ./actions/update-development-dependencies + - uses: ./actions/update_development_dependencies with: update-pre-commit: true run-pre-commit: true dependency-dict: '{"dev": ["pyright"]}' pre-commit-hook-skip-list: remove-tabs,forbid-tabs,check-readthedocs,check-dependabot,check-github-actions,check-github-workflows,commitizen,blacken-docs,yamlfix,hadolint,mdformat,markdown-link-check,check-poetry,toml-sort-fix,pyright,poetry-audit,ruff,ruff-format,docformatter - export-dependency-groups: udd:actions/update-development-dependencies,cutv:actions/create-unique-testpypi-version,fci:actions/find-unreleased-changelog-items + export-dependency-groups: udd:actions/update_development_dependencies,cutv:actions/create_unique_testpypi_version,fci:actions/find_unreleased_changelog_items # Check that all jobs passed
check-action-tests-passed: if: ${{ !cancelled() }} needs: - - test-create-unique-testpypi-version - - test-find-unreleased-changelog-items - - test-update-development-dependencies + - test-create_unique_testpypi_version + - test-find_unreleased_changelog_items + - test-update_development_dependencies runs-on: ubuntu-latest steps: - name: Decide whether the needed jobs succeeded or failed diff --git a/.github/workflows/test-code.yml b/.github/workflows/test-code.yml new file mode 100644 index 00000000..e826af35 --- /dev/null +++ b/.github/workflows/test-code.yml @@ -0,0 +1,21 @@ +--- +name: Test code +on: + push: + branches: [main] + pull_request: + branches: [main] +# Cancel running jobs for the same workflow and branch. +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} +jobs: + test-code: + uses: ./.github/workflows/_reusable-test-code.yml + with: + repo-name: tektronix/python-package-ci-cd + operating-systems-array: '["ubuntu"]' # this needs to match the operating-systems-array in the publish-test-results.yml file + python-versions-array: '["3.12"]' # this needs to match the [tool.poetry.dependencies.python] version in the pyproject.toml file + upload-to-codecov: true + secrets: + codecov-token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/update-python-and-pre-commit-dependencies.yml b/.github/workflows/update-python-and-pre-commit-dependencies.yml index c9abdbbd..9002c217 100644 --- a/.github/workflows/update-python-and-pre-commit-dependencies.yml +++ b/.github/workflows/update-python-and-pre-commit-dependencies.yml @@ -13,7 +13,7 @@ jobs: update-pre-commit: true run-pre-commit: true pre-commit-hook-skip-list: pyright,poetry-audit - export-dependency-groups: udd:actions/update-development-dependencies,cutv:actions/create-unique-testpypi-version,fci:actions/find-unreleased-changelog-items + export-dependency-groups: udd:actions/update_development_dependencies,cutv:actions/create_unique_testpypi_version,fci:actions/find_unreleased_changelog_items permissions: contents: write secrets: diff --git a/.gitignore b/.gitignore index 1fc6d1c3..d5353cb9 100644 --- a/.gitignore +++ b/.gitignore @@ -15,6 +15,15 @@ ENV*/ env.bak/ venv.bak/ +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage* +.cache +coverage.xml +.pytest_cache/ +.results*/ + # ruff .ruff_cache/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3c0f003f..eebe9b5e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -51,10 +51,9 @@ repos: additional_dependencies: [black==24.4.2] # This may need to be updated/removed in the future once ruff supports formatting Python code blocks in markdown args: [--line-length=100] - repo: https://github.com/lyz-code/yamlfix - rev: 47039c9bf8039e81f092c9777a1bc8be32fb7870 # frozen: 1.16.0 + rev: 8072181c0f2eab9f2dd8db2eb3b9556d7cd0bd74 # frozen: 1.17.0 hooks: - id: yamlfix - additional_dependencies: [maison<2.0.0] - repo: https://github.com/AleksaC/hadolint-py rev: e70baeefd566058716df2f29eae8fe8ffc213a9f # frozen: v2.12.1b3 hooks: diff --git a/CHANGELOG.md b/CHANGELOG.md index b3aacde1..14cf9c52 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,19 +1,33 @@ # Changelog ---- +The format is based on [Keep a Changelog](https://keepachangelog.com), and this +project adheres to [Semantic Versioning](https://semver.org). -## Unreleased +Valid subsections within a version are: -Things to be included in the next release go here. 
- -### Added - -- Something will be added +- Added +- Changed +- Deprecated +- Removed +- Fixed +- Security --- -## v0.0.1 +## Unreleased + +Things to be included in the next release go here. ### Added -- Something was done here in the past +- Added a reusable workflow to check the Python API for breaking changes: [`check-api-for-breaking-changes.yml`](./workflows/check-api-for-breaking-changes.md) +- Added a reusable workflow to perform CodeQL analysis: [`codeql-analysis.yml`](./workflows/codeql-analysis.md) +- Added a reusable workflow to enforce Open-Source community standards: [`enforce-community-standards.yml`](./workflows/enforce-community-standards.md) +- Added a reusable workflow to build a Python package: [`package-build.yml`](./workflows/package-build.md) +- Added a reusable workflow to upload a Python package to TestPyPI: [`package-testpypi.yml`](./workflows/package-testpypi.md) +- Added a reusable workflow to publish API comparison results as a Pull Request comment: [`publish-api-comparison.yml`](./workflows/publish-api-comparison.md) +- Added a reusable workflow to publish test results as a Pull Request comment: [`publish-test-results.yml`](./workflows/publish-test-results.md) +- Added a reusable workflow to create a Software Bill of Materials (SBOM) and scan it: [`sbom-scan.yml`](./workflows/sbom-scan.md) +- Added a reusable workflow to run tests and linting against Python package code: [`test-code.yml`](./workflows/test-code.md) +- Added a reusable workflow to run documentation builds and tests for a Python package: [`test-docs.yml`](./workflows/test-docs.md) +- Added a reusable workflow to update Python and `pre-commit` dependencies: [`update-python-and-pre-commit-dependencies.yml`](./workflows/update-python-and-pre-commit-dependencies.md) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 136970cc..755d54d5 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -55,7 +55,7 @@ Ready to contribute? Here's how to set up `python-package-ci-cd` for local devel - Using the helper script (recommended): ```console - python contributor_setup.py + python scripts/contributor_setup.py ``` 4. Check to see if there are any [open issues](https://github.com/tektronix/python-package-ci-cd/issues) or [pull requests](https://github.com/tektronix/python-package-ci-cd/pulls) that are related to the change you wish to make. 
diff --git a/README.md b/README.md index 75a82533..b87a0f40 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,9 @@ -| | | -| ---------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **Testing** | [![Action testing status](https://github.com/tektronix/python-package-ci-cd/actions/workflows/test-actions.yml/badge.svg?branch=main)](https://github.com/tektronix/python-package-ci-cd/actions/workflows/test-actions.yml) | -| **Code Quality** | [![CodeQL status](https://github.com/tektronix/python-package-ci-cd/actions/workflows/codeql-analysis.yml/badge.svg?branch=main)](https://github.com/tektronix/python-package-ci-cd/actions/workflows/codeql-analysis.yml) [![CodeFactor grade](https://www.codefactor.io/repository/github/tektronix/python-package-ci-cd/badge)](https://www.codefactor.io/repository/github/tektronix/python-package-ci-cd) [![pre-commit status](https://results.pre-commit.ci/badge/github/tektronix/python-package-ci-cd/main.svg)](https://results.pre-commit.ci/latest/github/tektronix/python-package-ci-cd/main) | +| | | +| ---------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **Testing** | [![Code testing status](https://github.com/tektronix/python-package-ci-cd/actions/workflows/test-code.yml/badge.svg?branch=main)](https://github.com/tektronix/python-package-ci-cd/actions/workflows/test-code.yml) [![Action testing status](https://github.com/tektronix/python-package-ci-cd/actions/workflows/test-actions.yml/badge.svg?branch=main)](https://github.com/tektronix/python-package-ci-cd/actions/workflows/test-actions.yml) [![Coverage status](https://codecov.io/gh/tektronix/python-package-ci-cd/branch/main/graph/badge.svg)](https://codecov.io/gh/tektronix/python-package-ci-cd) | +| **Code Quality** | [![CodeQL status](https://github.com/tektronix/python-package-ci-cd/actions/workflows/codeql-analysis.yml/badge.svg?branch=main)](https://github.com/tektronix/python-package-ci-cd/actions/workflows/codeql-analysis.yml) [![CodeFactor grade](https://www.codefactor.io/repository/github/tektronix/python-package-ci-cd/badge)](https://www.codefactor.io/repository/github/tektronix/python-package-ci-cd) [![pre-commit status](https://results.pre-commit.ci/badge/github/tektronix/python-package-ci-cd/main.svg)](https://results.pre-commit.ci/latest/github/tektronix/python-package-ci-cd/main) | +| **Repository** | [![License: Apache 
2.0](https://img.shields.io/github/license/tektronix/python-package-ci-cd)](https://github.com/tektronix/python-package-ci-cd/blob/main/LICENSE.md) [![GitHub Release status](https://github.com/tektronix/python-package-ci-cd/actions/workflows/package-release.yml/badge.svg?branch=main)](https://github.com/tektronix/python-package-ci-cd/actions/workflows/package-release.yml) | +| **Linting** | [![pre-commit enabled](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit)](https://github.com/pre-commit/pre-commit) | --- @@ -12,14 +14,14 @@ Python Packaging CI/CD. ## Actions -- [`create-unique-testpypi-version`](./actions/create-unique-testpypi-version/readme.md) +- [`create_unique_testpypi_version`](actions/create_unique_testpypi_version/readme.md) - This action creates a unique version number for the provided Python package to enable uploading the package to [TestPyPI](https://test.pypi.org). -- [`find-unreleased-changelog-items`](./actions/find-unreleased-changelog-items/readme.md) +- [`find_unreleased_changelog_items`](./actions/find_unreleased_changelog_items/readme.md) - This action will parse the repository's `CHANGELOG.md` file to determine if there are any unreleased items. It will fail if it cannot find any unreleased items, as this means that the package is not ready for a new release. -- [`update-development-dependencies`](./actions/update-development-dependencies/readme.md) +- [`update_development_dependencies`](./actions/update_development_dependencies/readme.md) - This action enables updating Python development dependencies using the [`Poetry`](https://python-poetry.org/) package manager in-sync with [`pre-commit`](https://pre-commit.com/) hooks. diff --git a/actions/create-unique-testpypi-version/Dockerfile b/actions/create_unique_testpypi_version/Dockerfile similarity index 100% rename from actions/create-unique-testpypi-version/Dockerfile rename to actions/create_unique_testpypi_version/Dockerfile diff --git a/actions/create-unique-testpypi-version/action.yml b/actions/create_unique_testpypi_version/action.yml similarity index 100% rename from actions/create-unique-testpypi-version/action.yml rename to actions/create_unique_testpypi_version/action.yml diff --git a/actions/create-unique-testpypi-version/main.py b/actions/create_unique_testpypi_version/main.py similarity index 53% rename from actions/create-unique-testpypi-version/main.py rename to actions/create_unique_testpypi_version/main.py index a2fd9b9d..7839d399 100644 --- a/actions/create-unique-testpypi-version/main.py +++ b/actions/create_unique_testpypi_version/main.py @@ -10,6 +10,8 @@ This script needs to be run from a directory that contains a `pyproject.toml` file. """ +from __future__ import annotations + import os import pathlib @@ -19,44 +21,67 @@ from poetry.core.constraints.version import Version from pypi_simple import PyPISimple -_ENV_VAR_TRUE_VALUES = {"1", "true", "yes"} - PYPROJECT_FILE = pathlib.Path("./pyproject.toml") -def main(package_name: str) -> None: - """Run the script to create the new version number. +def create_new_post_version( + package_name: str, latest_version: str | None, local_version: str +) -> str: + """Create a new `.postN` version, incrementing `N` if necessary. Args: - package_name: The name of the package create a unique version for. - """ - test_pypi_server = PyPISimple("https://test.pypi.org/simple/") - print(f"Checking for the latest version of `{package_name}` on test.pypi.org...") + package_name: The name of the package. 
+ latest_version: The current version of the package. + local_version: The version of the package from the pyproject.toml file. - # Get the latest version of the package on test.pypi.org - latest_version = Version.parse( - test_pypi_server.get_project_page(package_name).packages[-1].version # pyright: ignore[reportArgumentType] - ) - print(f"Current version of `{package_name}` is: {latest_version}") + Returns: + The new `.postN` version as a string. + """ + if latest_version: + parsed_version = Version.parse(latest_version) + else: + parsed_version = Version.parse(local_version) + print(f"Current version of `{package_name}` is: {parsed_version}") # Create the .postN version suffix new_post_release_num = 1 - if latest_version.post: - new_post_release_num += latest_version.post.number + if parsed_version.post: + new_post_release_num += parsed_version.post.number # Create the new version number updated_version = Version.parse( - f"{'.'.join(str(x) for x in latest_version.parts)}.post{new_post_release_num}" + f"{'.'.join(str(x) for x in parsed_version.parts)}.post{new_post_release_num}" ) print(f"New version of `{package_name}` will be: {updated_version}") - # Update the pyproject.toml file with the new version number (only if running in GitHub Actions) - print("Updating the pyproject.toml file with the new version...") - # Read in the current data + return updated_version.to_string() + + +def main() -> None: + """Run the script to create the new version number.""" + # Load in the GitHub Action inputs + # See https://docs.github.com/en/actions/sharing-automations/creating-actions/metadata-syntax-for-github-actions#example-specifying-inputs + package_name = os.environ["INPUT_PACKAGE-NAME"] + # Connect to the test.pypi.org server + test_pypi_server = PyPISimple("https://test.pypi.org/simple/") + print(f"Checking for the latest version of `{package_name}` on test.pypi.org...") + + # Load in the current data from the pyproject.toml file to + # read the current, local package version with PYPROJECT_FILE.open("rb") as file_handle: pyproject_data = tomli.load(file_handle) + local_version = pyproject_data["tool"]["poetry"]["version"] + # Get the latest version of the package on test.pypi.org and create the new version + updated_version = create_new_post_version( + package_name, + test_pypi_server.get_project_page(package_name).packages[-1].version, + local_version, + ) + + # Update the pyproject.toml file with the new version number + print("Updating the pyproject.toml file with the new version...") # Modify the version value - pyproject_data["tool"]["poetry"]["version"] = updated_version.to_string() + pyproject_data["tool"]["poetry"]["version"] = updated_version # Write back the data to the file with PYPROJECT_FILE.open("wb") as file_handle: tomli_w.dump(pyproject_data, file_handle) @@ -66,7 +91,6 @@ def main(package_name: str) -> None: github_output_file_handle.write(f"new-version={updated_version}\n") -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover # Run the main function - # See https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/store-information-in-variables#default-environment-variables - main(package_name=os.environ["INPUT_PACKAGE-NAME"]) + main() diff --git a/actions/create-unique-testpypi-version/readme.md b/actions/create_unique_testpypi_version/readme.md similarity index 94% rename from actions/create-unique-testpypi-version/readme.md rename to actions/create_unique_testpypi_version/readme.md index 6655f34f..a84da229 100644 --- 
a/actions/create-unique-testpypi-version/readme.md +++ b/actions/create_unique_testpypi_version/readme.md @@ -1,4 +1,4 @@ -# create-unique-testpypi-version +# create_unique_testpypi_version This action creates a unique version number for the provided Python package to enable uploading the package to [TestPyPI](https://test.pypi.org/). @@ -37,7 +37,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: ./actions/create-unique-testpypi-version + - uses: ./actions/create_unique_testpypi_version id: create-version with: package-name: my-package # required diff --git a/actions/create-unique-testpypi-version/requirements.txt b/actions/create_unique_testpypi_version/requirements.txt similarity index 100% rename from actions/create-unique-testpypi-version/requirements.txt rename to actions/create_unique_testpypi_version/requirements.txt diff --git a/actions/find-unreleased-changelog-items/Dockerfile b/actions/find_unreleased_changelog_items/Dockerfile similarity index 100% rename from actions/find-unreleased-changelog-items/Dockerfile rename to actions/find_unreleased_changelog_items/Dockerfile diff --git a/actions/find-unreleased-changelog-items/action.yml b/actions/find_unreleased_changelog_items/action.yml similarity index 100% rename from actions/find-unreleased-changelog-items/action.yml rename to actions/find_unreleased_changelog_items/action.yml diff --git a/actions/find-unreleased-changelog-items/main.py b/actions/find_unreleased_changelog_items/main.py similarity index 75% rename from actions/find-unreleased-changelog-items/main.py rename to actions/find_unreleased_changelog_items/main.py index d0140298..fd6a9235 100644 --- a/actions/find-unreleased-changelog-items/main.py +++ b/actions/find_unreleased_changelog_items/main.py @@ -21,12 +21,11 @@ import tomli -_ENV_VAR_TRUE_VALUES = {"1", "true", "yes"} PYPROJECT_FILE = pathlib.Path("./pyproject.toml") -CHANGELOG_FILEPATH = pathlib.Path("./CHANGELOG.md") +CHANGELOG_FILE = pathlib.Path("./CHANGELOG.md") -def _find_template_folder() -> pathlib.Path: +def find_template_folder() -> pathlib.Path: """Find the template folder from the pyproject.toml file. Returns: @@ -43,24 +42,19 @@ def _find_template_folder() -> pathlib.Path: return template_folder -def main( - filename_for_previous_changelog: str, - filename_for_previous_release_notes: str, - release_level: str | None, -) -> None: +def main() -> None: """Check for entries in the Unreleased section of the CHANGELOG.md file. - Args: - filename_for_previous_changelog: The filename to use to create the previous changelog file. - filename_for_previous_release_notes: The filename to use to create the previous - release notes file. - release_level: The release level to output to the GitHub Workflow Summary. - Raises: SystemExit: Indicates no new entries were found. 
""" + # Load in the GitHub Action inputs + # See https://docs.github.com/en/actions/sharing-automations/creating-actions/metadata-syntax-for-github-actions#example-specifying-inputs + filename_for_previous_changelog = os.environ["INPUT_PREVIOUS-CHANGELOG-FILENAME"] + filename_for_previous_release_notes = os.environ["INPUT_PREVIOUS-RELEASE-NOTES-FILENAME"] + release_level = os.getenv("INPUT_RELEASE-LEVEL") # Set the filepaths for the template files - template_folder = _find_template_folder() + template_folder = find_template_folder() template_changelog_filepath = template_folder / pathlib.Path(filename_for_previous_changelog) template_release_notes_filepath = template_folder / pathlib.Path( filename_for_previous_release_notes @@ -68,7 +62,7 @@ def main( release_notes_content = "" found_entries = False - with CHANGELOG_FILEPATH.open(mode="r", encoding="utf-8") as changelog_file: + with CHANGELOG_FILE.open(mode="r", encoding="utf-8") as changelog_file: tracking_unreleased = False tracking_entries = False for line in changelog_file: @@ -94,11 +88,11 @@ def main( found_entries = bool(re.match(r"^- \w+", line)) if not found_entries: - msg = f"No unreleased entries were found in {CHANGELOG_FILEPATH}." + msg = f"No unreleased entries were found in {CHANGELOG_FILE}." raise SystemExit(msg) # Copy the files to the correct location - shutil.copy(CHANGELOG_FILEPATH, template_changelog_filepath) + shutil.copy(CHANGELOG_FILE, template_changelog_filepath) with template_release_notes_filepath.open("w", encoding="utf-8") as template_release_notes: template_release_notes.write(release_notes_content.strip() + "\n") @@ -116,11 +110,6 @@ def main( summary_file.write(summary_contents) -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover # Run the main function - # See https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/store-information-in-variables#default-environment-variables - main( - filename_for_previous_changelog=os.environ["INPUT_PREVIOUS-CHANGELOG-FILENAME"], - filename_for_previous_release_notes=os.environ["INPUT_PREVIOUS-RELEASE-NOTES-FILENAME"], - release_level=os.getenv("INPUT_RELEASE-LEVEL"), - ) + main() diff --git a/actions/find-unreleased-changelog-items/readme.md b/actions/find_unreleased_changelog_items/readme.md similarity index 95% rename from actions/find-unreleased-changelog-items/readme.md rename to actions/find_unreleased_changelog_items/readme.md index 29004a0a..950bd5c8 100644 --- a/actions/find-unreleased-changelog-items/readme.md +++ b/actions/find_unreleased_changelog_items/readme.md @@ -1,4 +1,4 @@ -# find-unreleased-changelog-items +# find_unreleased_changelog_items This action will parse the repository's `CHANGELOG.md` file to determine if there are any unreleased items. 
It will fail if it cannot find any unreleased @@ -48,7 +48,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: tektronix/python-package-ci-cd/actions/find-unreleased-changelog-items@main # it is recommended to use the latest release tag instead of `main` + - uses: tektronix/python-package-ci-cd/actions/find_unreleased_changelog_items@main # it is recommended to use the latest release tag instead of `main` with: release-level: ${{ inputs.release-level }} # optional previous-changelog-filename: .previous_changelog_for_template.md # optional diff --git a/actions/find-unreleased-changelog-items/requirements.txt b/actions/find_unreleased_changelog_items/requirements.txt similarity index 100% rename from actions/find-unreleased-changelog-items/requirements.txt rename to actions/find_unreleased_changelog_items/requirements.txt diff --git a/actions/update-development-dependencies/Dockerfile b/actions/update_development_dependencies/Dockerfile similarity index 100% rename from actions/update-development-dependencies/Dockerfile rename to actions/update_development_dependencies/Dockerfile diff --git a/actions/update-development-dependencies/action.yml b/actions/update_development_dependencies/action.yml similarity index 100% rename from actions/update-development-dependencies/action.yml rename to actions/update_development_dependencies/action.yml diff --git a/actions/update-development-dependencies/main.py b/actions/update_development_dependencies/main.py similarity index 64% rename from actions/update-development-dependencies/main.py rename to actions/update_development_dependencies/main.py index bc02e6f1..9c1e157a 100644 --- a/actions/update-development-dependencies/main.py +++ b/actions/update_development_dependencies/main.py @@ -26,7 +26,7 @@ _ENV_VAR_TRUE_VALUES = {"1", "true", "yes"} -def _convert_dict_input(input_str: str) -> dict[str, list[str]]: +def convert_dict_input(input_str: str) -> dict[str, list[str]]: """Parse the input string into a dictionary of the required type. Args: @@ -54,7 +54,7 @@ def _convert_dict_input(input_str: str) -> dict[str, list[str]]: raise ValueError(msg) from e -def _run_cmd_in_subprocess(command: str) -> None: +def run_cmd_in_subprocess(command: str) -> None: """Run the given command in a subprocess. 
Args: @@ -65,7 +65,7 @@ def _run_cmd_in_subprocess(command: str) -> None: subprocess.check_call(shlex.split(command)) # noqa: S603 -def _update_poetry_dependencies( +def update_poetry_dependencies( python_executable: str, repository_root_directory: Path, dependencies_to_update: dict[str, list[str]], @@ -89,8 +89,8 @@ def _update_poetry_dependencies( for group, dependencies_list in dependencies_to_update.items(): dependencies = " ".join(f'"{x.split("[", maxsplit=1)[0]}"' for x in dependencies_list) group_arg = f" --group={group}" if group else "" - _run_cmd_in_subprocess( - f'"{python_executable}" -m poetry remove --lock{group_arg} {dependencies}', + run_cmd_in_subprocess( + f'"{python_executable}" -m poetry remove --lock{group_arg} {dependencies}' ) # Get the latest versions for each of the dependencies to update @@ -110,25 +110,23 @@ def _update_poetry_dependencies( poetry_add_cmd = f'"{python_executable}" -m poetry add{group_arg} {dependencies}' if lock_only: poetry_add_cmd += " --lock" - _run_cmd_in_subprocess(poetry_add_cmd) + run_cmd_in_subprocess(poetry_add_cmd) # Run poetry update poetry_update_cmd = f'"{python_executable}" -m poetry update' if lock_only: poetry_update_cmd += " --lock" - _run_cmd_in_subprocess(poetry_update_cmd) + run_cmd_in_subprocess(poetry_update_cmd) # Fix the formatting of the pyproject.toml file python_script_location = Path(python_executable).parent - _run_cmd_in_subprocess( + run_cmd_in_subprocess( f'"{python_script_location}/toml-sort" ' - f'"{repository_root_directory}/pyproject.toml" --in-place --sort-table-keys', + f'"{repository_root_directory}/pyproject.toml" --in-place --sort-table-keys' ) -def _update_pre_commit_dependencies( - python_executable: str, repository_root_directory: Path -) -> None: +def update_pre_commit_dependencies(python_executable: str, repository_root_directory: Path) -> None: """Update the pre-commit dependencies in the .pre-commit-config.yaml file. This function will also fix the formatting of the yaml file using the `yamlfix` package. @@ -137,12 +135,12 @@ def _update_pre_commit_dependencies( python_executable: The path to the python executable to use. repository_root_directory: The root directory of the repository. """ - _run_cmd_in_subprocess( + run_cmd_in_subprocess( f"git config --global --add safe.directory " f'"{repository_root_directory.resolve().as_posix()}"' ) # Update pre-commit config file - _run_cmd_in_subprocess(f'"{python_executable}" -m pre_commit autoupdate --freeze') + run_cmd_in_subprocess(f'"{python_executable}" -m pre_commit autoupdate --freeze') # Fix the formatting of the pre-commit config file with warnings.catch_warnings(): @@ -150,7 +148,19 @@ def _update_pre_commit_dependencies( fix_files([f"{repository_root_directory}/.pre-commit-config.yaml"]) -def _export_requirements_files(python_executable: str, dependency_groups: list[str]) -> None: +def sort_requirements_file(file_path: Path) -> None: + """Sort the lines in the given requirements file. + + Args: + file_path: The path to the requirements file to sort. + """ + with file_path.open() as file: + lines = sorted(file.readlines(), key=lambda x: x.lower().split("==")[0]) + with file_path.open("w") as file: + file.writelines(lines) + + +def export_requirements_files(python_executable: str, dependency_groups: list[str]) -> None: """Export the requirements files for the specified dependency groups. 
This function uses the `poetry export` command to generate the requirements files for the @@ -160,19 +170,7 @@ def _export_requirements_files(python_executable: str, dependency_groups: list[s python_executable: The path to the python executable to use. dependency_groups: The list of dependency groups to export the requirements for. """ - - def _sort_requirements_file(file_path: Path) -> None: - """Sort the lines in the given requirements file. - - Args: - file_path: The path to the requirements file to sort. - """ - with file_path.open() as file: - lines = sorted(file.readlines(), key=lambda x: x.lower().split("==")[0]) - with file_path.open("w") as file: - file.writelines(lines) - - _run_cmd_in_subprocess(f'"{python_executable}" -m poetry config warnings.export false') + run_cmd_in_subprocess(f'"{python_executable}" -m poetry config warnings.export false') for group_output_pair in dependency_groups: if ":" in group_output_pair: @@ -180,72 +178,47 @@ def _sort_requirements_file(file_path: Path) -> None: else: group = group_output_pair output_folder = group - _run_cmd_in_subprocess( + run_cmd_in_subprocess( f'"{python_executable}" -m poetry export --only {group} ' - f"--without-hashes --output {output_folder}/requirements.txt", + f"--without-hashes --output {output_folder}/requirements.txt" ) - _sort_requirements_file(Path(f"{output_folder}/requirements.txt")) - - -def main( - repo_root: str, - dependency_dict: dict[str, list[str]], - export_dependency_groups: list[str], - pre_commit_hook_skip_list: str, - *, - install_dependencies: bool, - run_pre_commit: bool, - update_pre_commit: bool, -) -> None: - """Run the script to update the development dependencies. - - Args: - repo_root: The root directory of the repository. - dependency_dict: The dictionary of dependency groups to update, where each key is a group - and each value is a list of dependencies to update within that group. - export_dependency_groups: The list of dependency groups to export the requirements for, - along with optional folder paths. - pre_commit_hook_skip_list: The list of pre-commit hooks to skip. - install_dependencies: A boolean indicating if the dependencies should be installed. - run_pre_commit: A boolean indicating if the pre-commit hooks should be run. - update_pre_commit: A boolean indicating if the pre-commit hooks should be updated. 
- """ + sort_requirements_file(Path(f"{output_folder}/requirements.txt")) + + +def main() -> None: + """Run the script to update the development dependencies.""" + # Load in the GitHub Action inputs + # See https://docs.github.com/en/actions/sharing-automations/creating-actions/metadata-syntax-for-github-actions#example-specifying-inputs + repo_root = os.environ["INPUT_REPO-ROOT"] + dependency_dict = convert_dict_input(os.environ["INPUT_DEPENDENCY-DICT"]) + export_dependency_groups = [ + x for x in os.environ["INPUT_EXPORT-DEPENDENCY-GROUPS"].split(",") if x + ] + pre_commit_hook_skip_list = os.environ["INPUT_PRE-COMMIT-HOOK-SKIP-LIST"] + install_dependencies = os.environ["INPUT_INSTALL-DEPENDENCIES"].lower() in _ENV_VAR_TRUE_VALUES + run_pre_commit = os.environ["INPUT_RUN-PRE-COMMIT"].lower() in _ENV_VAR_TRUE_VALUES + update_pre_commit = os.environ["INPUT_UPDATE-PRE-COMMIT"].lower() in _ENV_VAR_TRUE_VALUES python_executable = sys.executable repo_root_path = Path(repo_root).resolve() os.chdir(repo_root_path) print(f"\nUpdating development dependencies in {Path.cwd()}") - _update_poetry_dependencies( - python_executable, - repo_root_path, - dependency_dict, - lock_only=not install_dependencies, + update_poetry_dependencies( + python_executable, repo_root_path, dependency_dict, lock_only=not install_dependencies ) if update_pre_commit or run_pre_commit: - _update_pre_commit_dependencies(python_executable, repo_root_path) + update_pre_commit_dependencies(python_executable, repo_root_path) if export_dependency_groups: - _export_requirements_files(python_executable, export_dependency_groups) + export_requirements_files(python_executable, export_dependency_groups) if run_pre_commit: # Run the pre-commit hooks, ignore any errors since they are # just being run to auto-fix files. 
with contextlib.suppress(subprocess.CalledProcessError): os.environ["SKIP"] = pre_commit_hook_skip_list - _run_cmd_in_subprocess(f'"{python_executable}" -m pre_commit run --all-files') + run_cmd_in_subprocess(f'"{python_executable}" -m pre_commit run --all-files') -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover # Run the main function - # See https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/store-information-in-variables#default-environment-variables - main( - repo_root=os.environ["INPUT_REPO-ROOT"], - dependency_dict=_convert_dict_input(os.environ["INPUT_DEPENDENCY-DICT"]), - export_dependency_groups=[ - x for x in os.environ["INPUT_EXPORT-DEPENDENCY-GROUPS"].split(",") if x - ], - pre_commit_hook_skip_list=os.environ["INPUT_PRE-COMMIT-HOOK-SKIP-LIST"], - install_dependencies=os.environ["INPUT_INSTALL-DEPENDENCIES"].lower() - in _ENV_VAR_TRUE_VALUES, - run_pre_commit=os.environ["INPUT_RUN-PRE-COMMIT"].lower() in _ENV_VAR_TRUE_VALUES, - update_pre_commit=os.environ["INPUT_UPDATE-PRE-COMMIT"].lower() in _ENV_VAR_TRUE_VALUES, - ) + main() diff --git a/actions/update-development-dependencies/readme.md b/actions/update_development_dependencies/readme.md similarity index 97% rename from actions/update-development-dependencies/readme.md rename to actions/update_development_dependencies/readme.md index 77fcbefb..12911336 100644 --- a/actions/update-development-dependencies/readme.md +++ b/actions/update_development_dependencies/readme.md @@ -1,4 +1,4 @@ -# update-development-dependencies +# update_development_dependencies This action enables updating Python development dependencies using the [Poetry](https://python-poetry.org/) package manager in-sync with @@ -46,7 +46,7 @@ jobs: ref: ${{ github.head_ref }} token: ${{ secrets.checkout-token }} - - uses: tektronix/python-package-ci-cd/actions/update-development-dependencies@main # it is recommended to use the latest release tag instead of `main` + - uses: tektronix/python-package-ci-cd/actions/update_development_dependencies@main # it is recommended to use the latest release tag instead of `main` with: repo-root: . 
# optional, defaults to the current working directory install-dependencies: false # optional, this will almost never need to be set to true diff --git a/actions/update-development-dependencies/requirements.txt b/actions/update_development_dependencies/requirements.txt similarity index 96% rename from actions/update-development-dependencies/requirements.txt rename to actions/update_development_dependencies/requirements.txt index 36d8f657..0cd989a4 100644 --- a/actions/update-development-dependencies/requirements.txt +++ b/actions/update_development_dependencies/requirements.txt @@ -9,7 +9,7 @@ cfgv==3.4.0 ; python_version >= "3.12" and python_version < "3.13" charset-normalizer==3.3.2 ; python_version >= "3.12" and python_version < "3.13" cleo==2.1.0 ; python_version >= "3.12" and python_version < "3.13" click==8.1.7 ; python_version >= "3.12" and python_version < "3.13" -colorama==0.4.6 ; python_version >= "3.12" and python_version < "3.13" and (platform_system == "Windows" or os_name == "nt") +colorama==0.4.6 ; python_version >= "3.12" and python_version < "3.13" and (os_name == "nt" or platform_system == "Windows") crashtest==0.4.1 ; python_version >= "3.12" and python_version < "3.13" cryptography==43.0.0 ; python_version >= "3.12" and python_version < "3.13" and sys_platform == "linux" distlib==0.3.8 ; python_version >= "3.12" and python_version < "3.13" @@ -24,7 +24,7 @@ jaraco-classes==3.4.0 ; python_version >= "3.12" and python_version < "3.13" jeepney==0.8.0 ; python_version >= "3.12" and python_version < "3.13" and sys_platform == "linux" keyring==24.3.1 ; python_version >= "3.12" and python_version < "3.13" mailbits==0.2.1 ; python_version >= "3.12" and python_version < "3.13" -maison==1.4.3 ; python_version >= "3.12" and python_version < "3.13" +maison==2.0.0 ; python_version >= "3.12" and python_version < "3.13" more-itertools==10.4.0 ; python_version >= "3.12" and python_version < "3.13" msgpack==1.0.8 ; python_version >= "3.12" and python_version < "3.13" nodeenv==1.9.1 ; python_version >= "3.12" and python_version < "3.13" @@ -60,4 +60,4 @@ typing-extensions==4.12.2 ; python_version >= "3.12" and python_version < "3.13" urllib3==2.2.2 ; python_version >= "3.12" and python_version < "3.13" virtualenv==20.26.3 ; python_version >= "3.12" and python_version < "3.13" xattr==1.1.0 ; python_version >= "3.12" and python_version < "3.13" and sys_platform == "darwin" -yamlfix==1.16.0 ; python_version >= "3.12" and python_version < "3.13" +yamlfix==1.17.0 ; python_version >= "3.12" and python_version < "3.13" diff --git a/pyproject.toml b/pyproject.toml index 33fb088d..4c957181 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,23 @@ [tool] +[tool.coverage.report] +exclude_lines = [ + "if TYPE_CHECKING:", + "pragma: no cover" +] +fail_under = 100 +include_namespace_packages = true +omit = [ + "scripts/contributor_setup.py" +] +show_missing = true +skip_empty = true + +[tool.coverage.run] +branch = true +cover_pylib = false +source = ["actions", "scripts"] + [tool.docformatter] close-quotes-on-newline = true in-place = true @@ -14,7 +32,7 @@ version = "0.0.0" [tool.poetry.dependencies] python = "~3.12" -[tool.poetry.group.cutv.dependencies] # dependencies for actions/create-unique-testpypi-version +[tool.poetry.group.cutv.dependencies] # dependencies for actions/create_unique_testpypi_version poetry-core = "^1.9.0" pypi-simple = "^1.6.0" tomli = "^2.0.1" @@ -24,13 +42,27 @@ tomli-w = "^1.0.0" pip = "^24.0" poetry-audit-plugin = "^0.4.0" poetry-pre-commit-plugin = "^0.1.2" +pre-commit = 
"^3.8.0" pyright = "1.1.377" python-semantic-release = "^9.8.7" +tox = "^4.18.0" +tox-gh-actions = "^3.2.0" -[tool.poetry.group.fci.dependencies] # dependencies for actions/find-unreleased-changelog-items +[tool.poetry.group.fci.dependencies] # dependencies for actions/find_unreleased_changelog_items tomli = "^2.0.1" -[tool.poetry.group.udd.dependencies] # dependencies for actions/update-development-dependencies +[tool.poetry.group.tests.dependencies] +coverage = "^7.5.0" +linkchecker = "^10.0.0" +pytest = "^8.2.0" +pytest-cov = "^5.0.0" +pytest-depends = "^1.0.1" +pytest-env = "^1.1.3" +pytest-github-report = "^0.0.1" +pytest-html = "^4.1.1" +pytest-order = "^1.2.1" + +[tool.poetry.group.udd.dependencies] # dependencies for actions/update_development_dependencies poetry = "^1.8.0" poetry-plugin-export = "^1.7.1" pre-commit = "^3.7" @@ -58,6 +90,22 @@ strict = ["**"] typeCheckingMode = "strict" useLibraryCodeForTypes = true +[tool.pytest.ini_options] +addopts = "--cov-config=pyproject.toml" +filterwarnings = [ +] +junit_family = "xunit2" +junit_logging = "all" +markers = [ + 'docs', + 'order' +] +pythonpath = "." +xfail_strict = true + +[tool.pytest_env] +pytest_report_title = {skip_if_set = true, value = "Test Results"} + [tool.ruff] line-length = 100 output-format = "concise" @@ -92,8 +140,15 @@ lines-between-types = 1 order-by-type = false [tool.ruff.lint.per-file-ignores] +"tests/**" = [ + "PLR2004", # Magic value used in comparison + "S101" # Use of assert detected +] [tool.semantic_release] +build_command = """ + python -m scripts.bump_version_in_files +""" version_toml = [ "pyproject.toml:tool.poetry.version" ] @@ -116,3 +171,42 @@ all = true in_place = true spaces_before_inline_comment = 2 overrides."tool.poetry.*".inline_arrays = false + +[tool.tox] +legacy_tox_ini = """ +[tox] +requires = tox>4 +no_package = True +envlist = py312,tests +skip_missing_interpreters = False + +[gh-actions] +python = + 3.12: py312 + +[testenv] +install_command = python -I -m pip install --upgrade --upgrade-strategy=eager {opts} {packages} +deps = + poetry +commands_pre = + poetry install +commands = + !tests: pre-commit run --all-files + pytest -vv --showlocals --cov --junitxml={tox_root}/.results_{envname}/results.xml --cov-report=term --cov-report=xml:{tox_root}/.coverage_{envname}.xml --cov-report=html:{tox_root}/.results_{envname}/html --self-contained-html --html={tox_root}/.results_{envname}/results.html + +[testenv:tests] +basepython = python +deps = + -r tests/requirements.txt + -r actions/create_unique_testpypi_version/requirements.txt + -r actions/find_unreleased_changelog_items/requirements.txt + -r actions/update_development_dependencies/requirements.txt +passenv = + pytest_report_title +setenv = + pytest_github_report = true + pytest_use_blanks = true + GITHUB_STEP_SUMMARY = {tox_root}/.results_{envname}/github_report.md +commands_pre = + +""" diff --git a/scripts/bump_version_in_files.py b/scripts/bump_version_in_files.py new file mode 100644 index 00000000..9e05e97e --- /dev/null +++ b/scripts/bump_version_in_files.py @@ -0,0 +1,61 @@ +"""This script will bump the version of the GitHub Actions to the newest version. + +This script is intended to be run as a part of the release process in the `python-semantic-release` +`build-command`. 
+""" + +from __future__ import annotations + +import os +import re + +from pathlib import Path + +DIRECTORIES_TO_SEARCH = [".github", "workflows", "actions"] +FILES_TO_UPDATE = [Path("README.md")] +GITHUB_WORKFLOW_AND_ACTION_REGEX = re.compile( + r"(uses: tektronix/python-package-ci-cd/.*?)@v\d+\.\d+\.\d+" +) + + +def get_file_paths(directory_list: list[str]) -> list[Path]: + """Get a list of file paths from the given directories. + + Args: + directory_list: A list of directories to search for files in. + """ + file_paths: list[Path] = [] + for directory in directory_list: + for dirpath, _, filenames in os.walk(directory): + for filename in filenames: + file_paths.append(Path(dirpath) / Path(filename)) # noqa: PERF401 + return file_paths + + +def update_github_actions_version(filepath: Path, incoming_version: str) -> None: + """Update the version of the GitHub Actions to the incoming version. + + Args: + filepath: The path to the file to update. + incoming_version: The version to update the file to + """ + file_content = filepath.read_text() + # Check if there's a match before replacing + if GITHUB_WORKFLOW_AND_ACTION_REGEX.search(file_content): + # Replace the version numbers with the new version + updated_content = GITHUB_WORKFLOW_AND_ACTION_REGEX.sub( + rf"\1@v{incoming_version}", file_content + ) + print(f'Bumping version in "{filepath}" to', incoming_version) + filepath.write_text(updated_content) + else: + print(f'No GitHub Workflow/Action usage found in "{filepath}", skipping update.') + + +if __name__ == "__main__": # pragma: no cover + if not (new_version := os.getenv("NEW_VERSION")): + msg = "NEW_VERSION environment variable is not set" + raise SystemExit(msg) + + for file_path in get_file_paths(DIRECTORIES_TO_SEARCH) + FILES_TO_UPDATE: + update_github_actions_version(file_path, new_version) diff --git a/contributor_setup.py b/scripts/contributor_setup.py similarity index 100% rename from contributor_setup.py rename to scripts/contributor_setup.py diff --git a/tests/requirements.txt b/tests/requirements.txt new file mode 100644 index 00000000..da18bbd8 --- /dev/null +++ b/tests/requirements.txt @@ -0,0 +1,40 @@ +beautifulsoup4==4.12.3 ; python_version >= "3.12" and python_version < "3.13" +certifi==2024.7.4 ; python_version >= "3.12" and python_version < "3.13" +chardet==5.2.0 ; python_version >= "3.12" and python_version < "3.13" +charset-normalizer==3.3.2 ; python_version >= "3.12" and python_version < "3.13" +colorama==0.4.6 ; python_version >= "3.12" and python_version < "3.13" +coverage==7.6.1 ; python_version >= "3.12" and python_version < "3.13" +coverage[toml]==7.6.1 ; python_version >= "3.12" and python_version < "3.13" +dataproperty==1.0.1 ; python_version >= "3.12" and python_version < "3.13" +dnspython==2.6.1 ; python_version >= "3.12" and python_version < "3.13" +future-fstrings==1.2.0 ; python_version >= "3.12" and python_version < "3.13" +idna==3.7 ; python_version >= "3.12" and python_version < "3.13" +iniconfig==2.0.0 ; python_version >= "3.12" and python_version < "3.13" +jinja2==3.1.4 ; python_version >= "3.12" and python_version < "3.13" +linkchecker==10.4.0 ; python_version >= "3.12" and python_version < "3.13" +markupsafe==2.1.5 ; python_version >= "3.12" and python_version < "3.13" +mbstrdecoder==1.1.3 ; python_version >= "3.12" and python_version < "3.13" +networkx==3.3 ; python_version >= "3.12" and python_version < "3.13" +packaging==24.1 ; python_version >= "3.12" and python_version < "3.13" +pathvalidate==3.2.0 ; python_version >= "3.12" and 
python_version < "3.13" +pluggy==1.5.0 ; python_version >= "3.12" and python_version < "3.13" +pytablewriter==1.2.0 ; python_version >= "3.12" and python_version < "3.13" +pytest==8.3.2 ; python_version >= "3.12" and python_version < "3.13" +pytest-cov==5.0.0 ; python_version >= "3.12" and python_version < "3.13" +pytest-depends==1.0.1 ; python_version >= "3.12" and python_version < "3.13" +pytest-env==1.1.3 ; python_version >= "3.12" and python_version < "3.13" +pytest-github-report==0.0.1 ; python_version >= "3.12" and python_version < "3.13" +pytest-html==4.1.1 ; python_version >= "3.12" and python_version < "3.13" +pytest-metadata==3.1.1 ; python_version >= "3.12" and python_version < "3.13" +pytest-order==1.2.1 ; python_version >= "3.12" and python_version < "3.13" +python-dateutil==2.9.0.post0 ; python_version >= "3.12" and python_version < "3.13" +pytz==2024.1 ; python_version >= "3.12" and python_version < "3.13" +requests==2.32.3 ; python_version >= "3.12" and python_version < "3.13" +setuptools==73.0.1 ; python_version >= "3.12" and python_version < "3.13" +six==1.16.0 ; python_version >= "3.12" and python_version < "3.13" +soupsieve==2.6 ; python_version >= "3.12" and python_version < "3.13" +tabledata==1.3.3 ; python_version >= "3.12" and python_version < "3.13" +tcolorpy==0.1.6 ; python_version >= "3.12" and python_version < "3.13" +typepy==1.3.2 ; python_version >= "3.12" and python_version < "3.13" +typepy[datetime]==1.3.2 ; python_version >= "3.12" and python_version < "3.13" +urllib3==2.2.2 ; python_version >= "3.12" and python_version < "3.13" diff --git a/tests/test_bump_version_in_files.py b/tests/test_bump_version_in_files.py new file mode 100644 index 00000000..deb2cce0 --- /dev/null +++ b/tests/test_bump_version_in_files.py @@ -0,0 +1,56 @@ +"""Test the bump_version_in_files module.""" + +from pathlib import Path + +import pytest + +from scripts.bump_version_in_files import get_file_paths, update_github_actions_version + + +@pytest.fixture() +def temporary_directory(tmp_path: Path) -> Path: + """Create a temporary directory.""" + # Create a temporary directory with some files + test_dir = tmp_path / "test_dir" + test_dir.mkdir() + (test_dir / "file1.txt").write_text("test") + (test_dir / "file2.txt").write_text("test") + (test_dir / "file3.txt").write_text("test") + return test_dir + + +def test_get_file_paths(temporary_directory: Path) -> None: + """Test the get_file_paths function.""" + path_list = get_file_paths([temporary_directory.as_posix()]) + assert len(path_list) == 3 + assert all(file_path.is_file() for file_path in path_list) + + +def test_update_github_actions_version_no_match(temporary_directory: Path) -> None: + """Update version when no match is found.""" + file_path = temporary_directory / "file1.txt" + file_path.write_text("no match here") + update_github_actions_version(file_path, "1.2.3") + assert file_path.read_text() == "no match here" + + +def test_update_github_actions_version_with_match(temporary_directory: Path) -> None: + """Update version when a match is found.""" + file_path = temporary_directory / "file2.txt" + file_path.write_text("uses: tektronix/python-package-ci-cd/some-action@v1.0.0") + update_github_actions_version(file_path, "1.2.3") + assert file_path.read_text() == "uses: tektronix/python-package-ci-cd/some-action@v1.2.3" + + +def test_update_github_actions_version_multiple_matches(temporary_directory: Path) -> None: + """Update version when multiple matches are found.""" + file_path = temporary_directory / "file3.txt" + 
+    file_path.write_text(
+        "uses: tektronix/python-package-ci-cd/action1@v1.0.0\n"
+        "uses: tektronix/python-package-ci-cd/action2@v2.0.0"
+    )
+    update_github_actions_version(file_path, "1.2.3")
+    assert file_path.read_text() == (
+        "uses: tektronix/python-package-ci-cd/action1@v1.2.3\n"
+        "uses: tektronix/python-package-ci-cd/action2@v1.2.3"
+    )
diff --git a/tests/test_create_unique_testpypi_version.py b/tests/test_create_unique_testpypi_version.py
new file mode 100644
index 00000000..b7084347
--- /dev/null
+++ b/tests/test_create_unique_testpypi_version.py
@@ -0,0 +1,127 @@
+"""Test the create_unique_testpypi_version action Python code."""
+
+from __future__ import annotations
+
+from typing import Generator, TYPE_CHECKING
+from unittest.mock import MagicMock, patch
+
+import pytest
+
+import actions.create_unique_testpypi_version.main
+
+from actions.create_unique_testpypi_version.main import create_new_post_version, main
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+# Sample data for mocking
+PACKAGE_NAME = "example-package"
+CURRENT_VERSION = "0.1.0"
+LATEST_VERSION = "0.1.0.post1"
+NEW_VERSION = "0.1.0.post2"
+
+
+@pytest.fixture(autouse=True)
+def _mock_environment(monkeypatch: pytest.MonkeyPatch) -> None:  # pyright: ignore[reportUnusedFunction]
+    """Mock the environment variables.
+
+    Args:
+        monkeypatch (fixture): The monkeypatch fixture.
+    """
+    # Mock environment variables
+    monkeypatch.setenv("INPUT_PACKAGE-NAME", PACKAGE_NAME)
+
+
+@pytest.fixture()
+def mock_testpypi_server() -> Generator[MagicMock, None, None]:
+    """Mock the PyPISimple class and its methods."""
+    with patch("actions.create_unique_testpypi_version.main.PyPISimple") as mock_pypi_simple:
+        mock_server = mock_pypi_simple.return_value
+        mock_project_page = MagicMock()
+        mock_project_page.packages = [MagicMock(version=LATEST_VERSION)]
+        mock_server.get_project_page.return_value = mock_project_page
+        yield mock_server
+
+
+@pytest.fixture()
+def mock_pyproject_file(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> Path:
+    """Mock the pyproject.toml file.
+
+    Args:
+        tmp_path (fixture): The temporary path fixture.
+        monkeypatch (fixture): The monkeypatch fixture.
+    """
+    pyproject_file = tmp_path / "pyproject.toml"
+    pyproject_file.write_text(f"""
+    [tool.poetry]
+    name = "{PACKAGE_NAME}"
+    version = "{CURRENT_VERSION}"
+    """)
+    monkeypatch.setattr(
+        actions.create_unique_testpypi_version.main, "PYPROJECT_FILE", pyproject_file
+    )
+    return pyproject_file
+
+
+@pytest.fixture()
+def mock_github_output_file(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> Path:
+    """Mock the GitHub output file.
+
+    Args:
+        tmp_path (fixture): The temporary path fixture.
+        monkeypatch (fixture): The monkeypatch fixture.
+    """
+    github_output_file = tmp_path / "github_output"
+    monkeypatch.setenv("GITHUB_OUTPUT", str(github_output_file))
+    return github_output_file
+
+
+def test_main(
+    mock_testpypi_server: MagicMock,  # noqa: ARG001
+    mock_pyproject_file: Path,
+    mock_github_output_file: Path,
+) -> None:
+    """Test the main function.
+
+    Args:
+        mock_testpypi_server (fixture): Mock the PyPISimple class and its methods.
+        mock_pyproject_file (fixture): Mock the pyproject.toml file.
+        mock_github_output_file (fixture): Mock the GitHub output file.
+ """ + main() + + # Check the pyproject.toml file for the updated version + with mock_pyproject_file.open("r") as f: + data = f.read() + assert f'version = "{NEW_VERSION}"' in data + + # Check the GitHub output file for the new version variable + with mock_github_output_file.open("r") as f: + data = f.read() + assert f"new-version={NEW_VERSION}\n" in data + + +@pytest.mark.parametrize( + ("package_name", "latest_version", "local_version", "expected_version"), + [ + ("example-package", "1.0.0", "0.1.0", "1.0.0.post1"), + ("example-package", "1.0.0.post10", "0.1.0", "1.0.0.post11"), + ("example-package", None, "0.1.0", "0.1.0.post1"), + ("example-package", "0.1.0.post3", "0.1.0", "0.1.0.post4"), + ("example-package", "2.3.4", "2.3.4", "2.3.4.post1"), + ("example-package", None, "2.3.4.post5", "2.3.4.post6"), + ], +) +def test_create_new_post_version( + package_name: str, latest_version: str | None, local_version: str, expected_version: str +) -> None: + """Test the create_new_post_version function. + + Args: + package_name: The name of the package. + latest_version: The latest version of the package. + local_version: The local version of the package. + expected_version: The expected new version of the package. + """ + result = create_new_post_version(package_name, latest_version, local_version) + assert result == expected_version diff --git a/tests/test_find_unreleased_changelog_items.py b/tests/test_find_unreleased_changelog_items.py new file mode 100644 index 00000000..8eed8805 --- /dev/null +++ b/tests/test_find_unreleased_changelog_items.py @@ -0,0 +1,205 @@ +"""Test the find_unreleased_changelog_items action Python code.""" + +from __future__ import annotations + +from pathlib import Path + +import pytest + +from actions.find_unreleased_changelog_items.main import find_template_folder, main + +PREVIOUS_CHANGELOG_FILENAME = "previous_changelog.md" +PREVIOUS_RELEASE_NOTES_FILENAME = "previous_release_notes.md" +MOCK_TEMPLATES_FOLDER = "mock_templates" + + +@pytest.fixture() +def mock_pyproject_file(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> tuple[Path, Path]: + """Mock the pyproject.toml file. + + Args: + monkeypatch: The monkeypatch fixture. + tmp_path: The temporary path fixture. + + Returns: + The path to the pyproject.toml file and the path to the template folder. + """ + pyproject_content = f""" + [tool.semantic_release.changelog] + template_dir = "{MOCK_TEMPLATES_FOLDER}" + """ + mock_path = tmp_path / "pyproject.toml" + mock_path.write_text(pyproject_content) + monkeypatch.setattr("actions.find_unreleased_changelog_items.main.PYPROJECT_FILE", mock_path) + template_folder = tmp_path / MOCK_TEMPLATES_FOLDER + template_folder.mkdir() + return mock_path, template_folder + + +@pytest.fixture() +def mock_changelog_file(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> Path: + """Mock the pyproject.toml file. + + Args: + monkeypatch: The monkeypatch fixture. + tmp_path: The temporary path fixture. + + Returns: + The path to the changelog file. + """ + changelog_content = """# Changelog +--- +## Unreleased +### Added +- New feature +""" + mock_path = tmp_path / "CHANGELOG.md" + mock_path.write_text(changelog_content) + monkeypatch.setattr("actions.find_unreleased_changelog_items.main.CHANGELOG_FILE", mock_path) + return mock_path + + +@pytest.fixture() +def summary_file(tmp_path: Path) -> Path: + """Create a summary file for the GitHub Actions step. + + Args: + tmp_path: The temporary path fixture. + + Returns: + The path to the job summary file. 
+ """ + return tmp_path / "github_summary.txt" + + +@pytest.fixture() +def mock_env_vars(tmp_path: Path, monkeypatch: pytest.MonkeyPatch, summary_file: Path) -> None: + """Mock the environment variables to simulate GitHub Actions inputs. + + Args: + tmp_path: The temporary path fixture. + monkeypatch: The monkeypatch fixture. + summary_file: The path to the job summary file. + """ + # Change the working directory + monkeypatch.chdir(tmp_path) + monkeypatch.setenv("INPUT_PREVIOUS-CHANGELOG-FILENAME", PREVIOUS_CHANGELOG_FILENAME) + monkeypatch.setenv("INPUT_PREVIOUS-RELEASE-NOTES-FILENAME", PREVIOUS_RELEASE_NOTES_FILENAME) + monkeypatch.setenv("INPUT_RELEASE-LEVEL", "minor") + monkeypatch.setenv("GITHUB_STEP_SUMMARY", str(summary_file)) + + +@pytest.mark.parametrize( + ("pyproject_content", "expected_template_folder"), + [ + ( + '[tool.semantic_release.changelog]\ntemplate_dir = "mock_templates"\n', + Path("mock_templates"), + ), + ( + "[tool.semantic_release.changelog]\n", + Path("templates"), + ), + ], +) +def test_find_template_folder( + mock_pyproject_file: tuple[Path, Path], pyproject_content: str, expected_template_folder: Path +) -> None: + """Test the find_template_folder function. + + Args: + mock_pyproject_file: Mock the pyproject.toml file. + pyproject_content: The content to write to the pyproject.toml file. + expected_template_folder: The expected template folder path. + """ + mock_pyproject_file[0].write_text(pyproject_content) + template_folder = find_template_folder() + assert template_folder == expected_template_folder + + +def test_main_no_unreleased_entries( + mock_env_vars: None, # noqa: ARG001 + mock_changelog_file: Path, + summary_file: Path, # noqa: ARG001 + mock_pyproject_file: Path, # noqa: ARG001 +) -> None: + """Test the main function when no unreleased entries are found. + + Args: + mock_env_vars: Mock the environment variables. + mock_changelog_file: Mock the changelog file. + summary_file: Mock the environment variables. + mock_pyproject_file: Mock the pyproject.toml file. + """ + # Modify the changelog content to have no unreleased entries + changelog_content = """# Changelog +--- +## Released +### Added +- Released feature +""" + mock_changelog_file.write_text(changelog_content) + + with pytest.raises(SystemExit, match="No unreleased entries were found in.*"): + main() + + +def test_main_with_unreleased_entries( + mock_env_vars: None, # noqa: ARG001 + mock_pyproject_file: tuple[Path, Path], + mock_changelog_file: Path, + summary_file: Path, +) -> None: + """Test the main function when unreleased entries are found. + + Args: + mock_env_vars: Mock the environment variables. + mock_pyproject_file: Mock the pyproject.toml file. + mock_changelog_file: Mock the changelog file. + summary_file: Mock the environment variables. 
+ """ + _, template_folder = mock_pyproject_file + template_changelog_file = template_folder / PREVIOUS_CHANGELOG_FILENAME + template_release_notes_file = template_folder / PREVIOUS_RELEASE_NOTES_FILENAME + main() + + assert template_changelog_file.read_text() == mock_changelog_file.read_text() + assert ( + template_release_notes_file.read_text().strip() == "## Unreleased\n### Added\n- New feature" + ) + + with summary_file.open("r") as summary_file_handle: + summary_contents = summary_file_handle.read() + assert "## Workflow Inputs\n- release-level: minor\n" in summary_contents + assert "## Incoming Changes\n### Added\n- New feature" in summary_contents + + +def test_main_with_no_release_level( + mock_env_vars: None, # noqa: ARG001 + mock_pyproject_file: tuple[Path, Path], + mock_changelog_file: Path, + summary_file: Path, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test the main function when unreleased entries are found but no release_level is provided. + + Args: + mock_env_vars: Mock the environment variables. + mock_pyproject_file: Mock the pyproject.toml file. + mock_changelog_file: Mock the changelog file. + summary_file: Mock the environment variables. + monkeypatch: The monkeypatch fixture. + """ + _, template_folder = mock_pyproject_file + template_changelog_file = template_folder / PREVIOUS_CHANGELOG_FILENAME + template_release_notes_file = template_folder / PREVIOUS_RELEASE_NOTES_FILENAME + + # Unset the INPUT_RELEASE-LEVEL environment variable + monkeypatch.delenv("INPUT_RELEASE-LEVEL", raising=False) + main() + + assert template_changelog_file.read_text() == mock_changelog_file.read_text() + assert ( + template_release_notes_file.read_text().strip() == "## Unreleased\n### Added\n- New feature" + ) + assert not summary_file.exists() diff --git a/tests/test_update_development_dependencies.py b/tests/test_update_development_dependencies.py new file mode 100644 index 00000000..cc724c7c --- /dev/null +++ b/tests/test_update_development_dependencies.py @@ -0,0 +1,290 @@ +"""Test the update_development_dependencies action Python code.""" + +import sys + +from pathlib import Path +from typing import Generator +from unittest.mock import call, MagicMock, patch + +import pytest + +from actions.update_development_dependencies.main import ( + convert_dict_input, + export_requirements_files, + main, + sort_requirements_file, + update_poetry_dependencies, + update_pre_commit_dependencies, +) + +PYTHON_EXECUTABLE = Path(sys.executable).as_posix() + + +@pytest.fixture(autouse=True) +def mock_pypi_server() -> Generator[MagicMock, None, None]: + """Mock the PyPISimple class and its methods.""" + with patch("actions.update_development_dependencies.main.PyPISimple") as mock_pypi_simple: + mock_server = mock_pypi_simple.return_value + mock_project_page = MagicMock() + mock_project_page.packages = [MagicMock(version="1.0.0")] + mock_server.get_project_page.return_value = mock_project_page + yield mock_server + + +@pytest.fixture() +def repo_root_dir( + monkeypatch: pytest.MonkeyPatch, + tmp_path: Path, +) -> Path: + """Fixture to mock environment variables and repo root.""" + # Set up necessary files + repo_root_directory = tmp_path / "repo" + repo_root_directory.mkdir() + monkeypatch.chdir(repo_root_directory) + (repo_root_directory / ".pre-commit-config.yaml").touch() + (repo_root_directory / "dev").mkdir() + (repo_root_directory / "dev" / "requirements.txt").touch() + + monkeypatch.setenv("INPUT_REPO-ROOT", str(repo_root_directory)) + monkeypatch.setenv("INPUT_DEPENDENCY-DICT", '{"dev": 
["pytest"]}') + monkeypatch.setenv("INPUT_EXPORT-DEPENDENCY-GROUPS", "dev") + monkeypatch.setenv("INPUT_PRE-COMMIT-HOOK-SKIP-LIST", "") + monkeypatch.setenv("INPUT_INSTALL-DEPENDENCIES", "true") + monkeypatch.setenv("INPUT_RUN-PRE-COMMIT", "true") + monkeypatch.setenv("INPUT_UPDATE-PRE-COMMIT", "true") + + return repo_root_directory + + +def test_update_poetry_dependencies( + repo_root_dir: Path, + monkeypatch: pytest.MonkeyPatch, # noqa: ARG001 +) -> None: + """Test the update_poetry_dependencies function.""" + with patch("subprocess.check_call") as mock_subproc_call: + dependencies_to_update = {"dev": ["pytest"]} + + update_poetry_dependencies( + PYTHON_EXECUTABLE, repo_root_dir, dependencies_to_update, lock_only=False + ) + + # Check the calls to subprocess.check_call + expected_calls = [ + call([PYTHON_EXECUTABLE, "-m", "poetry", "remove", "--lock", "--group=dev", "pytest"]), + call( + [ + PYTHON_EXECUTABLE, + "-m", + "poetry", + "add", + "--group=dev", + "pytest==1.0.0", + ] + ), + call([PYTHON_EXECUTABLE, "-m", "poetry", "update"]), + call( + [ + f"{Path(PYTHON_EXECUTABLE).parent.as_posix()}/toml-sort", + f"{repo_root_dir.as_posix()}/pyproject.toml", + "--in-place", + "--sort-table-keys", + ] + ), + ] + assert mock_subproc_call.call_count == 4 + mock_subproc_call.assert_has_calls(expected_calls, any_order=True) + + +def test_update_pre_commit_dependencies( + repo_root_dir: Path, + monkeypatch: pytest.MonkeyPatch, # noqa: ARG001 +) -> None: + """Test the update_pre_commit_dependencies function.""" + with patch("subprocess.check_call") as mock_subproc_call: + update_pre_commit_dependencies(sys.executable, repo_root_dir) + + # Check the calls to subprocess.check_call + expected_calls = [ + call( + [ + "git", + "config", + "--global", + "--add", + "safe.directory", + f"{repo_root_dir.resolve().as_posix()}", + ] + ), + call([PYTHON_EXECUTABLE, "-m", "pre_commit", "autoupdate", "--freeze"]), + ] + assert mock_subproc_call.call_count == 2 + mock_subproc_call.assert_has_calls(expected_calls, any_order=True) + + +def test_export_requirements_files( + repo_root_dir: Path, + monkeypatch: pytest.MonkeyPatch, # noqa: ARG001 +) -> None: + """Test the export_requirements_files function.""" + with patch("subprocess.check_call") as mock_subproc_call: + dependency_groups = ["dev:dev_output"] + (repo_root_dir / "dev_output").mkdir() + (repo_root_dir / "dev_output" / "requirements.txt").touch() + + export_requirements_files(PYTHON_EXECUTABLE, dependency_groups) + + # Check the calls to subprocess.check_call + expected_calls = [ + call([PYTHON_EXECUTABLE, "-m", "poetry", "config", "warnings.export", "false"]), + call( + [ + PYTHON_EXECUTABLE, + "-m", + "poetry", + "export", + "--only", + "dev", + "--without-hashes", + "--output", + "dev_output/requirements.txt", + ] + ), + ] + assert mock_subproc_call.call_count == 2 + mock_subproc_call.assert_has_calls(expected_calls, any_order=True) + + +def test_main( + repo_root_dir: Path, # noqa: ARG001 + monkeypatch: pytest.MonkeyPatch, # noqa: ARG001 +) -> None: + """Test the main function.""" + with patch("subprocess.check_call") as mock_subproc_call: + # Call the main function + main() + assert mock_subproc_call.called + assert mock_subproc_call.call_count == 9 + + +def test_main_no_install_dependencies( + repo_root_dir: Path, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test the main function.""" + with patch( + "subprocess.check_call" + ) as mock_subproc_call, monkeypatch.context() as mocked_context: + 
+        mocked_context.setenv("INPUT_EXPORT-DEPENDENCY-GROUPS", "")
+        mocked_context.setenv("INPUT_PRE-COMMIT-HOOK-SKIP-LIST", "")
+        mocked_context.setenv("INPUT_INSTALL-DEPENDENCIES", "false")
+        mocked_context.setenv("INPUT_RUN-PRE-COMMIT", "false")
+        mocked_context.setenv("INPUT_UPDATE-PRE-COMMIT", "false")
+
+        main()
+
+        expected_calls = [
+            call([PYTHON_EXECUTABLE, "-m", "poetry", "remove", "--lock", "--group=dev", "pytest"]),
+            call(
+                [
+                    PYTHON_EXECUTABLE,
+                    "-m",
+                    "poetry",
+                    "add",
+                    "--group=dev",
+                    "pytest==1.0.0",
+                    "--lock",
+                ]
+            ),
+            call([PYTHON_EXECUTABLE, "-m", "poetry", "update", "--lock"]),
+            call(
+                [
+                    f"{Path(PYTHON_EXECUTABLE).parent.as_posix()}/toml-sort",
+                    f"{repo_root_dir.as_posix()}/pyproject.toml",
+                    "--in-place",
+                    "--sort-table-keys",
+                ]
+            ),
+        ]
+        assert mock_subproc_call.call_count == 4
+        mock_subproc_call.assert_has_calls(expected_calls, any_order=True)
+
+
+def test_sort_requirements_file(tmp_path: Path) -> None:
+    """Test the sort_requirements_file function."""
+    # Create an unsorted requirements file
+    unsorted_content = ["Flask==1.1.2\n", "requests==2.24.0\n", "pytest==6.0.1\n", "Django==3.1\n"]
+
+    sorted_content = ["Django==3.1\n", "Flask==1.1.2\n", "pytest==6.0.1\n", "requests==2.24.0\n"]
+
+    requirements_file = tmp_path / "requirements.txt"
+    requirements_file.write_text("".join(unsorted_content))
+
+    # Call the function to sort the file
+    sort_requirements_file(requirements_file)
+
+    # Read the sorted file content
+    sorted_file_content = requirements_file.read_text().splitlines(keepends=True)
+
+    # Compare the sorted file content to the expected sorted content
+    assert sorted_file_content == sorted_content
+
+
+def test_convert_dict_input_valid() -> None:
+    """Test convert_dict_input with valid input."""
+    input_str = '{"dev": ["pytest", "mock"], "prod": ["django"]}'
+    expected_output = {"dev": ["pytest", "mock"], "prod": ["django"]}
+    assert convert_dict_input(input_str) == expected_output
+
+
+def test_convert_dict_input_invalid_json() -> None:
+    """Test convert_dict_input with invalid JSON input."""
+    input_str = '{"dev": ["pytest", "mock", "prod": ["django"]}'
+    with pytest.raises(
+        ValueError,
+        match=r'Input "{.*}" does not match the required type of `dict\[str, list\[str\]\]`.',
+    ):
+        convert_dict_input(input_str)
+
+
+def test_convert_dict_input_invalid_structure() -> None:
+    """Test convert_dict_input with invalid dictionary structure."""
+    input_str = '{"dev": "pytest, mock", "prod": "django"}'
+    with pytest.raises(
+        ValueError,
+        match=r'Input "{.*}" does not match the required type of `dict\[str, list\[str\]\]`.',
+    ):
+        convert_dict_input(input_str)
+
+
+def test_convert_dict_input_non_string_keys() -> None:
+    """Test convert_dict_input with non-string keys."""
+    input_str = '{1: ["pytest", "mock"], "prod": ["django"]}'
+    with pytest.raises(
+        ValueError,
+        match=r'Input "{.*}" does not match the required type of `dict\[str, list\[str\]\]`.',
+    ):
+        convert_dict_input(input_str)
+
+
+def test_convert_dict_input_non_string_values() -> None:
+    """Test convert_dict_input with non-string values."""
+    input_str = '{"dev": ["pytest", 123], "prod": ["django"]}'
+    with pytest.raises(
+        ValueError,
+        match=r'Input "{.*}" does not match the required type of `dict\[str, list\[str\]\]`.',
+    ):
+        convert_dict_input(input_str)
+
+
+def test_convert_dict_input_empty_dict() -> None:
+    """Test convert_dict_input with empty dictionary."""
+    input_str = "{}"
+    expected_output = {}
+    assert convert_dict_input(input_str) == expected_output
+
+
+def test_convert_dict_input_empty_lists() -> None:
+    """Test convert_dict_input with empty lists."""
+    input_str = '{"dev": [], "prod": []}'
+    expected_output: dict[str, list[str]] = {"dev": [], "prod": []}
+    assert convert_dict_input(input_str) == expected_output
diff --git a/workflows/package-release.md b/workflows/package-release.md
index b4e17185..c7be08e8 100644
--- a/workflows/package-release.md
+++ b/workflows/package-release.md
@@ -50,7 +50,7 @@ will be used to fill in the GitHub Release Notes.
 > This workflow uses the following GitHub Actions:
 >
 > - [actions/checkout](https://github.com/actions/checkout)
-> - [tektronix/python-package-ci-cd/actions/find-unreleased-changelog-items](https://github.com/tektronix/python-package-ci-cd)
+> - [tektronix/python-package-ci-cd/actions/find_unreleased_changelog_items](https://github.com/tektronix/python-package-ci-cd)
 > - [python-semantic-release/python-semantic-release](https://github.com/python-semantic-release/python-semantic-release)
 > - [hynek/build-and-inspect-python-package](https://github.com/hynek/build-and-inspect-python-package)
 > - [actions/download-artifact](https://github.com/actions/download-artifact)
diff --git a/workflows/package-testpypi.md b/workflows/package-testpypi.md
index 9d13e0da..b92c85ae 100644
--- a/workflows/package-testpypi.md
+++ b/workflows/package-testpypi.md
@@ -28,7 +28,7 @@ officially released version of the package, incrementing `N` each time the workf
 > This workflow uses the following GitHub Actions:
 >
 > - [actions/checkout](https://github.com/actions/checkout)
-> - [tektronix/python-package-ci-cd/actions/create-unique-testpypi-version](https://github.com/tektronix/python-package-ci-cd)
+> - [tektronix/python-package-ci-cd/actions/create_unique_testpypi_version](https://github.com/tektronix/python-package-ci-cd)
 > - [hynek/build-and-inspect-python-package](https://github.com/hynek/build-and-inspect-python-package)
 > - [actions/download-artifact](https://github.com/actions/download-artifact)
 > - [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish)
diff --git a/workflows/update-python-and-pre-commit-dependencies.md b/workflows/update-python-and-pre-commit-dependencies.md
index 42364d1b..a9101e7b 100644
--- a/workflows/update-python-and-pre-commit-dependencies.md
+++ b/workflows/update-python-and-pre-commit-dependencies.md
@@ -19,7 +19,7 @@ updates for the Python dependencies.
 >
 > - [actions/checkout](https://github.com/actions/checkout)
 > - [crazy-max/ghaction-import-gpg](https://github.com/crazy-max/ghaction-import-gpg)
-> - [tektronix/python-package-ci-cd/actions/update-development-dependencies](https://github.com/tektronix/python-package-ci-cd)
+> - [tektronix/python-package-ci-cd/actions/update_development_dependencies](https://github.com/tektronix/python-package-ci-cd)
 > - [stefanzweifel/git-auto-commit-action](https://github.com/stefanzweifel/git-auto-commit-action)
 >
 > See the [Workflow file][workflow-file] for the currently used versions of each GitHub Action.