diff --git a/.github/workflows/build-docker-images-for-testing.yml b/.github/workflows/build-docker-images-for-testing.yml index cd9c549494e..4d052bf1a47 100644 --- a/.github/workflows/build-docker-images-for-testing.yml +++ b/.github/workflows/build-docker-images-for-testing.yml @@ -19,7 +19,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false @@ -28,14 +28,14 @@ jobs: run: echo "IMAGE_REPOSITORY=$(echo ${{ github.repository }} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1 with: buildkitd-flags: --debug driver-opts: image=moby/buildkit:master # needed to get the fix for https://github.com/moby/buildkit/issues/2426 - name: Build id: docker_build - uses: docker/build-push-action@v6 + uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0 timeout-minutes: 10 env: DOCKER_BUILD_CHECKS_ANNOTATIONS: false @@ -49,7 +49,7 @@ jobs: # export docker images to be used in next jobs below - name: Upload image ${{ matrix.docker-image }} as artifact timeout-minutes: 10 - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 with: name: built-docker-image-${{ matrix.docker-image }}-${{ matrix.os }} path: ${{ matrix.docker-image }}-${{ matrix.os }}_img diff --git a/.github/workflows/cancel-outdated-workflow-runs.yml b/.github/workflows/cancel-outdated-workflow-runs.yml index d9e0ec074ad..1f984efa2ea 100644 --- a/.github/workflows/cancel-outdated-workflow-runs.yml +++ b/.github/workflows/cancel-outdated-workflow-runs.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 3 steps: - - uses: styfle/cancel-workflow-action@0.12.1 + - uses: styfle/cancel-workflow-action@85880fa0301c86cca9da44039ee3bb12d3bedbfa # 0.12.1 with: workflow_id: 'integration-tests.yml,k8s-testing.yml,unit-tests.yml' access_token: ${{ github.token }} diff --git a/.github/workflows/detect-merge-conflicts.yaml b/.github/workflows/detect-merge-conflicts.yaml index 83041158702..934543cec4e 100644 --- a/.github/workflows/detect-merge-conflicts.yaml +++ b/.github/workflows/detect-merge-conflicts.yaml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest steps: - name: check if prs are conflicted - uses: eps1lon/actions-label-merge-conflict@v3 + uses: eps1lon/actions-label-merge-conflict@1b1b1fcde06a9b3d089f3464c96417961dde1168 # v3.0.2 with: dirtyLabel: "conflicts-detected" repoToken: "${{ secrets.GITHUB_TOKEN }}" diff --git a/.github/workflows/fetch-oas.yml b/.github/workflows/fetch-oas.yml index 5ec0aa9abad..5257e92aab3 100644 --- a/.github/workflows/fetch-oas.yml +++ b/.github/workflows/fetch-oas.yml @@ -22,7 +22,7 @@ jobs: file-type: [yaml, json] steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: release/${{ env.release_version }} @@ -51,7 +51,7 @@ jobs: run: docker compose down - name: Upload oas.${{ matrix.file-type }} as artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 with: name: oas-${{ matrix.file-type }} path: oas.${{ matrix.file-type }} diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml index 42f12417a39..888cd7eb3e4 100644 --- 
a/.github/workflows/gh-pages.yml +++ b/.github/workflows/gh-pages.yml @@ -13,18 +13,18 @@ jobs: runs-on: ubuntu-latest steps: - name: Setup Hugo - uses: peaceiris/actions-hugo@v3 + uses: peaceiris/actions-hugo@75d2e84710de30f6ff7268e08f310b60ef14033f # v3.0.0 with: hugo-version: '0.125.3' extended: true - name: Setup Node - uses: actions/setup-node@v4 + uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0 with: node-version: '22.5.1' - name: Cache dependencies - uses: actions/cache@v4 + uses: actions/cache@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0 with: path: ~/.npm key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} @@ -32,14 +32,14 @@ jobs: ${{ runner.os }}-node- - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: submodules: recursive fetch-depth: 0 - name: Setup Pages id: pages - uses: actions/configure-pages@v4 + uses: actions/configure-pages@983d7736d9b0ae728b81ab479565c72886d7745b # v5.0.0 - use this after https://github.com/DefectDojo/django-DefectDojo/pull/11329 - name: Install dependencies run: cd docs && npm ci @@ -51,7 +51,7 @@ jobs: run: cd docs && hugo --minify --gc --config config/production/hugo.toml - name: Deploy - uses: peaceiris/actions-gh-pages@v4 + uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0 with: # publishes to the `gh-pages` branch by default github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./docs/public diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index cd8d8072377..c60cb6f3403 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -41,11 +41,11 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 # load docker images from build jobs - name: Load images from artifacts - uses: actions/download-artifact@v4 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: path: built-docker-image pattern: built-docker-image-* diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index a4feb77273f..3f169002efb 100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -32,10 +32,10 @@ jobs: os: debian steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Setup Minikube - uses: manusa/actions-setup-minikube@v2.13.0 + uses: manusa/actions-setup-minikube@0e8062ceff873bd77979f39cf8fd3621416afe4d # v2.13.0 with: minikube version: 'v1.33.1' kubernetes version: ${{ matrix.k8s }} @@ -48,7 +48,7 @@ jobs: minikube status - name: Load images from artifacts - uses: actions/download-artifact@v4 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: path: built-docker-image pattern: built-docker-image-* diff --git a/.github/workflows/plantuml.yml b/.github/workflows/plantuml.yml index 5fa308ffb41..6beb590899b 100644 --- a/.github/workflows/plantuml.yml +++ b/.github/workflows/plantuml.yml @@ -13,7 +13,7 @@ jobs: UML_FILES: ".puml" steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false @@ -33,7 +33,7 @@ jobs: with: args: -v -tpng ${{ steps.getfile.outputs.files }} - name: Push Local Changes - uses: stefanzweifel/git-auto-commit-action@v5.0.1 + 
uses: stefanzweifel/git-auto-commit-action@8621497c8c39c72f3e2a999a26b4ca1b5058a842 # v5.0.1 with: commit_user_name: "PlantUML_bot" commit_user_email: "noreply@defectdojo.org" diff --git a/.github/workflows/pr-labeler.yml b/.github/workflows/pr-labeler.yml index 34a31a0cab2..cde6795db05 100644 --- a/.github/workflows/pr-labeler.yml +++ b/.github/workflows/pr-labeler.yml @@ -15,7 +15,7 @@ jobs: name: "Autolabeler" runs-on: ubuntu-latest steps: - - uses: actions/labeler@v5 + - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0 with: repo-token: "${{ secrets.GITHUB_TOKEN }}" sync-labels: true diff --git a/.github/workflows/release-1-create-pr.yml b/.github/workflows/release-1-create-pr.yml index c93b1d0ee69..5b65c02ec93 100644 --- a/.github/workflows/release-1-create-pr.yml +++ b/.github/workflows/release-1-create-pr.yml @@ -21,7 +21,7 @@ jobs: steps: - name: Checkout from_branch branch - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ github.event.inputs.from_branch }} @@ -45,7 +45,7 @@ jobs: run: git push origin HEAD:${NEW_BRANCH} - name: Checkout release branch - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ env.NEW_BRANCH }} @@ -75,7 +75,7 @@ jobs: grep -H version helm/defectdojo/Chart.yaml - name: Push version changes - uses: stefanzweifel/git-auto-commit-action@v5.0.1 + uses: stefanzweifel/git-auto-commit-action@8621497c8c39c72f3e2a999a26b4ca1b5058a842 # v5.0.1 with: commit_user_name: "${{ env.GIT_USERNAME }}" commit_user_email: "${{ env.GIT_EMAIL }}" @@ -88,7 +88,7 @@ jobs: - name: Create Pull Request env: REPO_ORG: ${{ env.repoorg }} - uses: actions/github-script@v7 + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/release-2-tag-docker-push.yml b/.github/workflows/release-2-tag-docker-push.yml index f6f021fcaa9..bd06d3b920a 100644 --- a/.github/workflows/release-2-tag-docker-push.yml +++ b/.github/workflows/release-2-tag-docker-push.yml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: master diff --git a/.github/workflows/release-3-master-into-dev.yml b/.github/workflows/release-3-master-into-dev.yml index 012781f45e4..ede4cf33d37 100644 --- a/.github/workflows/release-3-master-into-dev.yml +++ b/.github/workflows/release-3-master-into-dev.yml @@ -21,7 +21,7 @@ jobs: steps: - name: Checkout master - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: master @@ -38,7 +38,7 @@ jobs: run: git push origin HEAD:${NEW_BRANCH} - name: Checkout new branch - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ env.NEW_BRANCH }} @@ -73,7 +73,7 @@ jobs: if: endsWith(github.event.inputs.release_number_new, '.0') && endsWith(github.event.inputs.release_number_dev, '.0-dev') - name: Push version changes - uses: stefanzweifel/git-auto-commit-action@v5.0.1 + uses: stefanzweifel/git-auto-commit-action@8621497c8c39c72f3e2a999a26b4ca1b5058a842 # v5.0.1 with: commit_user_name: "${{ env.GIT_USERNAME }}" commit_user_email: "${{ env.GIT_EMAIL }}" @@ -86,7 +86,7 @@ jobs: - name: Create Pull Request env: REPO_ORG: ${{ env.repoorg }} - uses: actions/github-script@v7 + uses: 
actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | @@ -103,7 +103,7 @@ jobs: steps: - name: Checkout master - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: master @@ -120,7 +120,7 @@ jobs: run: git push origin HEAD:${NEW_BRANCH} - name: Checkout new branch - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ env.NEW_BRANCH }} @@ -139,7 +139,7 @@ jobs: grep version components/package.json - name: Push version changes - uses: stefanzweifel/git-auto-commit-action@v5.0.1 + uses: stefanzweifel/git-auto-commit-action@8621497c8c39c72f3e2a999a26b4ca1b5058a842 # v5.0.1 with: commit_user_name: "${{ env.GIT_USERNAME }}" commit_user_email: "${{ env.GIT_EMAIL }}" @@ -152,7 +152,7 @@ jobs: - name: Create Pull Request env: REPO_ORG: ${{ env.repoorg }} - uses: actions/github-script@v7 + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index 0e42769cd76..7802bfdc1b2 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -27,7 +27,7 @@ jobs: steps: - name: Create Release id: create_release - uses: release-drafter/release-drafter@v6.0.0 + uses: release-drafter/release-drafter@3f0f87098bd6b5c5b9a36d49c41d998ea58f9348 # v6.0.0 with: version: ${{ github.event.inputs.version }} env: @@ -47,13 +47,13 @@ jobs: runs-on: ubuntu-latest steps: - name: Load OAS files from artifacts - uses: actions/download-artifact@v4 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: pattern: oas-* - name: Upload Release Asset - OpenAPI Specification - YAML id: upload-release-asset-yaml - uses: actions/upload-release-asset@v1 + uses: actions/upload-release-asset@e8f9f06c4b078e705bd2ea027f0926603fc9b4d5 # v1.0.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -64,7 +64,7 @@ jobs: - name: Upload Release Asset - OpenAPI Specification - JSON id: upload-release-asset-json - uses: actions/upload-release-asset@v1 + uses: actions/upload-release-asset@e8f9f06c4b078e705bd2ea027f0926603fc9b4d5 # v1.0.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: diff --git a/.github/workflows/release-x-manual-docker-containers.yml b/.github/workflows/release-x-manual-docker-containers.yml index 6f8862b6216..af42a46db5d 100644 --- a/.github/workflows/release-x-manual-docker-containers.yml +++ b/.github/workflows/release-x-manual-docker-containers.yml @@ -32,13 +32,13 @@ jobs: platform: [amd64] steps: - name: Login to DockerHub - uses: docker/login-action@v3 + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Checkout tag - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ github.event.inputs.release_number }} @@ -47,11 +47,11 @@ jobs: - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1 - name: Build and push images with debian if: ${{ matrix.os == 'debian' }} - uses: docker/build-push-action@v6 + uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0 env: 
DOCKER_BUILD_CHECKS_ANNOTATIONS: false REPO_ORG: ${{ env.repoorg }} @@ -64,7 +64,7 @@ jobs: - name: Build and push images with alpine if: ${{ matrix.os == 'alpine' }} - uses: docker/build-push-action@v6 + uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0 env: DOCKER_BUILD_CHECKS_ANNOTATIONS: false REPO_ORG: ${{ env.repoorg }} diff --git a/.github/workflows/release-x-manual-helm-chart.yml b/.github/workflows/release-x-manual-helm-chart.yml index 560e809e843..27b7edab0ce 100644 --- a/.github/workflows/release-x-manual-helm-chart.yml +++ b/.github/workflows/release-x-manual-helm-chart.yml @@ -28,7 +28,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: master fetch-depth: 0 @@ -56,7 +56,7 @@ jobs: helm dependency update ./helm/defectdojo - name: Add yq - uses: mikefarah/yq@master + uses: mikefarah/yq@4839dbbf80445070a31c7a9c1055da527db2d5ee # v4.44.6 - name: Pin version docker version id: pin_image @@ -73,7 +73,7 @@ jobs: echo "chart_version=$(ls build | cut -d '-' -f 2 | sed 's|\.tgz||')" >> $GITHUB_ENV - name: Create release ${{ github.event.inputs.release_number }} - uses: softprops/action-gh-release@v2 + uses: softprops/action-gh-release@7b4da11513bf3f43f9999e90eabced41ab8bb048 # v2.2.0 with: name: '${{ github.event.inputs.release_number }} 🌈' tag_name: ${{ github.event.inputs.release_number }} diff --git a/.github/workflows/release_drafter_valentijn.yml b/.github/workflows/release_drafter_valentijn.yml index 0ac52a0466a..7ce4c2813a6 100644 --- a/.github/workflows/release_drafter_valentijn.yml +++ b/.github/workflows/release_drafter_valentijn.yml @@ -20,7 +20,7 @@ jobs: update_release_draft: runs-on: ubuntu-latest steps: - - uses: valentijnscholten/release-drafter@master + - uses: valentijnscholten/release-drafter@master # TODO: no longer maintained; the missing part may already be solved upstream with: version: ${{github.event.inputs.version}} previous-version: ${{github.event.inputs.previous-version}} diff --git a/.github/workflows/rest-framework-tests.yml b/.github/workflows/rest-framework-tests.yml index bd8ca3322fa..63056587431 100644 --- a/.github/workflows/rest-framework-tests.yml +++ b/.github/workflows/rest-framework-tests.yml @@ -14,13 +14,13 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false # load docker images from build jobs - name: Load images from artifacts - uses: actions/download-artifact@v4 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: path: built-docker-image pattern: built-docker-image-* diff --git a/.github/workflows/ruff.yml b/.github/workflows/ruff.yml index 04799cdd003..59652fa7332 100644 --- a/.github/workflows/ruff.yml +++ b/.github/workflows/ruff.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install Ruff Linter run: pip install -r requirements-lint.txt diff --git a/.github/workflows/shellcheck.yml b/.github/workflows/shellcheck.yml index 4a37d71b562..8657b48c68a 100644 --- a/.github/workflows/shellcheck.yml +++ b/.github/workflows/shellcheck.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Grab 
shellcheck run: | diff --git a/.github/workflows/test-helm-chart.yml b/.github/workflows/test-helm-chart.yml index 5bf20169328..75cf1186411 100644 --- a/.github/workflows/test-helm-chart.yml +++ b/.github/workflows/test-helm-chart.yml @@ -14,15 +14,15 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false fetch-depth: 0 - name: Set up Helm - uses: azure/setup-helm@v4.2.0 + uses: azure/setup-helm@fe7b79cd5ee1e45176fcad797de68ecaf3ca4814 # v4.2.0 - - uses: actions/setup-python@v5 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 with: python-version: 3.9 @@ -33,7 +33,7 @@ jobs: helm dependency update ./helm/defectdojo - name: Set up chart-testing - uses: helm/chart-testing-action@v2.6.1 + uses: helm/chart-testing-action@e6669bcd63d7cb57cb4380c33043eebe5d111992 # v2.6.1 with: yamale_version: 4.0.4 yamllint_version: 1.35.1 diff --git a/Dockerfile.nginx-alpine b/Dockerfile.nginx-alpine index 17abb7c3f82..9c266b9734a 100644 --- a/Dockerfile.nginx-alpine +++ b/Dockerfile.nginx-alpine @@ -140,7 +140,7 @@ COPY manage.py ./ COPY dojo/ ./dojo/ RUN env DD_SECRET_KEY='.' python3 manage.py collectstatic --noinput && true -FROM nginx:1.27.2-alpine@sha256:74175cf34632e88c6cfe206897cbfe2d2fecf9bf033c40e7f9775a3689e8adc7 +FROM nginx:1.27.3-alpine@sha256:41523187cf7d7a2f2677a80609d9caa14388bf5c1fbca9c410ba3de602aaaab4 ARG uid=1001 ARG appuser=defectdojo COPY --from=collectstatic /app/static/ /usr/share/nginx/html/static/ diff --git a/Dockerfile.nginx-debian b/Dockerfile.nginx-debian index b062e28e10e..f55d77bfe8f 100644 --- a/Dockerfile.nginx-debian +++ b/Dockerfile.nginx-debian @@ -73,7 +73,7 @@ COPY dojo/ ./dojo/ RUN env DD_SECRET_KEY='.' python3 manage.py collectstatic --noinput && true -FROM nginx:1.27.2-alpine@sha256:74175cf34632e88c6cfe206897cbfe2d2fecf9bf033c40e7f9775a3689e8adc7 +FROM nginx:1.27.3-alpine@sha256:41523187cf7d7a2f2677a80609d9caa14388bf5c1fbca9c410ba3de602aaaab4 ARG uid=1001 ARG appuser=defectdojo COPY --from=collectstatic /app/static/ /usr/share/nginx/html/static/ diff --git a/docker-compose.yml b/docker-compose.yml index aac7a98f7ba..c0cc991e6ca 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -16,6 +16,8 @@ services: - uwsgi environment: NGINX_METRICS_ENABLED: "${NGINX_METRICS_ENABLED:-false}" + DD_UWSGI_HOST: "${DD_UWSGI_HOST:-uwsgi}" + DD_UWSGI_PORT: "${DD_UWSGI_PORT:-3031}" volumes: - defectdojo_media:/usr/share/nginx/html/media ports: @@ -103,7 +105,7 @@ services: source: ./docker/extra_settings target: /app/docker/extra_settings postgres: - image: postgres:17.2-alpine@sha256:e7897baa70dae1968d23d785adb4aeb699175e0bcaae44f98a7083ecb9668b93 + image: postgres:17.2-alpine@sha256:d37d2c160d34430877c802e5adc22824a2ad453499db9bab1a2ceb2be6c1a46f environment: POSTGRES_DB: ${DD_DATABASE_NAME:-defectdojo} POSTGRES_USER: ${DD_DATABASE_USER:-defectdojo} diff --git a/docs/content/en/open_source/upgrading/2.36.md b/docs/content/en/open_source/upgrading/2.36.md index ceaa8c77d14..86671001e0d 100644 --- a/docs/content/en/open_source/upgrading/2.36.md +++ b/docs/content/en/open_source/upgrading/2.36.md @@ -5,7 +5,7 @@ weight: -20240603 description: Breaking Change for HELM deployments with PostgreSQL --- -Previous HELM deployments (HELM chart `<=1.6.136`, DefectDojo `<=2.35.4`) used a pinned version of PostgreSQL in versions `11.x`. 
These are incompatible with Django in version `4.2` (used from DefectDojo version `3.36.0`; HELM chart `1.6.137`). Because of this, it is necessary to upgrade PostgreSQL to version `12.x` or higher. DefectDojo in version `3.36.1` (HELM chart `1.6.138`) uses this new version of PostgreSQL. +Previous HELM deployments (HELM chart `<=1.6.136`, DefectDojo `<=2.35.4`) used a pinned version of PostgreSQL in versions `11.x`. These are incompatible with Django in version `4.2` (used from DefectDojo version `2.36.0`; HELM chart `1.6.137`). Because of this, it is necessary to upgrade PostgreSQL to version `12.x` or higher. DefectDojo in version `2.36.1` (HELM chart `1.6.138`) uses this new version of PostgreSQL. Unfortunately, an upgrade of PostgreSQL is not enough because PostgreSQL does not support automatic migration of data structures in the filesystem. Because of this, migration is needed. There are different ways (many of them similar to migration between different database backends (e.g. from MySQL to PostgreSQL)). Please find inspiration and the best fitting way for you in: diff --git a/docs/content/en/open_source/upgrading/2.42.md b/docs/content/en/open_source/upgrading/2.42.md new file mode 100644 index 00000000000..c815a7794d3 --- /dev/null +++ b/docs/content/en/open_source/upgrading/2.42.md @@ -0,0 +1,7 @@ +--- +title: 'Upgrading to DefectDojo Version 2.42.x' +toc_hide: true +weight: -20241104 +description: No special instructions. +--- +There are no special instructions for upgrading to 2.42.x. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.42.0) for the contents of the release. diff --git a/docs/package-lock.json b/docs/package-lock.json index 187c86624d8..5755bac0791 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -2394,9 +2394,9 @@ } }, "node_modules/@tabler/icons": { - "version": "3.23.0", - "resolved": "https://registry.npmjs.org/@tabler/icons/-/icons-3.23.0.tgz", - "integrity": "sha512-Cz+X58jfRm0g/KcupXXuPw5knj671lNR054AnmLXvCjudiQBWI0wZulDDSsqDoGezvBzMTNPQtNcjLkZs82ZxQ==", + "version": "3.24.0", + "resolved": "https://registry.npmjs.org/@tabler/icons/-/icons-3.24.0.tgz", + "integrity": "sha512-qNis9e90QcdxAGV3wNIeX0Ba2R7ktm0cnqOToKHJfC2kj3fvJwEVLsw63K0/fm7NW8rSZjDSTQRmMnSg8g/wrg==", "license": "MIT", "funding": { "type": "github", @@ -3986,9 +3986,9 @@ "license": "MIT" }, "node_modules/prettier": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.4.1.tgz", - "integrity": "sha512-G+YdqtITVZmOJje6QkXQWzl3fSfMxFwm1tjTyo9exhkmWSqC4Yhd1+lug++IlR2mvRVAxEDDWYkQdeSztajqgg==", + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.4.2.tgz", + "integrity": "sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ==", "dev": true, "license": "MIT", "bin": { @@ -4700,9 +4700,9 @@ "license": "MIT" }, "node_modules/vite": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.0.1.tgz", - "integrity": "sha512-Ldn6gorLGr4mCdFnmeAOLweJxZ34HjKnDm4HGo6P66IEqTxQb36VEdFJQENKxWjupNfoIjvRUnswjn1hpYEpjQ==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.0.3.tgz", + "integrity": "sha512-Cmuo5P0ENTN6HxLSo6IHsjCLn/81Vgrp81oaiFFMRa8gGDj5xEjIcEpf2ZymZtZR8oU0P2JX5WuUp/rlXcHkAw==", "dev": true, "license": "MIT", "dependencies": { diff --git a/dojo/benchmark/views.py b/dojo/benchmark/views.py index 0d0c7174b96..611c690945d 100644 --- a/dojo/benchmark/views.py +++ b/dojo/benchmark/views.py @@ 
-1,4 +1,3 @@ -import contextlib import logging from crum import get_current_user @@ -37,9 +36,7 @@ def add_benchmark(queryset, product): benchmark_product.product = product benchmark_product.control = requirement requirements.append(benchmark_product) - - with contextlib.suppress(Exception): - Benchmark_Product.objects.bulk_create(requirements) + Benchmark_Product.objects.bulk_create(requirements) @user_is_authorized(Product, Permissions.Benchmark_Edit, "pid") diff --git a/dojo/cred/views.py b/dojo/cred/views.py index f8f7756e340..e79dc6c6b16 100644 --- a/dojo/cred/views.py +++ b/dojo/cred/views.py @@ -1,4 +1,3 @@ -import contextlib import logging from django.contrib import messages @@ -585,9 +584,7 @@ def new_cred_finding(request, fid): @user_is_authorized(Cred_User, Permissions.Credential_Delete, "ttid") def delete_cred_controller(request, destination_url, id, ttid): - cred = None - with contextlib.suppress(Exception): - cred = Cred_Mapping.objects.get(pk=ttid) + cred = Cred_Mapping.objects.filter(pk=ttid).first() if request.method == "POST": tform = CredMappingForm(request.POST, instance=cred) message = "" diff --git a/dojo/endpoint/utils.py b/dojo/endpoint/utils.py index 0d584ff6c6c..f5f01ee4e30 100644 --- a/dojo/endpoint/utils.py +++ b/dojo/endpoint/utils.py @@ -208,8 +208,8 @@ def err_log(message, html_log, endpoint_html_log, endpoint): to_be_deleted.update(ep_ids[1:]) if change: message = "Merging Endpoints {} into '{}'".format( - [f"{str(x)} (id={x.pk})" for x in ep[1:]], - f"{str(ep[0])} (id={ep[0].pk})") + [f"{x} (id={x.pk})" for x in ep[1:]], + f"{ep[0]} (id={ep[0].pk})") html_log.append(message) logger.info(message) Endpoint_Status_model.objects\ diff --git a/dojo/endpoint/views.py b/dojo/endpoint/views.py index f7807e9b5ad..bf85092a129 100644 --- a/dojo/endpoint/views.py +++ b/dojo/endpoint/views.py @@ -503,7 +503,7 @@ def import_endpoint_meta(request, pid): endpoint_meta_import(file, product, create_endpoints, create_tags, create_dojo_meta, origin="UI", request=request) except Exception as e: logger.exception(e) - add_error_message_to_response(f"An exception error occurred during the report import:{str(e)}") + add_error_message_to_response(f"An exception error occurred during the report import:{e}") return HttpResponseRedirect(reverse("endpoint") + "?product=" + pid) add_breadcrumb(title="Endpoint Meta Importer", top_level=False, request=request) diff --git a/dojo/finding/helper.py b/dojo/finding/helper.py index 4c1281d6653..66badd594dc 100644 --- a/dojo/finding/helper.py +++ b/dojo/finding/helper.py @@ -4,6 +4,7 @@ from django.conf import settings from django.db.models.query_utils import Q from django.db.models.signals import post_delete, pre_delete +from django.db.utils import IntegrityError from django.dispatch.dispatcher import receiver from django.utils import timezone from fieldsignals import pre_save_changed @@ -164,21 +165,22 @@ def create_finding_group(finds, finding_group_name): finding_group = Finding_Group(test=finds[0].test) finding_group.creator = get_current_user() - finding_group.name = finding_group_name + finding_group_name_dummy - finding_group.save() - available_findings = [find for find in finds if not find.finding_group_set.all()] - finding_group.findings.set(available_findings) - # if user provided a name, we use that, else: - # if we have components, we may set a nice name but catch 'name already exist' exceptions + if finding_group_name: + finding_group.name = finding_group_name + elif finding_group.components: + finding_group.name = 
finding_group.components try: - if finding_group_name: - finding_group.name = finding_group_name - elif finding_group.components: - finding_group.name = finding_group.components finding_group.save() - except: - pass + except IntegrityError as ie: + if "already exists" in str(ie): + finding_group.name = finding_group_name + finding_group_name_dummy + finding_group.save() + else: + raise + + available_findings = [find for find in finds if not find.finding_group_set.all()] + finding_group.findings.set(available_findings) added = len(available_findings) skipped = len(finds) - added diff --git a/dojo/finding/views.py b/dojo/finding/views.py index 18faed336b0..ec25352a903 100644 --- a/dojo/finding/views.py +++ b/dojo/finding/views.py @@ -3010,7 +3010,7 @@ def finding_bulk_update_all(request, pid=None): success_count += 1 for error_message, error_count in error_counts.items(): - add_error_message_to_response("{error_count} finding groups could not be pushed to JIRA: {error_message}") + add_error_message_to_response(f"{error_count} finding groups could not be pushed to JIRA: {error_message}") if success_count > 0: add_success_message_to_response(f"{success_count} finding groups pushed to JIRA successfully") diff --git a/dojo/jira_link/views.py b/dojo/jira_link/views.py index 84d33e6ffdd..372b48fbfbe 100644 --- a/dojo/jira_link/views.py +++ b/dojo/jira_link/views.py @@ -552,7 +552,7 @@ def post(self, request, tid=None): url=request.build_absolute_uri(reverse("jira"))) return HttpResponseRedirect(reverse("jira")) except Exception as e: - add_error_message_to_response(f"Unable to delete JIRA Instance, probably because it is used by JIRA Issues: {str(e)}") + add_error_message_to_response(f"Unable to delete JIRA Instance, probably because it is used by JIRA Issues: {e}") rels = ["Previewing the relationships has been disabled.", ""] display_preview = get_setting("DELETE_PREVIEW") diff --git a/dojo/management/commands/rename_mend_findings.py b/dojo/management/commands/rename_mend_findings.py index 1620e5ce93b..f99f35a8027 100644 --- a/dojo/management/commands/rename_mend_findings.py +++ b/dojo/management/commands/rename_mend_findings.py @@ -33,8 +33,8 @@ def rename_mend_finding(): logger.info("######## Updating Hashcodes - deduplication is done in the background upon finding save ########") for finding in findings: logger.info("Updating Mend Finding with id: %d", finding.id) - lib_name_begin = re.search("\\*\\*Library Filename\\*\\* : ", finding.description).span(0)[1] - lib_name_end = re.search("\\*\\*Library Description\\*\\*", finding.description).span(0)[0] + lib_name_begin = re.search(r"\*\*Library Filename\*\* : ", finding.description).span(0)[1] + lib_name_end = re.search(r"\*\*Library Description\*\*", finding.description).span(0)[0] lib_name = finding.description[lib_name_begin:lib_name_end - 1] if finding.cve is None: finding.title = "CVE-None | " + lib_name diff --git a/dojo/middleware.py b/dojo/middleware.py index 9fcb8a51dbc..239a2d92f4b 100644 --- a/dojo/middleware.py +++ b/dojo/middleware.py @@ -1,5 +1,6 @@ import logging import re +from contextlib import suppress from threading import local from urllib.parse import quote @@ -56,13 +57,10 @@ def __call__(self, request): if request.user.is_authenticated: logger.debug("Authenticated user: %s", str(request.user)) - try: + with suppress(ModuleNotFoundError): # to avoid unittests to fail uwsgi = __import__("uwsgi", globals(), locals(), ["set_logvar"], 0) # this populates dd_user log var, so can appear in the uwsgi logs 
uwsgi.set_logvar("dd_user", str(request.user)) - except: - # to avoid unittests to fail - pass path = request.path_info.lstrip("/") from dojo.models import Dojo_User if Dojo_User.force_password_reset(request.user) and path != "change_password": diff --git a/dojo/models.py b/dojo/models.py index fe48896daa6..99074a9cf3b 100644 --- a/dojo/models.py +++ b/dojo/models.py @@ -1619,7 +1619,7 @@ class Meta: ] def __str__(self): - return f"'{str(self.finding)}' on '{str(self.endpoint)}'" + return f"'{self.finding}' on '{self.endpoint}'" def copy(self, finding=None): copy = self diff --git a/dojo/pipeline.py b/dojo/pipeline.py index befabc0e836..91dc1500089 100644 --- a/dojo/pipeline.py +++ b/dojo/pipeline.py @@ -107,7 +107,7 @@ def update_azure_groups(backend, uid, user=None, social=None, *args, **kwargs): def is_group_id(group): - return bool(re.search("^[a-zA-Z0-9]{8,}-[a-zA-Z0-9]{4,}-[a-zA-Z0-9]{4,}-[a-zA-Z0-9]{4,}-[a-zA-Z0-9]{12,}$", group)) + return bool(re.search(r"^[a-zA-Z0-9]{8,}-[a-zA-Z0-9]{4,}-[a-zA-Z0-9]{4,}-[a-zA-Z0-9]{4,}-[a-zA-Z0-9]{12,}$", group)) def assign_user_to_groups(user, group_names, social_provider): diff --git a/dojo/product/helpers.py b/dojo/product/helpers.py index 13c512c9c90..d8285cfb92b 100644 --- a/dojo/product/helpers.py +++ b/dojo/product/helpers.py @@ -54,5 +54,5 @@ def propagate_tags_on_product_sync(product): def propagate_tags_on_object_list(object_list): for obj in object_list: if obj and obj.id is not None: - logger.debug(f"\tPropagating tags to {str(type(obj))} - {str(obj)}") + logger.debug(f"\tPropagating tags to {type(obj)} - {obj}") obj.save() diff --git a/dojo/product/views.py b/dojo/product/views.py index 8c20b50627a..654169363dc 100644 --- a/dojo/product/views.py +++ b/dojo/product/views.py @@ -1,7 +1,6 @@ # # product import base64 import calendar as tcalendar -import contextlib import logging from collections import OrderedDict from datetime import date, datetime, timedelta @@ -958,8 +957,7 @@ def edit_product(request, pid): if get_system_setting("enable_github") and github_inst: gform = GITHUB_Product_Form(request.POST, instance=github_inst) - # need to handle delete - with contextlib.suppress(Exception): + if gform.is_valid(): gform.save() elif get_system_setting("enable_github"): gform = GITHUB_Product_Form(request.POST) diff --git a/dojo/product_type/views.py b/dojo/product_type/views.py index 8d731245ddc..e011ee4fb93 100644 --- a/dojo/product_type/views.py +++ b/dojo/product_type/views.py @@ -13,7 +13,7 @@ from dojo.authorization.authorization import user_has_permission from dojo.authorization.authorization_decorators import user_has_global_permission, user_is_authorized from dojo.authorization.roles_permissions import Permissions -from dojo.filters import ProductTypeFilter +from dojo.filters import ProductFilter, ProductFilterWithoutObjectLookups, ProductTypeFilter from dojo.forms import ( Add_Product_Type_GroupForm, Add_Product_Type_MemberForm, @@ -38,6 +38,7 @@ async_delete, get_page_items, get_setting, + get_system_setting, is_title_in_breadcrumbs, ) @@ -51,7 +52,6 @@ def product_type(request): - prod_types = get_authorized_product_types(Permissions.Product_Type_View) name_words = prod_types.values_list("name", flat=True) @@ -123,12 +123,17 @@ def view_product_type(request, ptid): groups = get_authorized_groups_for_product_type(pt, Permissions.Product_Type_View) global_groups = get_authorized_global_groups_for_product_type(pt, Permissions.Product_Type_View) products = get_authorized_products(Permissions.Product_View).filter(prod_type=pt) - 
products = get_page_items(request, products, 25) + filter_string_matching = get_system_setting("filter_string_matching", False) + filter_class = ProductFilterWithoutObjectLookups if filter_string_matching else ProductFilter + prod_filter = filter_class(request.GET, queryset=products, user=request.user) + products = get_page_items(request, prod_filter.qs, 25) + add_breadcrumb(title=page_name, top_level=False, request=request) return render(request, "dojo/view_product_type.html", { "name": page_name, "pt": pt, "products": products, + "prod_filter": prod_filter, "groups": groups, "members": members, "global_groups": global_groups, diff --git a/dojo/reports/views.py b/dojo/reports/views.py index f258db9db2f..061476efe1b 100644 --- a/dojo/reports/views.py +++ b/dojo/reports/views.py @@ -876,7 +876,7 @@ def get(self, request): num_endpoints = 0 for endpoint in finding.endpoints.all(): num_endpoints += 1 - endpoint_value += f"{str(endpoint)}; " + endpoint_value += f"{endpoint}; " endpoint_value = endpoint_value.removesuffix("; ") if len(endpoint_value) > EXCEL_CHAR_LIMIT: endpoint_value = endpoint_value[:EXCEL_CHAR_LIMIT - 3] + "..." @@ -889,7 +889,7 @@ def get(self, request): if num_vulnerability_ids > 5: vulnerability_ids_value += "..." break - vulnerability_ids_value += f"{str(vulnerability_id)}; " + vulnerability_ids_value += f"{vulnerability_id}; " if finding.cve and vulnerability_ids_value.find(finding.cve) < 0: vulnerability_ids_value += finding.cve vulnerability_ids_value = vulnerability_ids_value.removesuffix("; ") @@ -902,7 +902,7 @@ def get(self, request): if num_tags > 5: tags_value += "..." break - tags_value += f"{str(tag)}; " + tags_value += f"{tag}; " tags_value = tags_value.removesuffix("; ") fields.append(tags_value) @@ -1025,7 +1025,7 @@ def get(self, request): num_endpoints = 0 for endpoint in finding.endpoints.all(): num_endpoints += 1 - endpoint_value += f"{str(endpoint)}; \n" + endpoint_value += f"{endpoint}; \n" endpoint_value = endpoint_value.removesuffix("; \n") if len(endpoint_value) > EXCEL_CHAR_LIMIT: endpoint_value = endpoint_value[:EXCEL_CHAR_LIMIT - 3] + "..." @@ -1039,7 +1039,7 @@ def get(self, request): if num_vulnerability_ids > 5: vulnerability_ids_value += "..." break - vulnerability_ids_value += f"{str(vulnerability_id)}; \n" + vulnerability_ids_value += f"{vulnerability_id}; \n" if finding.cve and vulnerability_ids_value.find(finding.cve) < 0: vulnerability_ids_value += finding.cve vulnerability_ids_value = vulnerability_ids_value.removesuffix("; \n") @@ -1048,7 +1048,7 @@ def get(self, request): # tags tags_value = "" for tag in finding.tags.all(): - tags_value += f"{str(tag)}; \n" + tags_value += f"{tag}; \n" tags_value = tags_value.removesuffix("; \n") worksheet.cell(row=row_num, column=col_num, value=tags_value) col_num += 1 diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 483688dcd4e..1d33fe8dfda 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1770,6 +1770,7 @@ def saml2_attrib_map_format(dict): "KHV": "https://avd.aquasec.com/misconfig/kubernetes/", # e.g. https://avd.aquasec.com/misconfig/kubernetes/khv045 "CAPEC": "https://capec.mitre.org/data/definitions/&&.html", # e.g. https://capec.mitre.org/data/definitions/157.html "CWE": "https://cwe.mitre.org/data/definitions/&&.html", # e.g. https://cwe.mitre.org/data/definitions/79.html + "GLSA": "https://security.gentoo.org/", # e.g. https://security.gentoo.org/glsa/202409-32 "RLSA": "https://errata.rockylinux.org/", # e.g. 
https://errata.rockylinux.org/RLSA-2024:7001 "RLBA": "https://errata.rockylinux.org/", # e.g. https://errata.rockylinux.org/RLBA-2024:6968 } diff --git a/dojo/templates/dojo/view_product_type.html b/dojo/templates/dojo/view_product_type.html index 70e5058350a..4cff7efa44f 100644 --- a/dojo/templates/dojo/view_product_type.html +++ b/dojo/templates/dojo/view_product_type.html @@ -54,24 +54,28 @@
 {% trans "Description" %}
 {% trans "Products" %}
-{% if pt|has_object_permission:"Product_Type_Add_Product" %}
-{% endif %}
+
+{% include "dojo/filter_snippet.html" with form=prod_filter.form %}
+
 {% if products %}
 {% include "dojo/paging_snippet.html" with page=products page_size=True %}
@@ -325,3 +329,6 @@
{% endblock %} +{% block postscript %} + {% include "dojo/filter_js_snippet.html" %} +{% endblock %} \ No newline at end of file diff --git a/dojo/templatetags/display_tags.py b/dojo/templatetags/display_tags.py index df0ed46f538..94bcf80d6e5 100644 --- a/dojo/templatetags/display_tags.py +++ b/dojo/templatetags/display_tags.py @@ -431,13 +431,12 @@ def pic_token(context, image, size): @register.filter def inline_image(image_file): - try: - if img_type := mimetypes.guess_type(image_file.file.name)[0]: - if img_type.startswith("image/"): - img_data = base64.b64encode(image_file.file.read()) - return f"data:{img_type};base64, {img_data.decode('utf-8')}" - except: - pass + # TODO: This code might need better exception handling or data processing + if img_types := mimetypes.guess_type(image_file.file.name): + img_type = img_types[0] + if img_type.startswith("image/"): + img_data = base64.b64encode(image_file.file.read()) + return f"data:{img_type};base64, {img_data.decode('utf-8')}" return "" @@ -777,6 +776,8 @@ def vulnerability_url(vulnerability_id): for key in settings.VULNERABILITY_URLS: if vulnerability_id.upper().startswith(key): + if key == "GLSA": + return settings.VULNERABILITY_URLS[key] + str(vulnerability_id.replace("GLSA-", "glsa/")) if key in ["AVD", "KHV", "C-"]: return settings.VULNERABILITY_URLS[key] + str(vulnerability_id.lower()) if "&&" in settings.VULNERABILITY_URLS[key]: diff --git a/dojo/tools/api_bugcrowd/importer.py b/dojo/tools/api_bugcrowd/importer.py index e47dba8a409..d83d1edf3cd 100644 --- a/dojo/tools/api_bugcrowd/importer.py +++ b/dojo/tools/api_bugcrowd/importer.py @@ -16,7 +16,7 @@ class BugcrowdApiImporter: def get_findings(self, test): client, config = self.prepare_client(test) logger.debug( - f"Fetching submissions program {str(config.service_key_1)} and target {str(config.service_key_2)}", + f"Fetching submissions program {config.service_key_1} and target {config.service_key_2}", ) submissions_paged = client.get_findings( diff --git a/dojo/tools/api_bugcrowd/parser.py b/dojo/tools/api_bugcrowd/parser.py index da06880fd27..1bb5a28bd37 100644 --- a/dojo/tools/api_bugcrowd/parser.py +++ b/dojo/tools/api_bugcrowd/parser.py @@ -155,7 +155,7 @@ def get_findings(self, file, test): finding.unsaved_endpoints = [bug_endpoint] except Exception as e: logger.error( - f"{str(bug_endpoint)} bug url from bugcrowd failed to parse to endpoint, error= {e}", + f"{bug_endpoint} bug url from bugcrowd failed to parse to endpoint, error= {e}", ) except ValidationError: logger.error( diff --git a/dojo/tools/appcheck_web_application_scanner/engines/base.py b/dojo/tools/appcheck_web_application_scanner/engines/base.py index e07433c2946..84523b90435 100644 --- a/dojo/tools/appcheck_web_application_scanner/engines/base.py +++ b/dojo/tools/appcheck_web_application_scanner/engines/base.py @@ -205,7 +205,7 @@ def parse_initial_date(self, finding: Finding, value: str) -> None: ##### # For parsing CVEs ##### - CVE_PATTERN = re.compile("CVE-[0-9]+-[0-9]+", re.IGNORECASE) + CVE_PATTERN = re.compile(r"CVE-[0-9]+-[0-9]+", re.IGNORECASE) def is_cve(self, c: str) -> bool: return bool(c and isinstance(c, str) and self.CVE_PATTERN.fullmatch(c)) diff --git a/dojo/tools/blackduck/parser.py b/dojo/tools/blackduck/parser.py index a6a127fcdb3..30954bc8d87 100644 --- a/dojo/tools/blackduck/parser.py +++ b/dojo/tools/blackduck/parser.py @@ -89,10 +89,10 @@ def format_title(self, i): return f"{i.vuln_id} - {component_title}" def format_description(self, i): - description = f"Published on: 
{str(i.published_date)}\n\n" - description += f"Updated on: {str(i.updated_date)}\n\n" - description += f"Base score: {str(i.base_score)}\n\n" - description += f"Exploitability: {str(i.exploitability)}\n\n" + description = f"Published on: {i.published_date}\n\n" + description += f"Updated on: {i.updated_date}\n\n" + description += f"Base score: {i.base_score}\n\n" + description += f"Exploitability: {i.exploitability}\n\n" description += f"Description: {i.description}\n" return description diff --git a/dojo/tools/blackduck_binary_analysis/parser.py b/dojo/tools/blackduck_binary_analysis/parser.py index 77f9647fc6f..b0ccd0b9642 100644 --- a/dojo/tools/blackduck_binary_analysis/parser.py +++ b/dojo/tools/blackduck_binary_analysis/parser.py @@ -115,30 +115,30 @@ def format_title(self, i): return title def format_description(self, i): - description = f"CSV Result: {str(i.report_name)}\n" - description += f"Vulnerable Component: {str(i.component)}\n" - description += f"Vulnerable Component Version in Use: {str(i.version)}\n" - description += f"Vulnerable Component Latest Version: {str(i.latest_version)}\n" - description += f"Matching Type: {str(i.matching_type)}\n" - description += f"Object Name: {str(i.object_name)}\n" - description += f"Object Extraction Path: {str(i.object_full_path)}\n" - description += f"Object Compilation Date: {str(i.object_compilation_date)}\n" - description += f"Object SHA1: {str(i.object_sha1)}\n" - description += f"CVE: {str(i.cve)}\n" - description += f"CVE Publication Date: {str(i.cve_publication_date)}\n" - description += f"Distribution Package: {str(i.distribution_package)}\n" - description += f"Missing Exploit Mitigations: {str(i.missing_exploit_mitigations)}\n" - description += f"BDSA: {str(i.bdsa)}\n" - description += f"Summary:\n{str(i.summary)}\n" - description += f"Note Type:\n{str(i.note_type)}\n" - description += f"Note Reason:\n{str(i.note_reason)}\n" - description += f"Triage Vectors:\n{str(i.triage_vectors)}\n" - description += f"Unresolving Triage Vectors:\n{str(i.triage_vectors)}\n" + description = f"CSV Result: {i.report_name}\n" + description += f"Vulnerable Component: {i.component}\n" + description += f"Vulnerable Component Version in Use: {i.version}\n" + description += f"Vulnerable Component Latest Version: {i.latest_version}\n" + description += f"Matching Type: {i.matching_type}\n" + description += f"Object Name: {i.object_name}\n" + description += f"Object Extraction Path: {i.object_full_path}\n" + description += f"Object Compilation Date: {i.object_compilation_date}\n" + description += f"Object SHA1: {i.object_sha1}\n" + description += f"CVE: {i.cve}\n" + description += f"CVE Publication Date: {i.cve_publication_date}\n" + description += f"Distribution Package: {i.distribution_package}\n" + description += f"Missing Exploit Mitigations: {i.missing_exploit_mitigations}\n" + description += f"BDSA: {i.bdsa}\n" + description += f"Summary:\n{i.summary}\n" + description += f"Note Type:\n{i.note_type}\n" + description += f"Note Reason:\n{i.note_reason}\n" + description += f"Triage Vectors:\n{i.triage_vectors}\n" + description += f"Unresolving Triage Vectors:\n{i.triage_vectors}\n" return description def format_mitigation(self, i): - return f"Upgrade {str(i.component)} to latest version: {str(i.latest_version)}.\n" + return f"Upgrade {i.component} to latest version: {i.latest_version}.\n" def format_impact(self, i): impact = "The use of vulnerable third-party open source software in applications can have numerous negative impacts:\n\n" @@ -150,7 +150,7 
@@ def format_impact(self, i): return impact def format_references(self, i): - references = f"BDSA: {str(i.bdsa)}\n" - references += f"NIST CVE Details: {str(i.vulnerability_url)}\n" + references = f"BDSA: {i.bdsa}\n" + references += f"NIST CVE Details: {i.vulnerability_url}\n" return references diff --git a/dojo/tools/burp_enterprise/parser.py b/dojo/tools/burp_enterprise/parser.py index 052d8a80f84..58b2a5a6ea6 100644 --- a/dojo/tools/burp_enterprise/parser.py +++ b/dojo/tools/burp_enterprise/parser.py @@ -162,7 +162,7 @@ def _set_or_append_content(self, finding_details: dict, header: str, div_element cleaned_item = item.split(":")[0] if ( finding_details["cwe"] is None - and (cwe_search := re.search("CWE-([0-9]*)", cleaned_item, re.IGNORECASE)) + and (cwe_search := re.search(r"CWE-([0-9]*)", cleaned_item, re.IGNORECASE)) ): finding_details["cwe"] = int(cwe_search.group(1)) if "vulnerability_ids" not in finding_details: diff --git a/dojo/tools/burp_graphql/parser.py b/dojo/tools/burp_graphql/parser.py index 9b37760e2a8..11df852dc54 100644 --- a/dojo/tools/burp_graphql/parser.py +++ b/dojo/tools/burp_graphql/parser.py @@ -219,7 +219,7 @@ def parse_evidence(self, evidence): def get_cwe(self, cwe_html): # Match only the first CWE! - cweSearch = re.search("CWE-([0-9]*)", cwe_html, re.IGNORECASE) + cweSearch = re.search(r"CWE-([0-9]*)", cwe_html, re.IGNORECASE) if cweSearch: return cweSearch.group(1) return 0 diff --git a/dojo/tools/crashtest_security/parser.py b/dojo/tools/crashtest_security/parser.py index deedb916b81..a12c194723a 100644 --- a/dojo/tools/crashtest_security/parser.py +++ b/dojo/tools/crashtest_security/parser.py @@ -185,7 +185,7 @@ def get_items(self, tree, test): title = re.sub(r" \([0-9]*\)$", "", title) # Attache CVEs - vulnerability_id = re.findall("CVE-\\d{4}-\\d{4,10}", title)[0] if "CVE" in title else None + vulnerability_id = re.findall(r"CVE-\d{4}-\d{4,10}", title)[0] if "CVE" in title else None description = failure.get("message") severity = failure.get("type").capitalize() diff --git a/dojo/tools/cyclonedx/xml_parser.py b/dojo/tools/cyclonedx/xml_parser.py index 70682c0c6a8..55aa4995356 100644 --- a/dojo/tools/cyclonedx/xml_parser.py +++ b/dojo/tools/cyclonedx/xml_parser.py @@ -104,7 +104,7 @@ def manage_vulnerability_legacy( [ f"**Ref:** {ref}", f"**Id:** {vuln_id}", - f"**Severity:** {str(severity)}", + f"**Severity:** {severity}", ], ) if component_name is None: diff --git a/dojo/tools/gitlab_api_fuzzing/parser.py b/dojo/tools/gitlab_api_fuzzing/parser.py index c536dc00205..1095d21a657 100644 --- a/dojo/tools/gitlab_api_fuzzing/parser.py +++ b/dojo/tools/gitlab_api_fuzzing/parser.py @@ -28,12 +28,11 @@ def get_findings(self, file, test): title = vulnerability["name"] severity = self.normalise_severity(vulnerability["severity"]) description = vulnerability.get("category", "") - try: - location = vulnerability["location"] - description += "\n" + location["crash_type"] - description += "\n" + location["crash_state"] - except: - pass + if location := vulnerability.get("location"): + if crash_type := location.get("crash_type"): + description += f"\n{crash_type}" + if crash_state := location.get("crash_state"): + description += f"\n{crash_state}" findings.append( Finding( title=title, diff --git a/dojo/tools/gosec/parser.py b/dojo/tools/gosec/parser.py index 20ccbcae062..d7e32f46a85 100644 --- a/dojo/tools/gosec/parser.py +++ b/dojo/tools/gosec/parser.py @@ -34,7 +34,7 @@ def get_findings(self, filename, test): # Finding details information findingdetail += 
f"Filename: {filename}\n\n" - findingdetail += f"Line number: {str(line)}\n\n" + findingdetail += f"Line number: {line}\n\n" findingdetail += f"Issue Confidence: {scanner_confidence}\n\n" findingdetail += "Code:\n\n" findingdetail += "```{}```".format(item["code"]) diff --git a/dojo/tools/h1/parser.py b/dojo/tools/h1/parser.py index 62072f5eb27..772700f3176 100644 --- a/dojo/tools/h1/parser.py +++ b/dojo/tools/h1/parser.py @@ -118,11 +118,8 @@ def build_description(self, content): description += f"Triaged: {triaged_date}\n" # Try to grab CVSS - try: - cvss = content["relationships"]["severity"]["data"]["attributes"]["score"] + if cvss := content.get("relationships", {}).get("severity", {}).get("data", {}).get("attributes", {}).get("score"): description += f"CVSS: {cvss}\n" - except Exception: - pass # Build rest of description meat description += "##Report: \n{}\n".format( @@ -130,12 +127,9 @@ def build_description(self, content): ) # Try to grab weakness if it's there - try: - weakness_title = content["relationships"]["weakness"]["data"]["attributes"]["name"] - weakness_desc = content["relationships"]["weakness"]["data"]["attributes"]["description"] - description += f"\n##Weakness: {weakness_title}\n{weakness_desc}" - except Exception: - pass + if weakness_title := content.get("relationships", {}).get("weakness", {}).get("data", {}).get("attributes", {}).get("name"): + if weakness_desc := content.get("relationships", {}).get("weakness", {}).get("data", {}).get("attributes", {}).get("description"): + description += f"\n##Weakness: {weakness_title}\n{weakness_desc}" return description diff --git a/dojo/tools/kiuwan/parser.py b/dojo/tools/kiuwan/parser.py index 34601b05aae..1caeb78c803 100644 --- a/dojo/tools/kiuwan/parser.py +++ b/dojo/tools/kiuwan/parser.py @@ -1,4 +1,3 @@ -import contextlib import csv import hashlib import io @@ -105,8 +104,9 @@ def get_findings(self, filename, test): finding.mitigation = "Not provided!" finding.severity = findingdict["severity"] finding.static_finding = True - with contextlib.suppress(Exception): - finding.cwe = int(row["CWE"]) + if cwe := row.get("CWE"): + if cwe.isdigit(): + finding.cwe = int(cwe) if finding is not None: if finding.title is None: diff --git a/dojo/tools/microfocus_webinspect/parser.py b/dojo/tools/microfocus_webinspect/parser.py index bf4475580d0..df1b4f84bac 100644 --- a/dojo/tools/microfocus_webinspect/parser.py +++ b/dojo/tools/microfocus_webinspect/parser.py @@ -111,7 +111,7 @@ def convert_severity(val): @staticmethod def get_cwe(val): # Match only the first CWE! 
- cweSearch = re.search("CWE-(\\d+)", val, re.IGNORECASE) + cweSearch = re.search(r"CWE-(\d+)", val, re.IGNORECASE) if cweSearch: return int(cweSearch.group(1)) return 0 diff --git a/dojo/tools/nexpose/parser.py b/dojo/tools/nexpose/parser.py index d6b63c66c8a..08916d42901 100644 --- a/dojo/tools/nexpose/parser.py +++ b/dojo/tools/nexpose/parser.py @@ -265,7 +265,7 @@ def get_items(self, tree, vulns, test): "severity": "Info", "tags": [ re.sub( - "[^A-Za-z0-9]+", + r"[^A-Za-z0-9]+", "-", service.get("name").lower(), ).rstrip("-"), diff --git a/dojo/tools/npm_audit/parser.py b/dojo/tools/npm_audit/parser.py index 6296477a971..186f133e6ab 100644 --- a/dojo/tools/npm_audit/parser.py +++ b/dojo/tools/npm_audit/parser.py @@ -66,7 +66,7 @@ def censor_path_hashes(path): if not path: return None - return re.sub("[a-f0-9]{64}", "censored_by_npm_audit", path) + return re.sub(r"[a-f0-9]{64}", "censored_by_npm_audit", path) def get_item(item_node, test): diff --git a/dojo/tools/qualys_webapp/parser.py b/dojo/tools/qualys_webapp/parser.py index 825d55b531a..989e5ba48cd 100644 --- a/dojo/tools/qualys_webapp/parser.py +++ b/dojo/tools/qualys_webapp/parser.py @@ -34,7 +34,7 @@ def truncate_str(value: str, maxlen: int): # Parse 'CWE-XXXX' format to strip just the numbers def get_cwe(cwe): - cweSearch = re.search("CWE-([0-9]*)", cwe, re.IGNORECASE) + cweSearch = re.search(r"CWE-([0-9]*)", cwe, re.IGNORECASE) if cweSearch: return cweSearch.group(1) return 0 diff --git a/dojo/tools/sarif/parser.py b/dojo/tools/sarif/parser.py index aa3d878ffb4..4c539583564 100644 --- a/dojo/tools/sarif/parser.py +++ b/dojo/tools/sarif/parser.py @@ -156,7 +156,7 @@ def get_message_from_multiformatMessageString(data, rule): def cve_try(val): # Match only the first CVE! - cveSearch = re.search("(CVE-[0-9]+-[0-9]+)", val, re.IGNORECASE) + cveSearch = re.search(r"(CVE-[0-9]+-[0-9]+)", val, re.IGNORECASE) if cveSearch: return cveSearch.group(1).upper() return None @@ -241,10 +241,10 @@ def get_codeFlowsDescription(codeFlows): snippet = "" if "startLine" in region: - start_line = f":L{str(region.get('startLine'))}" + start_line = f":L{region.get('startLine')}" if "startColumn" in region: - start_column = f":C{str(region.get('startColumn'))}" + start_column = f":C{region.get('startColumn')}" if "snippet" in region: snippet = f"\t-\t{region.get('snippet').get('text')}" diff --git a/dojo/tools/sonarqube/soprasteria_helper.py b/dojo/tools/sonarqube/soprasteria_helper.py index 2e7259e6376..63b59607e6a 100644 --- a/dojo/tools/sonarqube/soprasteria_helper.py +++ b/dojo/tools/sonarqube/soprasteria_helper.py @@ -41,7 +41,7 @@ def get_references(self, rule_name, vuln_details): def get_cwe(self, vuln_references): # Match only the first CWE! 
- cweSearch = re.search("CWE-([0-9]*)", vuln_references, re.IGNORECASE) + cweSearch = re.search(r"CWE-([0-9]*)", vuln_references, re.IGNORECASE) if cweSearch: return cweSearch.group(1) return 0 diff --git a/dojo/tools/tenable/xml_format.py b/dojo/tools/tenable/xml_format.py index ae63151ec5a..045a17e0c37 100644 --- a/dojo/tools/tenable/xml_format.py +++ b/dojo/tools/tenable/xml_format.py @@ -112,8 +112,8 @@ def get_findings(self, filename: str, test: Test) -> list: item.find("plugin_output"), ) if plugin_output_element_text is not None: - plugin_output = f"Plugin Output: {ip}{str(f':{port}' if port is not None else '')}" - plugin_output += f"\n```\n{str(plugin_output_element_text)}\n```\n\n" + plugin_output = f"Plugin Output: {ip}{f':{port}' if port is not None else ''}" + plugin_output += f"\n```\n{plugin_output_element_text}\n```\n\n" description += plugin_output # Determine the severity diff --git a/dojo/tools/trivy_operator/uniform_vulnid.py b/dojo/tools/trivy_operator/uniform_vulnid.py index b3aae5055e4..b03ef9acbed 100644 --- a/dojo/tools/trivy_operator/uniform_vulnid.py +++ b/dojo/tools/trivy_operator/uniform_vulnid.py @@ -8,12 +8,12 @@ def return_uniformed_vulnid(self, vulnid): if "cve" in vulnid.lower(): return vulnid if "khv" in vulnid.lower(): - temp = re.compile("([a-zA-Z-_]+)([0-9]+)") + temp = re.compile(r"([a-zA-Z-_]+)([0-9]+)") number = str(temp.match(vulnid).groups()[1]).zfill(3) avd_category = str(temp.match(vulnid.lower()).groups()[0]) return avd_category.upper() + number if "ksv" in vulnid.lower() or "kcv" in vulnid.lower(): - temp = re.compile("([a-zA-Z-_]+)([0-9]+)") + temp = re.compile(r"([a-zA-Z-_]+)([0-9]+)") number = str(temp.match(vulnid).groups()[1]).zfill(4) avd_category = str(temp.match(vulnid.lower().replace("_", "").replace("-", "")).groups()[0].replace("avd", "")) return "AVD-" + avd_category.upper() + "-" + number diff --git a/dojo/tools/veracode/json_parser.py b/dojo/tools/veracode/json_parser.py index b873ada3531..df83cbb802e 100644 --- a/dojo/tools/veracode/json_parser.py +++ b/dojo/tools/veracode/json_parser.py @@ -85,13 +85,13 @@ def get_items(self, tree, test): if not finding: continue # Set the date of the finding from the report if it is present - try: + if finding_status := vuln.get("finding_status"): if settings.USE_FIRST_SEEN: - finding.date = parser.parse(vuln.get("finding_status", {}).get("first_found_date", "")) + if first_found_date := finding_status.get("first_found_date"): + finding.date = parser.parse(first_found_date) else: - finding.date = parser.parse(vuln.get("finding_status", {}).get("last_found_date", "")) - except Exception: - pass + if last_found_date := finding_status.get("last_found_date"): + finding.date = parser.parse(last_found_date) # Generate the description finding = self.parse_description(finding, vuln.get("description"), scan_type) finding.nb_occurences = vuln.get("count", 1) @@ -129,7 +129,7 @@ def create_finding_from_details(self, finding_details, scan_type, policy_violate if uncleaned_cvss.startswith(("CVSS:3.1/", "CVSS:3.0/")): finding.cvssv3 = CVSS3(str(uncleaned_cvss)).clean_vector(output_prefix=True) elif not uncleaned_cvss.startswith("CVSS"): - finding.cvssv3 = CVSS3(f"CVSS:3.1/{str(uncleaned_cvss)}").clean_vector(output_prefix=True) + finding.cvssv3 = CVSS3(f"CVSS:3.1/{uncleaned_cvss}").clean_vector(output_prefix=True) elif isinstance(uncleaned_cvss, float | int): finding.cvssv3_score = float(uncleaned_cvss) # Fill in extra info based on the scan type @@ -238,7 +238,7 @@ def add_sca_details(self, finding, 
finding_details, backup_title=None) -> Findin # See if the CVSS has already been set. If not, use the one here if not finding.cvssv3: if cvss_vector := cve_dict.get("cvss3", {}).get("vector"): - finding.cvssv3 = CVSS3(f"CVSS:3.1/{str(cvss_vector)}").clean_vector(output_prefix=True) + finding.cvssv3 = CVSS3(f"CVSS:3.1/{cvss_vector}").clean_vector(output_prefix=True) # Put the product ID in the metadata if product_id := finding_details.get("product_id"): finding.description += f"**Product ID**: {product_id}\n" diff --git a/dojo/tools/veracode/xml_parser.py b/dojo/tools/veracode/xml_parser.py index 17061402d6f..1e53b5545c4 100644 --- a/dojo/tools/veracode/xml_parser.py +++ b/dojo/tools/veracode/xml_parser.py @@ -271,7 +271,7 @@ def __xml_dynamic_flaw_to_finding( @staticmethod def _get_cwe(val): # Match only the first CWE! - cweSearch = re.search("CWE-(\\d+)", val, re.IGNORECASE) + cweSearch = re.search(r"CWE-(\d+)", val, re.IGNORECASE) if cweSearch: return int(cweSearch.group(1)) return None diff --git a/dojo/tools/wapiti/parser.py b/dojo/tools/wapiti/parser.py index 335281b9701..591ae3a390b 100644 --- a/dojo/tools/wapiti/parser.py +++ b/dojo/tools/wapiti/parser.py @@ -104,7 +104,7 @@ def get_findings(self, file, test): @staticmethod def get_cwe(val): # Match only the first CWE! - cweSearch = re.search("CWE-(\\d+)", val, re.IGNORECASE) + cweSearch = re.search(r"CWE-(\d+)", val, re.IGNORECASE) if cweSearch: return int(cweSearch.group(1)) return None diff --git a/dojo/user/validators.py b/dojo/user/validators.py index 83ee954419e..f6b665bc1c2 100644 --- a/dojo/user/validators.py +++ b/dojo/user/validators.py @@ -45,7 +45,7 @@ def get_help_text(self): class UppercaseValidator: def validate(self, password, user=None): - if not re.findall("[A-Z]", password) and get_system_setting("uppercase_character_required"): + if not re.findall(r"[A-Z]", password) and get_system_setting("uppercase_character_required"): raise ValidationError( self.get_help_text(), code="password_no_upper") @@ -57,7 +57,7 @@ def get_help_text(self): class LowercaseValidator: def validate(self, password, user=None): - if not re.findall("[a-z]", password) and get_system_setting("lowercase_character_required"): + if not re.findall(r"[a-z]", password) and get_system_setting("lowercase_character_required"): raise ValidationError( self.get_help_text(), code="password_no_lower") diff --git a/dojo/user/views.py b/dojo/user/views.py index 0f8914e4adf..44ba788253b 100644 --- a/dojo/user/views.py +++ b/dojo/user/views.py @@ -647,7 +647,7 @@ def clean(self): connection.open() connection.close() except Exception as e: - logger.error(f"SMTP Server Connection Failure: {str(e)}") + logger.error(f"SMTP Server Connection Failure: {e}") msg = "SMTP server is not configured correctly..." 
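The veracode JSON parser hunk above trades a blanket try/except for assignment expressions, so a missing key is simply skipped while a genuinely malformed date now surfaces instead of being swallowed. A sketch of the resulting pattern under the same assumptions about the report shape; `set_finding_date` and its arguments are illustrative names, and `use_first_seen` stands in for `settings.USE_FIRST_SEEN`:

from dateutil import parser  # python-dateutil, already pinned in requirements.txt

def set_finding_date(finding, vuln, *, use_first_seen):
    # Guard each lookup with a walrus expression instead of a broad
    # try/except: absent keys short-circuit the block, and no unrelated
    # exceptions are silently discarded.
    if finding_status := vuln.get("finding_status"):
        key = "first_found_date" if use_first_seen else "last_found_date"
        if found_date := finding_status.get(key):
            finding.date = parser.parse(found_date)
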
raise ValidationError(msg) diff --git a/helm/defectdojo/Chart.lock b/helm/defectdojo/Chart.lock index 7a0e49b95de..591ab3ae6e9 100644 --- a/helm/defectdojo/Chart.lock +++ b/helm/defectdojo/Chart.lock @@ -1,12 +1,12 @@ dependencies: - name: postgresql repository: https://charts.bitnami.com/bitnami - version: 16.2.0 + version: 16.3.0 - name: postgresql-ha repository: https://charts.bitnami.com/bitnami version: 9.4.11 - name: redis repository: https://charts.bitnami.com/bitnami version: 19.6.4 -digest: sha256:0d2e729a1b07543cb813f80f5d05c67ad56817f1b44911e08245e43868f49301 -generated: "2024-11-14T10:51:48.400717864Z" +digest: sha256:896db01c8521d42f6830a84190fb0a679afb2a999a79e3d82226d0b871f7778d +generated: "2024-12-11T06:49:40.425726453Z" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index ab66f338320..231c924c168 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -10,7 +10,7 @@ maintainers: url: https://github.com/DefectDojo/django-DefectDojo dependencies: - name: postgresql - version: ~16.2.0 + version: ~16.3.0 repository: "https://charts.bitnami.com/bitnami" condition: postgresql.enabled - name: postgresql-ha diff --git a/helm/defectdojo/values.yaml b/helm/defectdojo/values.yaml index 8cd5d0aca3b..0deb30aaff4 100644 --- a/helm/defectdojo/values.yaml +++ b/helm/defectdojo/values.yaml @@ -125,7 +125,7 @@ monitoring: # Add the nginx prometheus exporter sidecar prometheus: enabled: false - image: nginx/nginx-prometheus-exporter:1.3.0 + image: nginx/nginx-prometheus-exporter:1.4.0 imagePullPolicy: IfNotPresent annotations: {} @@ -478,7 +478,7 @@ cloudsql: image: # set repo and image tag of gce-proxy repository: gcr.io/cloudsql-docker/gce-proxy - tag: 1.37.2 + tag: 1.37.3 pullPolicy: IfNotPresent # set CloudSQL instance: 'project:zone:instancename' instance: "" diff --git a/requirements-lint.txt b/requirements-lint.txt index 6821d390595..25336e7513c 100644 --- a/requirements-lint.txt +++ b/requirements-lint.txt @@ -1 +1 @@ -ruff==0.7.4 +ruff==0.8.0 diff --git a/requirements.txt b/requirements.txt index f9c0a7d1c3e..20e31134df3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -21,7 +21,7 @@ django-slack==5.19.0 git+https://github.com/DefectDojo/django-tagging@develop#egg=django-tagging django-watson==1.6.3 django-prometheus==2.3.1 -Django==5.1.3 +Django==5.1.4 djangorestframework==3.15.2 html2text==2024.2.26 humanize==4.11.0 @@ -35,7 +35,7 @@ psycopg[c]==3.2.3 cryptography==44.0.0 python-dateutil==2.9.0.post0 pytz==2024.2 -redis==5.2.0 +redis==5.2.1 requests==2.32.3 sqlalchemy==2.0.36 # Required by Celery broker transport urllib3==1.26.18 @@ -63,13 +63,13 @@ django-fieldsignals==0.7.0 hyperlink==21.0.0 django-test-migrations==1.4.0 djangosaml2==1.9.3 -drf-spectacular==0.27.2 -drf-spectacular-sidecar==2024.11.1 +drf-spectacular==0.28.0 +drf-spectacular-sidecar==2024.12.1 django-ratelimit==4.1.0 argon2-cffi==23.1.0 blackduck==1.1.3 pycurl==7.45.3 # Required for Celery Broker AWS (SQS) support -boto3==1.35.71 # Required for Celery Broker AWS (SQS) support +boto3==1.35.78 # Required for Celery Broker AWS (SQS) support netaddr==1.3.0 vulners==2.2.3 fontawesomefree==6.6.0 diff --git a/ruff.toml b/ruff.toml index e9008490a55..12b556d5cf3 100644 --- a/ruff.toml +++ b/ruff.toml @@ -41,7 +41,7 @@ select = [ "UP", "YTT", "ASYNC", - "S2", "S5", "S7", "S101", "S104", "S105", "S106", "S108", "S311", "S112", "S113", + "S1", "S2", "S5", "S7", "S311", "FBT001", "FBT003", "A003", "A004", "A005", "A006", "COM", @@ -93,10 +93,8 @@ ignore = [ "SIM115", 
"SIM116", "SIM117", - "RUF010", "RUF012", "RUF015", - "RUF027", "D205", "D211", # `one-blank-line-before-class` (D203) and `no-blank-line-before-class` (D211) are incompatible. "D212", # `multi-line-summary-first-line` (D212) and `multi-line-summary-second-line` (D213) are incompatible. diff --git a/tests/Import_scanner_test.py b/tests/Import_scanner_test.py index 737b48bdcf0..eefe72d5b09 100644 --- a/tests/Import_scanner_test.py +++ b/tests/Import_scanner_test.py @@ -145,8 +145,8 @@ def test_engagement_import_scan_result(self): options_text = [scan.strip() for scan in options_text] mod_options = options_text - mod_options = [re.sub(" Scanner", "", scan) for scan in mod_options] - mod_options = [re.sub(" Scan", "", scan) for scan in mod_options] + mod_options = [re.sub(r" Scanner", "", scan) for scan in mod_options] + mod_options = [re.sub(r" Scan", "", scan) for scan in mod_options] mod_options = [scan.lower().replace("-", " ").replace(".", "") for scan in mod_options] acronyms = [] @@ -172,11 +172,8 @@ def test_engagement_import_scan_result(self): index = list(found_matches.keys())[0] scan_map[test] = options_text[index] elif len(found_matches) > 1: - try: - index = list(found_matches.values()).index(temp_test) - scan_map[test] = options_text[list(found_matches.keys())[index]] - except: - pass + index = list(found_matches.values()).index(temp_test) + scan_map[test] = options_text[list(found_matches.keys())[index]] failed_tests = [] for test in self.tests: @@ -199,7 +196,7 @@ def test_engagement_import_scan_result(self): driver.find_element(By.ID, "id_file").send_keys(test_location) driver.find_element(By.CSS_SELECTOR, "input.btn.btn-primary").click() EngagementTXT = "".join(driver.find_element(By.TAG_NAME, "BODY").text).split("\n") - reg = re.compile("processed, a total of") + reg = re.compile(r"processed, a total of") matches = list(filter(reg.search, EngagementTXT)) if len(matches) != 1: failed_tests += [test.upper() + " - " + case + ": Not imported"] diff --git a/tests/base_test_class.py b/tests/base_test_class.py index c4b056503a6..7fcc3a6f203 100644 --- a/tests/base_test_class.py +++ b/tests/base_test_class.py @@ -1,4 +1,3 @@ -import contextlib import logging import os import re @@ -238,11 +237,7 @@ def goto_all_findings_list(self, driver): return driver def wait_for_datatable_if_content(self, no_content_id, wrapper_id): - no_content = None - with contextlib.suppress(Exception): - no_content = self.driver.find_element(By.ID, no_content_id) - - if no_content is None: + if not self.is_element_by_id_present(no_content_id): # wait for product_wrapper div as datatables javascript modifies the DOM on page load. WebDriverWait(self.driver, 30).until( EC.presence_of_element_located((By.ID, wrapper_id)), @@ -338,7 +333,7 @@ def enable_github(self): def set_block_execution(self, block_execution=True): # we set the admin user (ourselves) to have block_execution checked # this will force dedupe to happen synchronously, among other things like notifications, rules, ... 
- logger.info(f"setting block execution to: {str(block_execution)}") + logger.info(f"setting block execution to: {block_execution}") driver = self.driver driver.get(self.base_url + "profile") if ( diff --git a/unittests/test_deduplication_logic.py b/unittests/test_deduplication_logic.py index ef1d91a0d53..319c0761312 100644 --- a/unittests/test_deduplication_logic.py +++ b/unittests/test_deduplication_logic.py @@ -1158,12 +1158,12 @@ def log_findings(self, findings): else: logger.debug("\t\t" + "findings:") for finding in findings: - logger.debug(f"\t\t\t{str(finding.id):4.4}" + ': "' + f"{finding.title:20.20}" + '": ' + f"{finding.severity:5.5}" + ": act: " + f"{str(finding.active):5.5}" - + ": ver: " + f"{str(finding.verified):5.5}" + ": mit: " + f"{str(finding.is_mitigated):5.5}" - + ": dup: " + f"{str(finding.duplicate):5.5}" + ": dup_id: " - + (f"{str(finding.duplicate_finding.id):4.4}" if finding.duplicate_finding else "None") + ": hash_code: " + str(finding.hash_code) + logger.debug(f"\t\t\t{finding.id!s:4.4}" + ': "' + f"{finding.title:20.20}" + '": ' + f"{finding.severity:5.5}" + ": act: " + f"{finding.active!s:5.5}" + + ": ver: " + f"{finding.verified!s:5.5}" + ": mit: " + f"{finding.is_mitigated!s:5.5}" + + ": dup: " + f"{finding.duplicate!s:5.5}" + ": dup_id: " + + (f"{finding.duplicate_finding.id!s:4.4}" if finding.duplicate_finding else "None") + ": hash_code: " + str(finding.hash_code) + ": eps: " + str(finding.endpoints.count()) + ": notes: " + str([n.id for n in finding.notes.all()]) - + ": uid: " + f"{str(finding.unique_id_from_tool):5.5}" + (" fp" if finding.false_p else ""), + + ": uid: " + f"{finding.unique_id_from_tool!s:5.5}" + (" fp" if finding.false_p else ""), ) logger.debug("\t\tendpoints") diff --git a/unittests/test_false_positive_history_logic.py b/unittests/test_false_positive_history_logic.py index c4d939fbc42..04fca655b58 100644 --- a/unittests/test_false_positive_history_logic.py +++ b/unittests/test_false_positive_history_logic.py @@ -1678,12 +1678,12 @@ def log_findings(self, findings): else: logger.debug("\t\t" + "findings:") for finding in findings: - logger.debug(f"\t\t\t{str(finding.id):4.4}" + ': "' + f"{finding.title:20.20}" + '": ' + f"{finding.severity:5.5}" + ": act: " + f"{str(finding.active):5.5}" - + ": ver: " + f"{str(finding.verified):5.5}" + ": mit: " + f"{str(finding.is_mitigated):5.5}" - + ": dup: " + f"{str(finding.duplicate):5.5}" + ": dup_id: " - + (f"{str(finding.duplicate_finding.id):4.4}" if finding.duplicate_finding else "None") + ": hash_code: " + str(finding.hash_code) + logger.debug(f"\t\t\t{finding.id!s:4.4}" + ': "' + f"{finding.title:20.20}" + '": ' + f"{finding.severity:5.5}" + ": act: " + f"{finding.active!s:5.5}" + + ": ver: " + f"{finding.verified!s:5.5}" + ": mit: " + f"{finding.is_mitigated!s:5.5}" + + ": dup: " + f"{finding.duplicate!s:5.5}" + ": dup_id: " + + (f"{finding.duplicate_finding.id!s:4.4}" if finding.duplicate_finding else "None") + ": hash_code: " + str(finding.hash_code) + ": eps: " + str(finding.endpoints.count()) + ": notes: " + str([n.id for n in finding.notes.all()]) - + ": uid: " + f"{str(finding.unique_id_from_tool):5.5}" + (" fp" if finding.false_p else ""), + + ": uid: " + f"{finding.unique_id_from_tool!s:5.5}" + (" fp" if finding.false_p else ""), ) logger.debug("\t\tendpoints") diff --git a/unittests/test_rest_framework.py b/unittests/test_rest_framework.py index 5a600315536..fa30780c922 100644 --- a/unittests/test_rest_framework.py +++ b/unittests/test_rest_framework.py @@ -1119,7 +1119,7 @@ 
def test_request_response_post_and_download(self): # Test the creation for level in self.url_levels: length = FileUpload.objects.count() - with open(f"{str(self.path)}/scans/acunetix/one_finding.xml", encoding="utf-8") as testfile: + with open(f"{self.path}/scans/acunetix/one_finding.xml", encoding="utf-8") as testfile: payload = { "title": level, "file": testfile, @@ -1131,7 +1131,7 @@ def test_request_response_post_and_download(self): self.url_levels[level] = response.data.get("id") # Test the download - file_data = Path(f"{str(self.path)}/scans/acunetix/one_finding.xml").read_text(encoding="utf-8") + file_data = Path(f"{self.path}/scans/acunetix/one_finding.xml").read_text(encoding="utf-8") for level, file_id in self.url_levels.items(): response = self.client.get(f"/api/v2/{level}/files/download/{file_id}/") self.assertEqual(200, response.status_code)
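The str() wrappers dropped around self.path in this last file are no-ops, because f-string interpolation already stringifies its operands, including pathlib.Path objects. A small demonstration; the path is invented, and the read_text call is left commented so the snippet runs anywhere:

from pathlib import Path

base = Path("scans/acunetix")
# Interpolating a Path calls str() on it implicitly, so the explicit
# str(...) removed by the hunk changed nothing.
assert f"{base}/one_finding.xml" == f"{str(base)}/one_finding.xml"

# Path.read_text() opens, reads, and closes in one call, matching the
# download check in the test above:
# data = Path(f"{base}/one_finding.xml").read_text(encoding="utf-8")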