diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 713480dd33d..ba1ba50d658 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -36,7 +36,7 @@ A clear and concise description of what you expected to happen.
 - DefectDojo version (see footer) or commit message: [use `git show -s --format="[%ci] %h: %s [%d]"`]
 
 **Logs**
-Use `docker-compose logs` (or similar, depending on your deployment method) to get the logs and add the relevant sections here showing the error occurring (if applicable).
+Use `docker compose logs` (or similar, depending on your deployment method) to get the logs and add the relevant sections here showing the error occurring (if applicable).
 
 **Sample scan files**
 If applicable, add sample scan files to help reproduce your problem.
diff --git a/.github/ISSUE_TEMPLATE/support_request.md b/.github/ISSUE_TEMPLATE/support_request.md
index 7eda2a58dea..f575ea0762d 100644
--- a/.github/ISSUE_TEMPLATE/support_request.md
+++ b/.github/ISSUE_TEMPLATE/support_request.md
@@ -7,7 +7,7 @@ assignees: ''
 ---
 
 **Slack us first!**
-The easiest and fastest way to help you is via Slack. There's a free and easy signup to join our #defectdojo channel in the OWASP Slack workspace: [Get Access.](https://owasp-slack.herokuapp.com/)
+The easiest and fastest way to help you is via Slack. There's a free and easy signup to join our #defectdojo channel in the OWASP Slack workspace: [Get Access.](https://owasp.org/slack/invite)
 If you're confident you've found a bug, or are allergic to Slack, you can submit an issue anyway.
 
 **Be informative**
@@ -36,7 +36,7 @@ A clear and concise description of what you expected to happen.
 - DefectDojo version (see footer) or commit message: [use `git show -s --format="[%ci] %h: %s [%d]"`]
 
 **Logs**
-Use `docker-compose logs` (or similar, depending on your deployment method) to get the logs and add the relevant sections here showing the error occurring (if applicable).
+Use `docker compose logs` (or similar, depending on your deployment method) to get the logs and add the relevant sections here showing the error occurring (if applicable).
 
 **Sample scan files**
 If applicable, add sample scan files to help reproduce your problem.
diff --git a/.github/workflows/build-docker-images-for-testing.yml b/.github/workflows/build-docker-images-for-testing.yml
index de040266a13..cd9c549494e 100644
--- a/.github/workflows/build-docker-images-for-testing.yml
+++ b/.github/workflows/build-docker-images-for-testing.yml
@@ -37,20 +37,20 @@ jobs:
         id: docker_build
         uses: docker/build-push-action@v6
         timeout-minutes: 10
+        env:
+          DOCKER_BUILD_CHECKS_ANNOTATIONS: false
         with:
           context: .
           push: false
           tags: defectdojo/defectdojo-${{ matrix.docker-image }}:${{ matrix.os }}
           file: Dockerfile.${{ matrix.docker-image }}-${{ matrix.os }}
           outputs: type=docker,dest=${{ matrix.docker-image }}-${{ matrix.os }}_img
-          cache-from: type=gha,scope=${{ matrix.docker-image }}
-          cache-to: type=gha,mode=max,scope=${{ matrix.docker-image }}
-
+
       # export docker images to be used in next jobs below
       - name: Upload image ${{ matrix.docker-image }} as artifact
         timeout-minutes: 10
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: ${{ matrix.docker-image }}
+          name: built-docker-image-${{ matrix.docker-image }}-${{ matrix.os }}
           path: ${{ matrix.docker-image }}-${{ matrix.os }}_img
-          retention-days: 1
\ No newline at end of file
+          retention-days: 1
diff --git a/.github/workflows/fetch-oas.yml b/.github/workflows/fetch-oas.yml
index 7928fadd9e8..5ec0aa9abad 100644
--- a/.github/workflows/fetch-oas.yml
+++ b/.github/workflows/fetch-oas.yml
@@ -51,7 +51,7 @@ jobs:
         run: docker compose down
 
       - name: Upload oas.${{ matrix.file-type }} as artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: oas-${{ matrix.file-type }}
           path: oas.${{ matrix.file-type }}
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
index 0b12d25a772..cd8d8072377 100644
--- a/.github/workflows/integration-tests.yml
+++ b/.github/workflows/integration-tests.yml
@@ -45,14 +45,18 @@ jobs:
 
       # load docker images from build jobs
       - name: Load images from artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
+        with:
+          path: built-docker-image
+          pattern: built-docker-image-*
+          merge-multiple: true
 
       - name: Load docker images
         timeout-minutes: 10
         run: |-
-          docker load -i nginx/nginx-${{ matrix.os }}_img
-          docker load -i django/django-${{ matrix.os }}_img
-          docker load -i integration-tests/integration-tests-debian_img
+          docker load -i built-docker-image/nginx-${{ matrix.os }}_img
+          docker load -i built-docker-image/django-${{ matrix.os }}_img
+          docker load -i built-docker-image/integration-tests-debian_img
           docker images
 
       - name: Set integration-test mode
diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml
index 01ff41728c6..a4feb77273f 100644
--- a/.github/workflows/k8s-tests.yml
+++ b/.github/workflows/k8s-tests.yml
@@ -28,16 +28,16 @@
         # are tested (https://docs.aws.amazon.com/eks/latest/userguide/kubernetes-versions.html#available-versions)
         - databases: pgsql
           brokers: redis
-          k8s: 'v1.26.11'
+          k8s: 'v1.30.3'
           os: debian
 
    steps:
      - name: Checkout
        uses: actions/checkout@v4
 
      - name: Setup Minikube
-        uses: manusa/actions-setup-minikube@v2.11.0
+        uses: manusa/actions-setup-minikube@v2.13.0
        with:
-          minikube version: 'v1.31.2'
+          minikube version: 'v1.33.1'
          kubernetes version: ${{ matrix.k8s }}
          driver: docker
          start args: '--addons=ingress --cni calico'
@@ -48,14 +48,18 @@
           minikube status
 
       - name: Load images from artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
+        with:
+          path: built-docker-image
+          pattern: built-docker-image-*
+          merge-multiple: true
 
       - name: Load docker images
         timeout-minutes: 10
         run: |-
           eval $(minikube docker-env)
-          docker load -i nginx/nginx-${{ matrix.os }}_img
-          docker load -i django/django-${{ matrix.os }}_img
+          docker load -i built-docker-image/nginx-${{ matrix.os }}_img
+          docker load -i built-docker-image/django-${{ matrix.os }}_img
           docker images
 
       - name: Configure HELM repos
@@ -70,11 +74,13 @@
           echo "pgsql=${{ env.HELM_PG_DATABASE_SETTINGS }}" >> $GITHUB_ENV
           echo "redis=${{ env.HELM_REDIS_BROKER_SETTINGS }}" >> $GITHUB_ENV
 
-      - name: Deploying Djano application with ${{ matrix.databases }} ${{ matrix.brokers }}
-        timeout-minutes: 10
+      - name: Deploying Django application with ${{ matrix.databases }} ${{ matrix.brokers }}
+        timeout-minutes: 15
         run: |-
           helm install \
             --timeout 800s \
+            --wait \
+            --wait-for-jobs \
             defectdojo \
             ./helm/defectdojo \
             --set django.ingress.enabled=true \
@@ -82,14 +88,14 @@
           ${{ env[matrix.databases] }} \
           ${{ env[matrix.brokers] }} \
           --set createSecret=true \
-          --set tag=${{ matrix.os }} \
-          # --set imagePullSecrets=defectdojoregistrykey
+          --set tag=${{ matrix.os }}
 
       - name: Check deployment status
+        if: always()
         run: |-
-          kubectl get pods
-          kubectl get ingress
-          kubectl get services
+          kubectl get all,ingress # all = pods, services, deployments, replicasets, statefulsets, jobs
+          helm status defectdojo
+          helm history defectdojo
 
       - name: Check Application
         timeout-minutes: 10
diff --git a/.github/workflows/release-3-master-into-dev.yml b/.github/workflows/release-3-master-into-dev.yml
index b5c8828ee16..cbd287d232e 100644
--- a/.github/workflows/release-3-master-into-dev.yml
+++ b/.github/workflows/release-3-master-into-dev.yml
@@ -50,11 +50,15 @@ jobs:
           CURRENT_CHART_VERSION=$(grep -oP 'version: (\K\S*)?' helm/defectdojo/Chart.yaml | head -1)
           sed -ri "0,/version/s/version: \S+/$(echo "version: $CURRENT_CHART_VERSION" | awk -F. -v OFS=. 'NF==1{print ++$NF}; NF>1{$NF=sprintf("%0*d", length($NF), ($NF+1)); print}')-dev/" helm/defectdojo/Chart.yaml
 
+      - name: Update settings SHA
+        run: sha256sum dojo/settings/settings.dist.py | cut -d ' ' -f1 > dojo/settings/.settings.dist.py.sha256sum
+
       - name: Check numbers
         run: |
           grep version dojo/__init__.py
           grep appVersion helm/defectdojo/Chart.yaml
           grep version components/package.json
+          cat dojo/settings/.settings.dist.py.sha256sum
 
       - name: Create upgrade notes to documentation
         run: |
@@ -132,11 +136,15 @@ jobs:
           CURRENT_CHART_VERSION=$(grep -oP 'version: (\K\S*)?' helm/defectdojo/Chart.yaml | head -1)
           sed -ri "0,/version/s/version: \S+/$(echo "version: $CURRENT_CHART_VERSION" | awk -F. -v OFS=. 'NF==1{print ++$NF}; NF>1{$NF=sprintf("%0*d", length($NF), ($NF+1)); print}')-dev/" helm/defectdojo/Chart.yaml
 
+      - name: Update settings SHA
+        run: sha256sum dojo/settings/settings.dist.py | cut -d ' ' -f1 > dojo/settings/.settings.dist.py.sha256sum
+
       - name: Check numbers
         run: |
           grep version dojo/__init__.py
           grep appVersion helm/defectdojo/Chart.yaml
           grep version components/package.json
+          cat dojo/settings/.settings.dist.py.sha256sum
 
       - name: Push version changes
         uses: stefanzweifel/git-auto-commit-action@v5.0.1
diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml
index d05cb191428..0e42769cd76 100644
--- a/.github/workflows/release-drafter.yml
+++ b/.github/workflows/release-drafter.yml
@@ -47,7 +47,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Load OAS files from artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
+        with:
+          pattern: oas-*
 
       - name: Upload Release Asset - OpenAPI Specification - YAML
         id: upload-release-asset-yaml
diff --git a/.github/workflows/release-x-manual-docker-containers.yml b/.github/workflows/release-x-manual-docker-containers.yml
index 6e167143783..6f8862b6216 100644
--- a/.github/workflows/release-x-manual-docker-containers.yml
+++ b/.github/workflows/release-x-manual-docker-containers.yml
@@ -49,22 +49,11 @@ jobs:
         id: buildx
         uses: docker/setup-buildx-action@v3
 
-      - name: Cache Docker layers
-        uses: actions/cache@v4
-        env:
-          docker-image: ${{ matrix.docker-image }}
-        with:
-          path: /tmp/.buildx-cache-${{ env.docker-image }}
-          key: ${{ runner.os }}-buildx-${{ env.docker-image }}-${{ matrix.os }}-${{ env.workflow_name }}-${{ github.sha }}-${{ github.run_id }}
-          restore-keys: |
-            ${{ runner.os }}-buildx-${{ env.docker-image }}-${{ matrix.os }}-${{ env.workflow_name}}-${{ github.sha }}
-            ${{ runner.os }}-buildx-${{ env.docker-image }}-${{ matrix.os }}-${{ env.workflow_name }}
-            ${{ runner.os }}-buildx-${{ env.docker-image }}-${{ matrix.os }}-
-
       - name: Build and push images with debian
         if: ${{ matrix.os == 'debian' }}
         uses: docker/build-push-action@v6
         env:
+          DOCKER_BUILD_CHECKS_ANNOTATIONS: false
           REPO_ORG: ${{ env.repoorg }}
           docker-image: ${{ matrix.docker-image }}
         with:
@@ -72,13 +61,12 @@ jobs:
           tags: ${{ env.REPO_ORG }}/defectdojo-${{ env.docker-image}}:${{ github.event.inputs.release_number }}-${{ matrix.os }}, ${{ env.REPO_ORG }}/defectdojo-${{ env.docker-image}}:${{ github.event.inputs.release_number }}, ${{ env.REPO_ORG }}/defectdojo-${{ env.docker-image}}:latest
           file: ./Dockerfile.${{ env.docker-image }}-${{ matrix.os }}
           context: .
-          cache-from: type=local,src=/tmp/.buildx-cache-${{ env.docker-image }}
-          cache-to: type=local,dest=/tmp/.buildx-cache-${{ env.docker-image }}
 
       - name: Build and push images with alpine
         if: ${{ matrix.os == 'alpine' }}
         uses: docker/build-push-action@v6
         env:
+          DOCKER_BUILD_CHECKS_ANNOTATIONS: false
           REPO_ORG: ${{ env.repoorg }}
           docker-image: ${{ matrix.docker-image }}
         with:
@@ -86,9 +74,3 @@ jobs:
           tags: ${{ env.REPO_ORG }}/defectdojo-${{ env.docker-image}}:${{ github.event.inputs.release_number }}-${{ matrix.os }}
           file: ./Dockerfile.${{ env.docker-image }}-${{ matrix.os }}
           context: .
-          cache-from: type=local,src=/tmp/.buildx-cache-${{ env.docker-image }}
-          cache-to: type=local,dest=/tmp/.buildx-cache-${{ env.docker-image }}
-#        platforms: ${{ matrix.platform }}
-
-      - name: Image digest
-        run: echo ${{ steps.docker_build.outputs.digest }}
diff --git a/.github/workflows/rest-framework-tests.yml b/.github/workflows/rest-framework-tests.yml
index 907ecf92968..bd8ca3322fa 100644
--- a/.github/workflows/rest-framework-tests.yml
+++ b/.github/workflows/rest-framework-tests.yml
@@ -20,13 +20,17 @@ jobs:
 
       # load docker images from build jobs
       - name: Load images from artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
+        with:
+          path: built-docker-image
+          pattern: built-docker-image-*
+          merge-multiple: true
 
       - name: Load docker images
         timeout-minutes: 10
         run: |-
-          docker load -i nginx/nginx-${{ matrix.os }}_img
-          docker load -i django/django-${{ matrix.os }}_img
+          docker load -i built-docker-image/nginx-${{ matrix.os }}_img
+          docker load -i built-docker-image/django-${{ matrix.os }}_img
           docker images
 
       # run tests with docker compose
@@ -34,8 +38,8 @@ jobs:
         run: docker/setEnv.sh unit_tests_cicd
 
       # phased startup so we can use the exit code from unit test container
-      - name: Start Postgres
-        run: docker compose up -d postgres
+      - name: Start Postgres and webhook.endpoint
+        run: docker compose up -d postgres webhook.endpoint
 
       # no celery or initializer needed for unit tests
       - name: Unit tests
diff --git a/Dockerfile.integration-tests-debian b/Dockerfile.integration-tests-debian
index 0ff85f7c2a0..28c77fc9765 100644
--- a/Dockerfile.integration-tests-debian
+++ b/Dockerfile.integration-tests-debian
@@ -1,7 +1,7 @@
 # code: language=Dockerfile
 
-FROM openapitools/openapi-generator-cli:v7.7.0@sha256:99924315933d49e7b33a7d2074bb2b64fc8def8f74519939036e24eb48f00336 AS openapitools
+FROM openapitools/openapi-generator-cli:v7.10.0@sha256:f2054a5a7908ad81017d0f0839514ba5eab06ae628914ff71554d46fac1bcf7a AS openapitools
 
 FROM python:3.11.9-slim-bookworm@sha256:8c1036ec919826052306dfb5286e4753ffd9d5f6c24fbc352a5399c3b405b57e AS build
 WORKDIR /app
 RUN \
@@ -25,8 +25,13 @@ RUN pip install --no-cache-dir selenium==4.9.0 requests
 
 # Install the latest Google Chrome stable release
 WORKDIR /opt/chrome
+
+# TODO: figure out whatever fix is necessary to use Chrome >= 128 and put this back in the RUN below so we stay
+# up-to-date
+# chrome_url=$(curl https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions-with-downloads.json | jq -r '.channels[] | select(.channel == "Stable") | .downloads.chrome[] | select(.platform == "linux64").url') && \
+
 RUN \
-    chrome_url=$(curl https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions-with-downloads.json | jq -r '.channels[] | select(.channel == "Stable") | .downloads.chrome[] | select(.platform == "linux64").url') && \
+    chrome_url="https://storage.googleapis.com/chrome-for-testing-public/127.0.6533.119/linux64/chrome-linux64.zip" && \
    wget $chrome_url && \
    unzip chrome-linux64.zip && \
    rm -rf chrome-linux64.zip && \
@@ -49,8 +54,12 @@ RUN apt-get install -y libxi6 libgconf-2-4 jq libjq1 libonig5 libxkbcommon0 libx
 
 # Installing the latest stable Google Chrome driver release
 WORKDIR /opt/chrome-driver
+# TODO: figure out whatever fix is necessary to use Chrome >= 128 and put this back in the RUN below so we stay
+# up-to-date
+# chromedriver_url=$(curl https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions-with-downloads.json | jq -r '.channels[] | select(.channel == "Stable") | .downloads.chromedriver[] | select(.platform == "linux64").url') && \
+
 RUN \
-    chromedriver_url=$(curl https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions-with-downloads.json | jq -r '.channels[] | select(.channel == "Stable") | .downloads.chromedriver[] | select(.platform == "linux64").url') && \
+    chromedriver_url="https://storage.googleapis.com/chrome-for-testing-public/127.0.6533.119/linux64/chromedriver-linux64.zip" && \
    wget $chromedriver_url && \
    unzip -j chromedriver-linux64.zip chromedriver-linux64/chromedriver && \
    rm -rf chromedriver-linux64.zip && \
diff --git a/Dockerfile.nginx-alpine b/Dockerfile.nginx-alpine
index 0528e63047b..17abb7c3f82 100644
--- a/Dockerfile.nginx-alpine
+++ b/Dockerfile.nginx-alpine
@@ -140,7 +140,7 @@ COPY manage.py ./
 COPY dojo/ ./dojo/
 RUN env DD_SECRET_KEY='.' python3 manage.py collectstatic --noinput && true
 
-FROM nginx:1.27.0-alpine@sha256:208b70eefac13ee9be00e486f79c695b15cef861c680527171a27d253d834be9
+FROM nginx:1.27.2-alpine@sha256:74175cf34632e88c6cfe206897cbfe2d2fecf9bf033c40e7f9775a3689e8adc7
 ARG uid=1001
 ARG appuser=defectdojo
 COPY --from=collectstatic /app/static/ /usr/share/nginx/html/static/
diff --git a/Dockerfile.nginx-debian b/Dockerfile.nginx-debian
index b07ce5407de..b062e28e10e 100644
--- a/Dockerfile.nginx-debian
+++ b/Dockerfile.nginx-debian
@@ -73,7 +73,7 @@ COPY dojo/ ./dojo/
 
 RUN env DD_SECRET_KEY='.' python3 manage.py collectstatic --noinput && true
 
-FROM nginx:1.27.0-alpine@sha256:208b70eefac13ee9be00e486f79c695b15cef861c680527171a27d253d834be9
+FROM nginx:1.27.2-alpine@sha256:74175cf34632e88c6cfe206897cbfe2d2fecf9bf033c40e7f9775a3689e8adc7
 ARG uid=1001
 ARG appuser=defectdojo
 COPY --from=collectstatic /app/static/ /usr/share/nginx/html/static/
diff --git a/NOTICE b/NOTICE
index 2b567f302e4..76c7f2c7673 100644
--- a/NOTICE
+++ b/NOTICE
@@ -1,7 +1,7 @@
 DefectDojo is licensed under the 3-Clause BSD License:
 https://github.com/DefectDojo/django-DefectDojo/blob/master/LICENSE.md
 
-However, DefectDojo’s dependencies may have different licensing requirements and terms.
+However, DefectDojo's dependencies may have different licensing requirements and terms.
 Complete source code for DefectDojo dependencies are made available on PyPi: https://pypi.org/
 
 THIRD-PARTY SOFTWARE NOTICES FOR DEFECTDOJO
diff --git a/README.md b/README.md
index b0a3bd12372..17d7bedfb3e 100644
--- a/README.md
+++ b/README.md
@@ -132,19 +132,18 @@ Core Moderators can help you with pull requests or feedback on dev ideas:
 * Cody Maffucci ([@Maffooch](https://github.com/maffooch) | [LinkedIn](https://www.linkedin.com/in/cody-maffucci))
 
 Moderators can help you with pull requests or feedback on dev ideas:
-* Damien Carol ([@damiencarol](https://github.com/damiencarol) | [LinkedIn](https://www.linkedin.com/in/damien-carol/))
-* Jannik Jürgens ([@alles-klar](https://github.com/alles-klar))
-* Dubravko Sever ([@dsever](https://github.com/dsever))
 * Charles Neill ([@cneill](https://github.com/cneill) | [@ccneill](https://twitter.com/ccneill))
 * Jay Paz ([@jjpaz](https://twitter.com/jjpaz))
 * Blake Owens ([@blakeaowens](https://github.com/blakeaowens))
 
 ## Hall of Fame
-
+* Jannik Jürgens ([@alles-klar](https://github.com/alles-klar)) - Jannik was a long time contributor and moderator for
+  DefectDojo and made significant contributions to many areas of the platform. Jannik was instrumental in pioneering
+  and optimizing deployment methods.
 * Valentijn Scholten ([@valentijnscholten](https://github.com/valentijnscholten) | [Sponsor](https://github.com/sponsors/valentijnscholten) | [LinkedIn](https://www.linkedin.com/in/valentijn-scholten/)) - Valentijn served as a core moderator for 3 years.
-  Valentijn’s contributions were numerous and extensive. He overhauled, improved, and optimized many parts of the
+  Valentijn's contributions were numerous and extensive. He overhauled, improved, and optimized many parts of the
   codebase. He consistently fielded questions, provided feedback on pull requests, and provided a helping hand wherever
   it was needed.
 * Fred Blaise ([@madchap](https://github.com/madchap) | [LinkedIn](https://www.linkedin.com/in/fredblaise/)) - Fred
diff --git a/components/package.json b/components/package.json
index b3d0236ec62..4517b35851f 100644
--- a/components/package.json
+++ b/components/package.json
@@ -1,6 +1,6 @@
 {
   "name": "defectdojo",
-  "version": "2.38.0-dev",
+  "version": "2.41.0-dev",
   "license" : "BSD-3-Clause",
   "private": true,
   "dependencies": {
@@ -26,7 +26,7 @@
     "google-code-prettify": "^1.0.0",
     "jquery": "^3.7.1",
     "jquery-highlight": "3.5.0",
-    "jquery-ui": "1.13.3",
+    "jquery-ui": "1.14.1",
     "jquery.cookie": "1.4.1",
     "jquery.flot.tooltip": "^0.9.0",
     "jquery.hotkeys": "jeresig/jquery.hotkeys#master",
@@ -35,7 +35,7 @@
     "metismenu": "~3.0.7",
     "moment": "^2.30.1",
     "morris.js": "morrisjs/morris.js",
-    "pdfmake": "^0.2.11",
+    "pdfmake": "^0.2.15",
     "startbootstrap-sb-admin-2": "1.0.7"
   },
   "engines": {
diff --git a/components/yarn.lock b/components/yarn.lock
index c02334ce6fd..26479c39938 100644
--- a/components/yarn.lock
+++ b/components/yarn.lock
@@ -2,15 +2,13 @@
 # yarn lockfile v1
 
 
-"@foliojs-fork/fontkit@^1.9.1":
-  version "1.9.1"
-  resolved "https://registry.yarnpkg.com/@foliojs-fork/fontkit/-/fontkit-1.9.1.tgz#8124649168eb5273f580f66697a139fb5041296b"
-  integrity sha512-U589voc2/ROnvx1CyH9aNzOQWJp127JGU1QAylXGQ7LoEAF6hMmahZLQ4eqAcgHUw+uyW4PjtCItq9qudPkK3A==
+"@foliojs-fork/fontkit@^1.9.2":
+  version "1.9.2"
+  resolved "https://registry.yarnpkg.com/@foliojs-fork/fontkit/-/fontkit-1.9.2.tgz#94241c195bc6204157bc84c33f34bdc967eca9c3"
+  integrity sha512-IfB5EiIb+GZk+77TRB86AHroVaqfq8JRFlUbz0WEwsInyCG0epX2tCPOy+UfaWPju30DeVoUAXfzWXmhn753KA==
   dependencies:
     "@foliojs-fork/restructure" "^2.0.2"
-    brfs "^2.0.0"
     brotli "^1.2.0"
-    browserify-optional "^1.0.1"
     clone "^1.0.4"
     deep-equal "^1.0.0"
     dfa "^1.2.0"
@@ -18,23 +16,23 @@
     unicode-properties "^1.2.2"
     unicode-trie "^2.0.0"
 
-"@foliojs-fork/linebreak@^1.1.1":
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/@foliojs-fork/linebreak/-/linebreak-1.1.1.tgz#93ecd695b7d2bb0334b9481058c3e610e019a4eb"
-  integrity sha512-pgY/+53GqGQI+mvDiyprvPWgkTlVBS8cxqee03ejm6gKAQNsR1tCYCIvN9FHy7otZajzMqCgPOgC4cHdt4JPig==
+"@foliojs-fork/linebreak@^1.1.1", "@foliojs-fork/linebreak@^1.1.2":
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/@foliojs-fork/linebreak/-/linebreak-1.1.2.tgz#32fee03d5431fa73284373439e172e451ae1e2da"
+  integrity sha512-ZPohpxxbuKNE0l/5iBJnOAfUaMACwvUIKCvqtWGKIMv1lPYoNjYXRfhi9FeeV9McBkBLxsMFWTVVhHJA8cyzvg==
   dependencies:
     base64-js "1.3.1"
-    brfs "^2.0.2"
     unicode-trie "^2.0.0"
 
-"@foliojs-fork/pdfkit@^0.14.0":
-  version "0.14.0"
-  resolved "https://registry.yarnpkg.com/@foliojs-fork/pdfkit/-/pdfkit-0.14.0.tgz#ed1868050edd2904284655f8dcddd56b49576c98"
-  integrity sha512-nMOiQAv6id89MT3tVTCgc7HxD5ZMANwio2o5yvs5sexQkC0KI3BLaLakpsrHmFfeGFAhqPmZATZGbJGXTUebpg==
+"@foliojs-fork/pdfkit@^0.15.1":
+  version "0.15.1"
+  resolved "https://registry.yarnpkg.com/@foliojs-fork/pdfkit/-/pdfkit-0.15.1.tgz#ecae3bcb7aad46b58e50493de593317f9b738074"
+  integrity sha512-4Cq2onHZAhThIfzv3/AFTPALqHzbmV8uNvgRELULWNbsZATgVeqEL4zHOzCyblLfX6tMXVO2BVaPcXboIxGjiw==
   dependencies:
-    "@foliojs-fork/fontkit" "^1.9.1"
+    "@foliojs-fork/fontkit" "^1.9.2"
     "@foliojs-fork/linebreak" "^1.1.1"
     crypto-js "^4.2.0"
+    jpeg-exif "^1.1.4"
     png-js "^1.0.0"
 
 "@foliojs-fork/restructure@^2.0.2":
@@ -70,49 +68,6 @@ JUMFlot@jumjum123/JUMFlot#*:
   version "0.0.0"
   resolved "https://codeload.github.com/jumjum123/JUMFlot/tar.gz/203147fa2ace27db89e2defcde0800654015ae23"
 
-acorn-node@^1.3.0:
-  version "1.8.2"
-  resolved "https://registry.yarnpkg.com/acorn-node/-/acorn-node-1.8.2.tgz#114c95d64539e53dede23de8b9d96df7c7ae2af8"
-  integrity sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A==
-  dependencies:
-    acorn "^7.0.0"
-    acorn-walk "^7.0.0"
-    xtend "^4.0.2"
-
-acorn-walk@^7.0.0:
-  version "7.2.0"
-  resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc"
-  integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==
-
-acorn@^7.0.0:
-  version "7.4.1"
-  resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa"
-  integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==
-
-amdefine@>=0.0.4:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5"
-  integrity sha512-S2Hw0TtNkMJhIabBwIojKL9YHO5T0n5eNqWJ7Lrlel/zDbftQpxpapi8tZs3X1HWa+u+QeydGmzzNU0m09+Rcg==
-
-array-from@^2.1.1:
-  version "2.1.1"
-  resolved "https://registry.yarnpkg.com/array-from/-/array-from-2.1.1.tgz#cfe9d8c26628b9dc5aecc62a9f5d8f1f352c1195"
-  integrity sha512-GQTc6Uupx1FCavi5mPzBvVT7nEOeWMmUA9P95wpfpW1XwMSKs+KaymD5C2Up7KAUKg/mYwbsUYzdZWcoajlNZg==
-
-ast-transform@0.0.0:
-  version "0.0.0"
-  resolved "https://registry.yarnpkg.com/ast-transform/-/ast-transform-0.0.0.tgz#74944058887d8283e189d954600947bc98fe0062"
-  integrity sha512-e/JfLiSoakfmL4wmTGPjv0HpTICVmxwXgYOB8x+mzozHL8v+dSfCbrJ8J8hJ0YBP0XcYu1aLZ6b/3TnxNK3P2A==
-  dependencies:
-    escodegen "~1.2.0"
-    esprima "~1.0.4"
-    through "~2.3.4"
-
-ast-types@^0.7.0:
-  version "0.7.8"
-  resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.7.8.tgz#902d2e0d60d071bdcd46dc115e1809ed11c138a9"
-  integrity sha512-RIOpVnVlltB6PcBJ5BMLx+H+6JJ/zjDGU0t7f0L6c2M1dqcK92VQopLBlPQ9R80AVXelfqYgjcPLtHtDbNFg0Q==
-
 base64-js@1.3.1:
   version "1.3.1"
   resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1"
@@ -146,16 +101,6 @@ bootstrap@^3.4.1, bootstrap@~3:
   resolved "https://registry.yarnpkg.com/bootstrap/-/bootstrap-3.4.1.tgz#c3a347d419e289ad11f4033e3c4132b87c081d72"
   integrity sha512-yN5oZVmRCwe5aKwzRj6736nSmKDX7pLYwsXiCj/EYmo16hODaBiT4En5btW/jhBF/seV+XMx3aYwukYC3A49DA==
 
-brfs@^2.0.0, brfs@^2.0.2:
-  version "2.0.2"
-  resolved "https://registry.yarnpkg.com/brfs/-/brfs-2.0.2.tgz#44237878fa82aa479ce4f5fe2c1796ec69f07845"
-  integrity sha512-IrFjVtwu4eTJZyu8w/V2gxU7iLTtcHih67sgEdzrhjLBMHp2uYefUBfdM4k2UvcuWMgV7PQDZHSLeNWnLFKWVQ==
-  dependencies:
-    quote-stream "^1.0.1"
-    resolve "^1.1.5"
-    static-module "^3.0.2"
-    through2 "^2.0.0"
-
 brotli@^1.2.0:
   version "1.3.3"
   resolved "https://registry.yarnpkg.com/brotli/-/brotli-1.3.3.tgz#7365d8cc00f12cf765d2b2c898716bcf4b604d48"
   dependencies:
     base64-js "^1.1.2"
 
-browser-resolve@^1.8.1:
-  version "1.11.3"
-  resolved "https://registry.yarnpkg.com/browser-resolve/-/browser-resolve-1.11.3.tgz#9b7cbb3d0f510e4cb86bdbd796124d28b5890af6"
-  integrity sha512-exDi1BYWB/6raKHmDTCicQfTkqwN5fioMFV4j8BsfMU4R2DK/QfZfK7kOVkmWCNANf0snkBzqGqAJBao9gZMdQ==
-  dependencies:
-    resolve "1.1.7"
-
-browserify-optional@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/browserify-optional/-/browserify-optional-1.0.1.tgz#1e13722cfde0d85f121676c2a72ced533a018869"
-  integrity sha512-VrhjbZ+Ba5mDiSYEuPelekQMfTbhcA2DhLk2VQWqdcCROWeFqlTcXZ7yfRkXCIl8E+g4gINJYJiRB7WEtfomAQ==
-  dependencies:
-    ast-transform "0.0.0"
-    ast-types "^0.7.0"
-    browser-resolve "^1.8.1"
-
-buffer-equal@0.0.1:
-  version "0.0.1"
-  resolved "https://registry.yarnpkg.com/buffer-equal/-/buffer-equal-0.0.1.tgz#91bc74b11ea405bc916bc6aa908faafa5b4aac4b"
-  integrity sha512-RgSV6InVQ9ODPdLWJ5UAqBqJBOg370Nz6ZQtRzpt6nUjc8v0St97uJ4PYC6NztqIScrAXafKM3mZPMygSe1ggA==
-
-buffer-from@^1.0.0:
-  version "1.1.2"
-  resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
-  integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==
-
 call-bind@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c"
@@ -232,21 +151,6 @@ codemirror@^5.63.1:
   resolved "https://registry.yarnpkg.com/codemirror/-/codemirror-5.65.12.tgz#294fdf097d10ac5b56a9e011a91eff252afc73ae"
   integrity sha512-z2jlHBocElRnPYysN2HAuhXbO3DNB0bcSKmNz3hcWR2Js2Dkhc1bEOxG93Z3DeUrnm+qx56XOY5wQmbP5KY0sw==
 
-concat-stream@~1.6.0:
-  version "1.6.2"
-  resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34"
-  integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==
-  dependencies:
-    buffer-from "^1.0.0"
-    inherits "^2.0.3"
-    readable-stream "^2.2.2"
-    typedarray "^0.0.6"
-
-convert-source-map@^1.5.1:
-  version "1.9.0"
-  resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f"
-  integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==
-
 core-util-is@~1.0.0:
   version "1.0.3"
   resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
@@ -257,19 +161,6 @@ crypto-js@^4.2.0:
   resolved "https://registry.yarnpkg.com/crypto-js/-/crypto-js-4.2.0.tgz#4d931639ecdfd12ff80e8186dba6af2c2e856631"
   integrity sha512-KALDyEYgpY+Rlob/iriUtjV6d5Eq+Y191A5g4UqLAi8CyGP9N1+FdVbkc1SxKc2r4YAYqG8JzO2KGL+AizD70Q==
 
-d@1, d@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a"
-  integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==
-  dependencies:
-    es5-ext "^0.10.50"
-    type "^1.0.1"
-
-dash-ast@^2.0.1:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/dash-ast/-/dash-ast-2.0.1.tgz#8d0fd2e601c59bf874cc22877ee7dd889f54dee8"
-  integrity sha512-5TXltWJGc+RdnabUGzhRae1TRq6m4gr+3K2wQX0is5/F2yS6MJXJvLyI3ErAnsAXuJoGqvfVD5icRgim07DrxQ==
-
 datatables.net-bs@>=1.12.1:
   version "1.13.4"
   resolved "https://registry.yarnpkg.com/datatables.net-bs/-/datatables.net-bs-1.13.4.tgz#cdab0810f800c21b44ca5c9422120119da13178f"
@@ -339,11 +230,6 @@ deep-equal@^1.0.0:
     object-keys "^1.1.1"
     regexp.prototype.flags "^1.2.0"
 
-deep-is@~0.1.3:
-  version "0.1.4"
-  resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831"
-  integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==
-
 define-properties@^1.1.3:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.0.tgz#52988570670c9eacedd8064f4a990f2405849bd5"
@@ -376,13 +262,6 @@ drmonty-datatables-responsive@^1.0.0:
   dependencies:
     jquery ">=1.7.0"
 
-duplexer2@~0.1.4:
-  version "0.1.4"
-  resolved "https://registry.yarnpkg.com/duplexer2/-/duplexer2-0.1.4.tgz#8b12dab878c0d69e3e7891051662a32fc6bddcc1"
-  integrity sha512-asLFVfWWtJ90ZyOUHMqk7/S2w2guQKxUI2itj3d92ADHhxUSbCMGi1f1cBcJ7xM1To+pE/Khbwo1yuNbMEPKeA==
-  dependencies:
-    readable-stream "^2.0.2"
-
 easymde@^2.18.0:
   version "2.18.0"
   resolved "https://registry.yarnpkg.com/easymde/-/easymde-2.18.0.tgz#ff1397d07329b1a7b9187d2d0c20766fa16b3b1b"
@@ -394,150 +273,11 @@ easymde@^2.18.0:
     codemirror-spell-checker "1.1.2"
     marked "^4.1.0"
 
-es5-ext@^0.10.35, es5-ext@^0.10.50, es5-ext@^0.10.62, es5-ext@~0.10.14:
-  version "0.10.64"
-  resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.64.tgz#12e4ffb48f1ba2ea777f1fcdd1918ef73ea21714"
-  integrity sha512-p2snDhiLaXe6dahss1LddxqEm+SkuDvV8dnIQG0MWjyHpcMNfXKPE+/Cc0y+PhxJX3A4xGNeFCj5oc0BUh6deg==
-  dependencies:
-    es6-iterator "^2.0.3"
-    es6-symbol "^3.1.3"
-    esniff "^2.0.1"
-    next-tick "^1.1.0"
-
-es6-iterator@^2.0.3, es6-iterator@~2.0.1, es6-iterator@~2.0.3:
-  version "2.0.3"
-  resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7"
-  integrity sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g==
-  dependencies:
-    d "1"
-    es5-ext "^0.10.35"
-    es6-symbol "^3.1.1"
-
-es6-map@^0.1.5:
-  version "0.1.5"
-  resolved "https://registry.yarnpkg.com/es6-map/-/es6-map-0.1.5.tgz#9136e0503dcc06a301690f0bb14ff4e364e949f0"
-  integrity sha512-mz3UqCh0uPCIqsw1SSAkB/p0rOzF/M0V++vyN7JqlPtSW/VsYgQBvVvqMLmfBuyMzTpLnNqi6JmcSizs4jy19A==
-  dependencies:
-    d "1"
-    es5-ext "~0.10.14"
-    es6-iterator "~2.0.1"
-    es6-set "~0.1.5"
-    es6-symbol "~3.1.1"
-    event-emitter "~0.3.5"
-
-es6-set@^0.1.5, es6-set@~0.1.5:
-  version "0.1.6"
-  resolved "https://registry.yarnpkg.com/es6-set/-/es6-set-0.1.6.tgz#5669e3b2aa01d61a50ba79964f733673574983b8"
-  integrity sha512-TE3LgGLDIBX332jq3ypv6bcOpkLO0AslAQo7p2VqX/1N46YNsvIWgvjojjSEnWEGWMhr1qUbYeTSir5J6mFHOw==
-  dependencies:
-    d "^1.0.1"
-    es5-ext "^0.10.62"
-    es6-iterator "~2.0.3"
-    es6-symbol "^3.1.3"
-    event-emitter "^0.3.5"
-    type "^2.7.2"
-
-es6-symbol@^3.1.1, es6-symbol@^3.1.3, es6-symbol@~3.1.1:
-  version "3.1.3"
-  resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18"
-  integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==
-  dependencies:
-    d "^1.0.1"
-    ext "^1.1.2"
-
-escodegen@^1.11.1:
-  version "1.14.3"
-  resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.14.3.tgz#4e7b81fba61581dc97582ed78cab7f0e8d63f503"
-  integrity sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==
-  dependencies:
-    esprima "^4.0.1"
-    estraverse "^4.2.0"
-    esutils "^2.0.2"
-    optionator "^0.8.1"
-  optionalDependencies:
-    source-map "~0.6.1"
-
-escodegen@~1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.2.0.tgz#09de7967791cc958b7f89a2ddb6d23451af327e1"
-  integrity sha512-yLy3Cc+zAC0WSmoT2fig3J87TpQ8UaZGx8ahCAs9FL8qNbyV7CVyPKS74DG4bsHiL5ew9sxdYx131OkBQMFnvA==
-  dependencies:
-    esprima "~1.0.4"
-    estraverse "~1.5.0"
-    esutils "~1.0.0"
-  optionalDependencies:
-    source-map "~0.1.30"
-
-esniff@^2.0.1:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/esniff/-/esniff-2.0.1.tgz#a4d4b43a5c71c7ec51c51098c1d8a29081f9b308"
-  integrity sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg==
-  dependencies:
-    d "^1.0.1"
-    es5-ext "^0.10.62"
-    event-emitter "^0.3.5"
-    type "^2.7.2"
-
-esprima@^4.0.1:
-  version "4.0.1"
-  resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
-  integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
-
-esprima@~1.0.4:
-  version "1.0.4"
-  resolved "https://registry.yarnpkg.com/esprima/-/esprima-1.0.4.tgz#9f557e08fc3b4d26ece9dd34f8fbf476b62585ad"
-  integrity sha512-rp5dMKN8zEs9dfi9g0X1ClLmV//WRyk/R15mppFNICIFRG5P92VP7Z04p8pk++gABo9W2tY+kHyu6P1mEHgmTA==
-
-estraverse@^4.2.0:
-  version "4.3.0"
-  resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d"
-  integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==
-
-estraverse@~1.5.0:
-  version "1.5.1"
-  resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.5.1.tgz#867a3e8e58a9f84618afb6c2ddbcd916b7cbaf71"
-  integrity sha512-FpCjJDfmo3vsc/1zKSeqR5k42tcIhxFIlvq+h9j0fO2q/h2uLKyweq7rYJ+0CoVvrGQOxIS5wyBrW/+vF58BUQ==
-
-estree-is-function@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/estree-is-function/-/estree-is-function-1.0.0.tgz#c0adc29806d7f18a74db7df0f3b2666702e37ad2"
-  integrity sha512-nSCWn1jkSq2QAtkaVLJZY2ezwcFO161HVc174zL1KPW3RJ+O6C3eJb8Nx7OXzvhoEv+nLgSR1g71oWUHUDTrJA==
-
-esutils@^2.0.2:
-  version "2.0.3"
-  resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
-  integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
-
-esutils@~1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/esutils/-/esutils-1.0.0.tgz#8151d358e20c8acc7fb745e7472c0025fe496570"
-  integrity sha512-x/iYH53X3quDwfHRz4y8rn4XcEwwCJeWsul9pF1zldMbGtgOtMNBEOuYWwB1EQlK2LRa1fev3YAgym/RElp5Cg==
-
 eve-raphael@0.5.0:
   version "0.5.0"
   resolved "https://registry.yarnpkg.com/eve-raphael/-/eve-raphael-0.5.0.tgz#17c754b792beef3fa6684d79cf5a47c63c4cda30"
   integrity sha512-jrxnPsCGqng1UZuEp9DecX/AuSyAszATSjf4oEcRxvfxa1Oux4KkIPKBAAWWnpdwfARtr+Q0o9aPYWjsROD7ug==
 
-event-emitter@^0.3.5, event-emitter@~0.3.5:
-  version "0.3.5"
-  resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39"
-  integrity sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA==
-  dependencies:
-    d "1"
-    es5-ext "~0.10.14"
-
-ext@^1.1.2:
-  version "1.7.0"
-  resolved "https://registry.yarnpkg.com/ext/-/ext-1.7.0.tgz#0ea4383c0103d60e70be99e9a7f11027a33c4f5f"
-  integrity sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw==
-  dependencies:
-    type "^2.7.2"
-
-fast-levenshtein@~2.0.6:
-  version "2.0.6"
-  resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
-  integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==
-
 flot@flot/flot#~0.8.3:
   version "0.8.3"
   resolved "https://codeload.github.com/flot/flot/tar.gz/453b017cc5acfd75e252b93e8635f57f4196d45d"
@@ -567,11 +307,6 @@ functions-have-names@^1.2.2:
   resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834"
   integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==
 
-get-assigned-identifiers@^1.1.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/get-assigned-identifiers/-/get-assigned-identifiers-1.2.0.tgz#6dbf411de648cbaf8d9169ebb0d2d576191e2ff1"
-  integrity sha512-mBBwmeGTrxEMO4pMaaf/uUEFHnYtwr8FTe8Y/mer4rcV/bye0qGm6pw1bGZFGStxC5O76c5ZAVBGnqHmOaJpdQ==
-
 get-intrinsic@^1.0.2, get-intrinsic@^1.1.1:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.0.tgz#7ad1dc0535f3a2904bba075772763e5051f6d05f"
@@ -612,7 +347,7 @@ has-tostringtag@^1.0.0:
   dependencies:
     has-symbols "^1.0.2"
 
-has@^1.0.1, has@^1.0.3:
+has@^1.0.3:
   version "1.0.3"
   resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796"
   integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==
@@ -631,7 +366,7 @@ immediate@~3.0.5:
   resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.0.6.tgz#9db1dbd0faf8de6fbe0f5dd5e56bb606280de69b"
   integrity sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==
 
-inherits@^2.0.3, inherits@~2.0.3:
+inherits@~2.0.3:
   version "2.0.4"
   resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
   integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
@@ -644,13 +379,6 @@ is-arguments@^1.0.4:
     call-bind "^1.0.2"
     has-tostringtag "^1.0.0"
 
-is-core-module@^2.9.0:
-  version "2.11.0"
-  resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.11.0.tgz#ad4cb3e3863e814523c96f3f58d26cc570ff0144"
-  integrity sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==
-  dependencies:
-    has "^1.0.3"
-
 is-date-object@^1.0.1:
   version "1.0.5"
   resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f"
@@ -671,6 +399,11 @@ isarray@~1.0.0:
   resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
   integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==
 
+jpeg-exif@^1.1.4:
+  version "1.1.4"
+  resolved "https://registry.yarnpkg.com/jpeg-exif/-/jpeg-exif-1.1.4.tgz#781a65b6cd74f62cb1c493511020f8d3577a1c2b"
+  integrity sha512-a+bKEcCjtuW5WTdgeXFzswSrdqi0jk4XlEtZlx5A94wCoBpFjfFTbo/Tra5SpNCl/YFZPvcV1dJc+TAYeg6ROQ==
+
 jquery-highlight@3.5.0:
   version "3.5.0"
   resolved "https://registry.yarnpkg.com/jquery-highlight/-/jquery-highlight-3.5.0.tgz#354fb3a8b98c594525ec1ccc003fd3d1dc305815"
@@ -678,12 +411,12 @@ jquery-highlight@3.5.0:
   dependencies:
     jquery ">= 1.0.0"
 
-jquery-ui@1.13.3:
-  version "1.13.3"
-  resolved "https://registry.yarnpkg.com/jquery-ui/-/jquery-ui-1.13.3.tgz#d9f5292b2857fa1f2fdbbe8f2e66081664eb9bc5"
-  integrity sha512-D2YJfswSJRh/B8M/zCowDpNFfwsDmtfnMPwjJTyvl+CBqzpYwQ+gFYIbUUlzijy/Qvoy30H1YhoSui4MNYpRwA==
+jquery-ui@1.14.1:
+  version "1.14.1"
+  resolved "https://registry.yarnpkg.com/jquery-ui/-/jquery-ui-1.14.1.tgz#ba342ea3ffff662b787595391f607d923313e040"
+  integrity sha512-DhzsYH8VeIvOaxwi+B/2BCsFFT5EGjShdzOcm5DssWjtcpGWIMsn66rJciDA6jBruzNiLf1q0KvwMoX1uGNvnQ==
   dependencies:
-    jquery ">=1.8.0 <4.0.0"
+    jquery ">=1.12.0 <5.0.0"
 
 jquery.cookie@1.4.1:
   version "1.4.1"
@@ -699,7 +432,7 @@ jquery.hotkeys@jeresig/jquery.hotkeys#master:
   version "0.2.0"
   resolved "https://codeload.github.com/jeresig/jquery.hotkeys/tar.gz/f24f1da275aab7881ab501055c256add6f690de4"
 
-"jquery@>= 1.0.0", jquery@>=1.7, jquery@>=1.7.0, "jquery@>=1.8.0 <4.0.0", jquery@^3.7.1:
+"jquery@>= 1.0.0", "jquery@>=1.12.0 <5.0.0", jquery@>=1.7, jquery@>=1.7.0, jquery@^3.7.1:
   version "3.7.1"
   resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.7.1.tgz#083ef98927c9a6a74d05a6af02806566d16274de"
   integrity sha512-m4avr8yL8kmFN8psrbFFFmB/If14iN5o9nw/NgnnM+kybDJpRsAynV2BsfpTYrTRysYUdADVD7CkUUizgkpLfg==
@@ -721,14 +454,6 @@ justgage@^1.7.0:
   dependencies:
     raphael "^2.3.0"
 
-levn@~0.3.0:
-  version "0.3.0"
-  resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee"
-  integrity sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==
-  dependencies:
-    prelude-ls "~1.1.2"
-    type-check "~0.3.2"
-
 lie@~3.3.0:
   version "3.3.0"
   resolved "https://registry.yarnpkg.com/lie/-/lie-3.3.0.tgz#dcf82dee545f46074daf200c7c1c5a08e0f40f6a"
@@ -736,35 +461,16 @@ lie@~3.3.0:
   dependencies:
     immediate "~3.0.5"
 
-magic-string@0.25.1:
-  version "0.25.1"
-  resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.1.tgz#b1c248b399cd7485da0fe7385c2fc7011843266e"
-  integrity sha512-sCuTz6pYom8Rlt4ISPFn6wuFodbKMIHUMv4Qko9P17dpxb7s52KJTmRuZZqHdGmLCK9AOcDare039nRIcfdkEg==
-  dependencies:
-    sourcemap-codec "^1.4.1"
-
 marked@^4.1.0:
   version "4.2.12"
   resolved "https://registry.yarnpkg.com/marked/-/marked-4.2.12.tgz#d69a64e21d71b06250da995dcd065c11083bebb5"
   integrity sha512-yr8hSKa3Fv4D3jdZmtMMPghgVt6TWbk86WQaWhDloQjRSQhMMYCAro7jP7VDJrjjdV8pxVxMssXS8B8Y5DZ5aw==
 
-merge-source-map@1.0.4:
-  version "1.0.4"
-  resolved "https://registry.yarnpkg.com/merge-source-map/-/merge-source-map-1.0.4.tgz#a5de46538dae84d4114cc5ea02b4772a6346701f"
-  integrity sha512-PGSmS0kfnTnMJCzJ16BLLCEe6oeYCamKFFdQKshi4BmM6FUwipjVOcBFGxqtQtirtAG4iZvHlqST9CpZKqlRjA==
-  dependencies:
-    source-map "^0.5.6"
-
 metismenu@~3.0.7:
   version "3.0.7"
   resolved "https://registry.yarnpkg.com/metismenu/-/metismenu-3.0.7.tgz#613dd01d14d053474b926a1ecac24d137c934aaa"
   integrity sha512-omMwIAahlzssjSi3xY9ijkhXI8qEaQTqBdJ9lHmfV5Bld2UkxO2h2M3yWsteAlGJ/nSHi4e69WHDE2r18Ickyw==
 
-minimist@^1.1.3:
-  version "1.2.8"
-  resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c"
-  integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==
-
 moment@^2.30.1:
   version "2.30.1"
   resolved "https://registry.yarnpkg.com/moment/-/moment-2.30.1.tgz#f8c91c07b7a786e30c59926df530b4eac96974ae"
@@ -774,16 +480,6 @@ morris.js@morrisjs/morris.js:
   version "0.5.1"
   resolved "https://codeload.github.com/morrisjs/morris.js/tar.gz/14530d0733801d5bef1264cf3d062ecace7e326b"
 
-next-tick@^1.1.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.1.0.tgz#1836ee30ad56d67ef281b22bd199f709449b35eb"
-  integrity sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==
-
-object-inspect@^1.6.0:
-  version "1.12.3"
-  resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9"
-  integrity sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==
-
 object-is@^1.0.1:
   version "1.1.5"
   resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac"
@@ -797,18 +493,6 @@ object-keys@^1.1.1:
   resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e"
   integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==
 
-optionator@^0.8.1:
-  version "0.8.3"
-  resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495"
-  integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==
-  dependencies:
-    deep-is "~0.1.3"
-    fast-levenshtein "~2.0.6"
-    levn "~0.3.0"
-    prelude-ls "~1.1.2"
-    type-check "~0.3.2"
-    word-wrap "~1.2.3"
-
 pako@^0.2.5:
   version "0.2.9"
   resolved "https://registry.yarnpkg.com/pako/-/pako-0.2.9.tgz#f3f7522f4ef782348da8161bad9ecfd51bf83a75"
@@ -819,45 +503,26 @@ pako@~1.0.2:
   resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf"
   integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==
 
-path-parse@^1.0.7:
-  version "1.0.7"
-  resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735"
-  integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==
-
-pdfmake@^0.2.11:
-  version "0.2.11"
-  resolved "https://registry.yarnpkg.com/pdfmake/-/pdfmake-0.2.11.tgz#359e27dabc48ba6dc8f7ebc236c166eb98d39b5b"
-  integrity sha512-Ig9LBhIYWW8t0/MiXQPYOQdMgwjg+f3JS2iWA7q94Ftc4wSDO5VZP+a1+QN7uz3FbA7+vB4EEYfg3xU0wRPk8w==
+pdfmake@^0.2.15:
+  version "0.2.15"
+  resolved "https://registry.yarnpkg.com/pdfmake/-/pdfmake-0.2.15.tgz#86bbc2c854e8a1cc98d4d6394b39dae00cc3a3b0"
+  integrity sha512-Ryef9mjxo6q8dthhbssAK0zwCsPZ6Pl7kCHnIEXOvQdd79LUGZD6SHGi21YryFXczPjvw6V009uxQwp5iritcA==
   dependencies:
-    "@foliojs-fork/linebreak" "^1.1.1"
-    "@foliojs-fork/pdfkit" "^0.14.0"
+    "@foliojs-fork/linebreak" "^1.1.2"
+    "@foliojs-fork/pdfkit" "^0.15.1"
     iconv-lite "^0.6.3"
-    xmldoc "^1.1.2"
+    xmldoc "^1.3.0"
 
 png-js@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/png-js/-/png-js-1.0.0.tgz#e5484f1e8156996e383aceebb3789fd75df1874d"
   integrity sha512-k+YsbhpA9e+EFfKjTCH3VW6aoKlyNYI6NYdTfDL4CIvFnvsuO84ttonmZE7rc+v23SLTH8XX+5w/Ak9v0xGY4g==
 
-prelude-ls@~1.1.2:
-  version "1.1.2"
-  resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54"
-  integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==
-
 process-nextick-args@~2.0.0:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
   integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
 
-quote-stream@^1.0.1:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/quote-stream/-/quote-stream-1.0.2.tgz#84963f8c9c26b942e153feeb53aae74652b7e0b2"
-  integrity sha512-kKr2uQ2AokadPjvTyKJQad9xELbZwYzWlNfI3Uz2j/ib5u6H9lDP7fUUR//rMycd0gv4Z5P1qXMfXR8YpIxrjQ==
-  dependencies:
-    buffer-equal "0.0.1"
-    minimist "^1.1.3"
-    through2 "^2.0.0"
-
 raphael@^2.3.0:
   version "2.3.0"
   resolved "https://registry.yarnpkg.com/raphael/-/raphael-2.3.0.tgz#eabeb09dba861a1d4cee077eaafb8c53f3131f89"
@@ -865,7 +530,7 @@ raphael@^2.3.0:
   dependencies:
     eve-raphael "0.5.0"
 
-readable-stream@^2.0.2, readable-stream@^2.2.2, readable-stream@~2.3.3, readable-stream@~2.3.6:
+readable-stream@~2.3.6:
   version "2.3.8"
   resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b"
   integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==
@@ -887,20 +552,6 @@ regexp.prototype.flags@^1.2.0:
     define-properties "^1.1.3"
     functions-have-names "^1.2.2"
 
-resolve@1.1.7:
-  version "1.1.7"
-  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b"
-  integrity sha512-9znBF0vBcaSN3W2j7wKvdERPwqTxSpCq+if5C0WoTCyV9n24rua28jeuQ2pL/HOf+yUe/Mef+H/5p60K0Id3bg==
-
-resolve@^1.1.5:
-  version "1.22.1"
-  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177"
-  integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==
-  dependencies:
-    is-core-module "^2.9.0"
-    path-parse "^1.0.7"
-    supports-preserve-symlinks-flag "^1.0.0"
-
 safe-buffer@~5.1.0, safe-buffer@~5.1.1:
   version "5.1.2"
   resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
@@ -916,19 +567,6 @@ sax@^1.2.4:
   resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
   integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==
 
-scope-analyzer@^2.0.1:
-  version "2.1.2"
-  resolved "https://registry.yarnpkg.com/scope-analyzer/-/scope-analyzer-2.1.2.tgz#b958162feb59823c2835c7b0229187a97c77e9cd"
-  integrity sha512-5cfCmsTYV/wPaRIItNxatw02ua/MThdIUNnUOCYp+3LSEJvnG804ANw2VLaavNILIfWXF1D1G2KNANkBBvInwQ==
-  dependencies:
-    array-from "^2.1.1"
-    dash-ast "^2.0.1"
-    es6-map "^0.1.5"
-    es6-set "^0.1.5"
-    es6-symbol "^3.1.1"
-    estree-is-function "^1.0.0"
-    get-assigned-identifiers "^1.1.0"
-
 select@^1.1.2:
   version "1.1.2"
   resolved "https://registry.yarnpkg.com/select/-/select-1.1.2.tgz#0e7350acdec80b1108528786ec1d4418d11b396d"
@@ -939,65 +577,11 @@ setimmediate@^1.0.5:
   resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285"
   integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==
 
-shallow-copy@~0.0.1:
-  version "0.0.1"
-  resolved "https://registry.yarnpkg.com/shallow-copy/-/shallow-copy-0.0.1.tgz#415f42702d73d810330292cc5ee86eae1a11a170"
-  integrity sha512-b6i4ZpVuUxB9h5gfCxPiusKYkqTMOjEbBs4wMaFbkfia4yFv92UKZ6Df8WXcKbn08JNL/abvg3FnMAOfakDvUw==
-
-source-map@^0.5.6:
-  version "0.5.7"
-  resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
-  integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==
-
-source-map@~0.1.30:
-  version "0.1.43"
-  resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.1.43.tgz#c24bc146ca517c1471f5dacbe2571b2b7f9e3346"
-  integrity sha512-VtCvB9SIQhk3aF6h+N85EaqIaBFIAfZ9Cu+NJHHVvc8BbEcnvDcFw6sqQ2dQrT6SlOrZq3tIvyD9+EGq/lJryQ==
-  dependencies:
-    amdefine ">=0.0.4"
-
-source-map@~0.6.1:
-  version "0.6.1"
-  resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
-  integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
-
-sourcemap-codec@^1.4.1:
-  version "1.4.8"
-  resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4"
-  integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==
-
 startbootstrap-sb-admin-2@1.0.7:
   version "1.0.7"
   resolved "https://registry.yarnpkg.com/startbootstrap-sb-admin-2/-/startbootstrap-sb-admin-2-1.0.7.tgz#ef36a90903afb4a84a25c329b0292d06bf05b130"
   integrity sha512-+CAll0cvvIZu/KBX3epjZrRRaGu7p95y2InZvhxgnKLH3p6JxT6lxJuwbQw9EVZfNckCZEhpJ0Voux9C47mTrg==
 
-static-eval@^2.0.5:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/static-eval/-/static-eval-2.1.0.tgz#a16dbe54522d7fa5ef1389129d813fd47b148014"
-  integrity sha512-agtxZ/kWSsCkI5E4QifRwsaPs0P0JmZV6dkLz6ILYfFYQGn+5plctanRN+IC8dJRiFkyXHrwEE3W9Wmx67uDbw==
-  dependencies:
-    escodegen "^1.11.1"
-
-static-module@^3.0.2:
-  version "3.0.4"
-  resolved "https://registry.yarnpkg.com/static-module/-/static-module-3.0.4.tgz#bfbd1d1c38dd1fbbf0bb4af0c1b3ae18a93a2b68"
-  integrity sha512-gb0v0rrgpBkifXCa3yZXxqVmXDVE+ETXj6YlC/jt5VzOnGXR2C15+++eXuMDUYsePnbhf+lwW0pE1UXyOLtGCw==
-  dependencies:
-    acorn-node "^1.3.0"
-    concat-stream "~1.6.0"
-    convert-source-map "^1.5.1"
-    duplexer2 "~0.1.4"
-    escodegen "^1.11.1"
-    has "^1.0.1"
-    magic-string "0.25.1"
-    merge-source-map "1.0.4"
-    object-inspect "^1.6.0"
-    readable-stream "~2.3.3"
-    scope-analyzer "^2.0.1"
-    shallow-copy "~0.0.1"
-    static-eval "^2.0.5"
-    through2 "~2.0.3"
-
 string_decoder@~1.1.1:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
@@ -1005,24 +589,6 @@ string_decoder@~1.1.1:
   dependencies:
     safe-buffer "~5.1.0"
 
-supports-preserve-symlinks-flag@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09"
-  integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==
-
-through2@^2.0.0, through2@~2.0.3:
-  version "2.0.5"
-  resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd"
-  integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==
-  dependencies:
-    readable-stream "~2.3.6"
-    xtend "~4.0.1"
-
-through@~2.3.4:
-  version "2.3.8"
-  resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"
-  integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==
-
 tiny-emitter@^2.0.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/tiny-emitter/-/tiny-emitter-2.1.0.tgz#1d1a56edfc51c43e863cbb5382a72330e3555423"
@@ -1033,28 +599,6 @@ tiny-inflate@^1.0.0, tiny-inflate@^1.0.2:
   resolved "https://registry.yarnpkg.com/tiny-inflate/-/tiny-inflate-1.0.3.tgz#122715494913a1805166aaf7c93467933eea26c4"
   integrity sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw==
 
-type-check@~0.3.2:
-  version "0.3.2"
-  resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72"
-  integrity sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==
-  dependencies:
-    prelude-ls "~1.1.2"
-
-type@^1.0.1:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0"
-  integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==
-
-type@^2.7.2:
-  version "2.7.2"
-  resolved "https://registry.yarnpkg.com/type/-/type-2.7.2.tgz#2376a15a3a28b1efa0f5350dcf72d24df6ef98d0"
-  integrity sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw==
-
-typedarray@^0.0.6:
-  version "0.0.6"
-  resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777"
-  integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==
-
 typo-js@*:
   version "1.2.2"
   resolved "https://registry.yarnpkg.com/typo-js/-/typo-js-1.2.2.tgz#340484d81fe518e77c81a5a770162b14492f183b"
@@ -1081,19 +625,9 @@ util-deprecate@~1.0.1:
   resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
   integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
 
-word-wrap@~1.2.3:
-  version "1.2.4"
-  resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.4.tgz#cb4b50ec9aca570abd1f52f33cd45b6c61739a9f"
-  integrity sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==
-
-xmldoc@^1.1.2:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/xmldoc/-/xmldoc-1.2.0.tgz#7554371bfd8c138287cff01841ae4566d26e5541"
-  integrity sha512-2eN8QhjBsMW2uVj7JHLHkMytpvGHLHxKXBy4J3fAT/HujsEtM6yU84iGjpESYGHg6XwK0Vu4l+KgqQ2dv2cCqg==
+xmldoc@^1.3.0:
+  version "1.3.0"
+  resolved "https://registry.yarnpkg.com/xmldoc/-/xmldoc-1.3.0.tgz#7823225b096c74036347c9ec5924d06b6a3cebab"
+  integrity sha512-y7IRWW6PvEnYQZNZFMRLNJw+p3pezM4nKYPfr15g4OOW9i8VpeydycFuipE2297OvZnh3jSb2pxOt9QpkZUVng==
   dependencies:
     sax "^1.2.4"
-
-xtend@^4.0.2, xtend@~4.0.1:
-  version "4.0.2"
-  resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
-  integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==
diff --git a/docker-compose.override.debug.yml b/docker-compose.override.debug.yml
deleted file mode 100644
index 58af41549f7..00000000000
--- a/docker-compose.override.debug.yml
+++ /dev/null
@@ -1,60 +0,0 @@
----
-services:
-  uwsgi:
-    entrypoint: ['/wait-for-it.sh', '${DD_DATABASE_HOST:-postgres}:${DD_DATABASE_PORT:-5432}', '-t', '30', '--', '/entrypoint-uwsgi-dev.sh']
-    volumes:
-      - '.:/app:z'
-    environment:
-      PYTHONWARNINGS: error # We are strict about Warnings during debugging
-      DD_DEBUG: 'True'
-      DD_ADMIN_USER: "${DD_ADMIN_USER:-admin}"
-      DD_ADMIN_PASSWORD: "${DD_ADMIN_PASSWORD:-admin}"
-      DD_EMAIL_URL: "smtp://mailhog:1025"
-    ports:
-      - target: ${DD_DEBUG_PORT:-3000}
-        published: ${DD_DEBUG_PORT:-3000}
-        protocol: tcp
-        mode: host
-  celeryworker:
-    volumes:
-      - '.:/app:z'
-    environment:
-      PYTHONWARNINGS: error # We are strict about Warnings during debugging
-      DD_DEBUG: 'True'
-      DD_EMAIL_URL: "smtp://mailhog:1025"
-  celerybeat:
-    environment:
-      PYTHONWARNINGS: error # We are strict about Warnings during debugging
-      DD_DEBUG: 'True'
-    volumes:
-      - '.:/app:z'
-  initializer:
-    volumes:
-      - '.:/app:z'
-    environment:
-      PYTHONWARNINGS: error # We are strict about Warnings during debugging
-      DD_DEBUG: 'True'
-      DD_ADMIN_USER: "${DD_ADMIN_USER:-admin}"
-      DD_ADMIN_PASSWORD: "${DD_ADMIN_PASSWORD:-admin}"
-  nginx:
-    volumes:
-      - './dojo/static/dojo:/usr/share/nginx/html/static/dojo'
-  postgres:
-    ports:
-      - target: ${DD_DATABASE_PORT:-5432}
-        published: ${DD_DATABASE_PORT:-5432}
-        protocol: tcp
-        mode: host
-  mailhog:
-    image: mailhog/mailhog:v1.0.1@sha256:8d76a3d4ffa32a3661311944007a415332c4bb855657f4f6c57996405c009bea
-    entrypoint: [ "/bin/sh", "-c", "MailHog &>/dev/null" ]
-    # inspired by https://github.com/mailhog/MailHog/issues/56#issuecomment-291968642
-    ports:
-      - target: 1025
-        published: 1025
-        protocol: tcp
-        mode: host
-      - target: 8025
-        published: 8025
-        protocol: tcp
-        mode: host
diff --git a/docker-compose.override.dev.yml b/docker-compose.override.dev.yml
index 185ff0748f7..581dd627900 100644
--- a/docker-compose.override.dev.yml
+++ b/docker-compose.override.dev.yml
@@ -5,7 +5,8 @@ services:
     volumes:
       - '.:/app:z'
     environment:
-      PYTHONWARNINGS: always # We are strict during development so Warnings needs to be more verbose
+      PYTHONWARNINGS: error # We are strict about Warnings during development
+      DD_DEBUG: 'True'
       DD_ADMIN_USER: "${DD_ADMIN_USER:-admin}"
       DD_ADMIN_PASSWORD: "${DD_ADMIN_PASSWORD:-admin}"
       DD_EMAIL_URL: "smtp://mailhog:1025"
@@ -13,18 +14,21 @@ services:
     volumes:
       - '.:/app:z'
     environment:
-      PYTHONWARNINGS: always # We are strict during development so Warnings needs to be more verbose
+      PYTHONWARNINGS: error # We are strict about Warnings during development
+      DD_DEBUG: 'True'
       DD_EMAIL_URL: "smtp://mailhog:1025"
   celerybeat:
     volumes:
       - '.:/app:z'
     environment:
-      PYTHONWARNINGS: always # We are strict during development so Warnings needs to be more verbose
+      PYTHONWARNINGS: error # We are strict about Warnings during development
+      DD_DEBUG: 'True'
   initializer:
     volumes:
       - '.:/app:z'
     environment:
-      PYTHONWARNINGS: always # We are strict during development so Warnings needs to be more verbose
+      PYTHONWARNINGS: error # We are strict about Warnings during development
+      DD_DEBUG: 'True'
       DD_ADMIN_USER: "${DD_ADMIN_USER:-admin}"
       DD_ADMIN_PASSWORD: "${DD_ADMIN_PASSWORD:-admin}"
   nginx:
@@ -49,3 +53,5 @@ services:
       published: 8025
       protocol: tcp
       mode: host
+  "webhook.endpoint":
+    image: mccutchen/go-httpbin:v2.15.0@sha256:24528cf5229d0b70065ac27e6c9e4d96f5452a84a3ce4433e56573c18d96827a
diff --git a/docker-compose.override.unit_tests.yml b/docker-compose.override.unit_tests.yml
index 164d7a87084..baf50d51e60 100644
--- a/docker-compose.override.unit_tests.yml
+++ b/docker-compose.override.unit_tests.yml
@@ -1,7 +1,7 @@
 ---
 services:
   nginx:
-    image: busybox:1.36.1-musl
+    image: busybox:1.37.0-musl
     entrypoint: ['echo', 'skipping', 'nginx']
     volumes:
       - defectdojo_media_unit_tests:/usr/share/nginx/html/media
@@ -30,13 +30,13 @@ services:
       DD_CELERY_BROKER_PATH: '/dojo.celerydb.sqlite'
       DD_CELERY_BROKER_PARAMS: ''
   celerybeat:
-    image: busybox:1.36.1-musl
+    image: busybox:1.37.0-musl
     entrypoint: ['echo', 'skipping', 'celery beat']
   celeryworker:
-    image: busybox:1.36.1-musl
+    image: busybox:1.37.0-musl
     entrypoint: ['echo', 'skipping', 'celery worker']
   initializer:
-    image: busybox:1.36.1-musl
+    image: busybox:1.37.0-musl
     entrypoint: ['echo', 'skipping', 'initializer']
   postgres:
     ports:
@@ -49,8 +49,10 @@ services:
     volumes:
      - defectdojo_postgres_unit_tests:/var/lib/postgresql/data
   redis:
-    image: busybox:1.36.1-musl
+    image: busybox:1.37.0-musl
entrypoint: ['echo', 'skipping', 'redis'] + "webhook.endpoint": + image: mccutchen/go-httpbin:v2.15.0@sha256:24528cf5229d0b70065ac27e6c9e4d96f5452a84a3ce4433e56573c18d96827a volumes: defectdojo_postgres_unit_tests: {} defectdojo_media_unit_tests: {} diff --git a/docker-compose.override.unit_tests_cicd.yml b/docker-compose.override.unit_tests_cicd.yml index b39f4cf034d..1ca70557d41 100644 --- a/docker-compose.override.unit_tests_cicd.yml +++ b/docker-compose.override.unit_tests_cicd.yml @@ -1,7 +1,7 @@ --- services: nginx: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'nginx'] volumes: - defectdojo_media_unit_tests:/usr/share/nginx/html/media @@ -29,13 +29,13 @@ services: DD_CELERY_BROKER_PATH: '/dojo.celerydb.sqlite' DD_CELERY_BROKER_PARAMS: '' celerybeat: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'celery beat'] celeryworker: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'celery worker'] initializer: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'initializer'] postgres: ports: @@ -48,8 +48,10 @@ services: volumes: - defectdojo_postgres_unit_tests:/var/lib/postgresql/data redis: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'redis'] + "webhook.endpoint": + image: mccutchen/go-httpbin:v2.15.0@sha256:24528cf5229d0b70065ac27e6c9e4d96f5452a84a3ce4433e56573c18d96827a volumes: defectdojo_postgres_unit_tests: {} defectdojo_media_unit_tests: {} diff --git a/docker-compose.yml b/docker-compose.yml index df2182f72ef..f236ae4a87c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -103,7 +103,7 @@ services: source: ./docker/extra_settings target: /app/docker/extra_settings postgres: - image: postgres:16.4-alpine@sha256:492898505cb45f9835acc327e98711eaa9298ed804e0bb36f29e08394229550d + image: postgres:17.1-alpine@sha256:0d9624535618a135c5453258fd629f4963390338b11aaffb92292c12df3a6c17 environment: POSTGRES_DB: ${DD_DATABASE_NAME:-defectdojo} POSTGRES_USER: ${DD_DATABASE_USER:-defectdojo} @@ -111,7 +111,7 @@ volumes: - defectdojo_postgres:/var/lib/postgresql/data redis: - image: redis:7.2.5-alpine@sha256:0bc09d9f486508aa42ecc2f18012bb1e3a1b2744ef3a6ad30942fa12579f0b03 + image: redis:7.2.5-alpine@sha256:6aaf3f5e6bc8a592fbfe2cccf19eb36d27c39d12dab4f4b01556b7449e7b1f44 volumes: - defectdojo_redis:/data volumes: diff --git a/docker/docker-compose-check.sh b/docker/docker-compose-check.sh index b51cf45674f..d24419de2ee 100755 --- a/docker/docker-compose-check.sh +++ b/docker/docker-compose-check.sh @@ -6,11 +6,11 @@ current=$(docker compose version --short) echo 'Checking docker compose version' if [[ $main -lt 2 ]]; then - echo "$current is not a supported docker-compose version, please upgrade to the minimum supported version: 2.0" + echo "$current is not a supported 'docker compose' version, please upgrade to the minimum supported version: 2.0" exit 1 elif [[ $main -eq 1 ]]; then if [[ $minor -lt 28 ]]; then - echo "$current is not supported docker-compose version, please upgrade to minimal supported version:1.28" + echo "$current is not a supported 'docker compose' version, please upgrade to the minimum supported version: 1.28" exit 1 fi fi diff --git a/docker/entrypoint-initializer.sh b/docker/entrypoint-initializer.sh index c6f86970d89..08e77dc46ca 100755 --- a/docker/entrypoint-initializer.sh +++ b/docker/entrypoint-initializer.sh @@ -154,7 +154,7 @@ EOD echo
"Importing fixtures all at once" python3 manage.py loaddata system_settings initial_banner_conf product_type test_type \ development_environment benchmark_type benchmark_category benchmark_requirement \ - language_type objects_review regulation initial_surveys role + language_type objects_review regulation initial_surveys role sla_configurations echo "UPDATE dojo_system_settings SET jira_webhook_secret='$DD_JIRA_WEBHOOK_SECRET'" | python manage.py dbshell diff --git a/docker/extra_settings/README.md b/docker/extra_settings/README.md index e919e1917bc..b3a8fc0eddb 100644 --- a/docker/extra_settings/README.md +++ b/docker/extra_settings/README.md @@ -6,7 +6,7 @@ If a file if placed here, it will be copied on startup to `dojo/settings/local_s For an example, see [template-local_settings](../../dojo/settings/template-local_settings) Please note this copy action could fail if you have mounted the full `dojo/` folder, but that is owned by a different user/group. -That's why this copy action only happens in docker-compose release mode, and not in dev/debug/unit_tests/integration_tests modes. +That's why this copy action only happens in docker compose release mode, and not in dev/debug/unit_tests/integration_tests modes. For advanced usage you can also place a `settings.dist.py` or `settings.py` file. These will also be copied on startup to dojo/settings. diff --git a/docker/install_chrome_dependencies.py b/docker/install_chrome_dependencies.py index 1b8f29585ea..b0ddda14755 100644 --- a/docker/install_chrome_dependencies.py +++ b/docker/install_chrome_dependencies.py @@ -10,7 +10,13 @@ def find_packages(library_name): - stdout = run_command(["apt-file", "search", library_name]) + stdout, stderr, status_code = run_command(["apt-file", "search", library_name]) + # Check if ldd has failed for a good reason, or if there are no results + if status_code != 0: + # Any other case should be be caught + msg = f"apt-file search (exit code {status_code}): {stderr}" + raise ValueError(msg) + if not stdout.strip(): return [] libs = [line.split(":")[0] for line in stdout.strip().split("\n")] @@ -18,35 +24,33 @@ def find_packages(library_name): def run_command(cmd, cwd=None, env=None): - result = subprocess.run(cmd, cwd=cwd, env=env, capture_output=True, text=True) - return result.stdout + # Do not raise exception here because some commands are too loose with negative exit codes + result = subprocess.run(cmd, cwd=cwd, env=env, capture_output=True, text=True, check=False) + return result.stdout.strip(), result.stderr.strip(), result.returncode def ldd(file_path): - stdout = run_command(["ldd", file_path]) - # For simplicity, I'm assuming if we get an error, the code is non-zero. - try: - result = subprocess.run( - ["ldd", file_path], capture_output=True, text=True, - ) - stdout = result.stdout - code = result.returncode - except subprocess.CalledProcessError: - stdout = "" - code = 1 - return stdout, code + stdout, stderr, status_code = run_command(["ldd", file_path]) + # Check if ldd has failed for a good reason, or if there are no results + if status_code != 0: + # It is often the case when stdout will be empty. 
This is not an error + if not stdout: + return stdout, status_code + # Any other case should be be caught + msg = f"ldd (exit code {status_code}): {stderr}" + raise ValueError(msg) + + return stdout, status_code raw_deps = ldd("/opt/chrome/chrome") dependencies = raw_deps[0].splitlines() - missing_deps = { r[0].strip() for d in dependencies for r in [d.split("=>")] if len(r) == 2 and r[1].strip() == "not found" } - missing_packages = [] for d in missing_deps: all_packages = find_packages(d) @@ -59,5 +63,4 @@ def ldd(file_path): ] for p in packages: missing_packages.append(p) - logger.info("missing_packages: " + (" ".join(missing_packages))) diff --git a/docker/setEnv.sh b/docker/setEnv.sh index c4c6b9d7ef2..b9336535e39 100755 --- a/docker/setEnv.sh +++ b/docker/setEnv.sh @@ -5,7 +5,6 @@ target_dir="${0%/*}/.." override_link='docker-compose.override.yml' override_file_dev='docker-compose.override.dev.yml' -override_file_debug='docker-compose.override.debug.yml' override_file_unit_tests='docker-compose.override.unit_tests.yml' override_file_unit_tests_cicd='docker-compose.override.unit_tests_cicd.yml' override_file_integration_tests='docker-compose.override.integration_tests.yml' @@ -77,19 +76,6 @@ function set_dev { fi } -function set_debug { - get_current - if [ "${current_env}" != debug ] - then - docker compose down - rm -f ${override_link} - ln -s ${override_file_debug} ${override_link} - echo "Now using 'debug' configuration." - else - echo "Already using 'debug' configuration." - fi -} - function set_unit_tests { get_current if [ "${current_env}" != unit_tests ] diff --git a/docs/assets/icons/logo.svg b/docs/assets/icons/logo.svg index 71a24baac76..75983a52716 100644 --- a/docs/assets/icons/logo.svg +++ b/docs/assets/icons/logo.svg @@ -1,244 +1,15 @@ - - - - - - - - image/svg+xml - - - - - - - - - + + + + + + + \ No newline at end of file diff --git a/docs/config.dev.toml b/docs/config.dev.toml index 65fff4564ba..de3d1b24c36 100644 --- a/docs/config.dev.toml +++ b/docs/config.dev.toml @@ -77,6 +77,12 @@ weight = 1 pre = "" url = "https://github.com/DefectDojo/django-DefectDojo" +[[menu.main]] + name = "Knowledge Base" + weight = 50 + pre = "" + url = "https://support.defectdojo.com" + [markup] [markup.goldmark] [markup.goldmark.renderer] diff --git a/docs/config.master.toml b/docs/config.master.toml index 29c4e0a6adc..22f2f7748ab 100644 --- a/docs/config.master.toml +++ b/docs/config.master.toml @@ -77,6 +77,12 @@ weight = 1 pre = "" url = "https://github.com/DefectDojo/django-DefectDojo" +[[menu.main]] + name = "Knowledge Base" + weight = 50 + pre = "" + url = "https://support.defectdojo.com" + [markup] [markup.goldmark] [markup.goldmark.renderer] diff --git a/docs/content/en/_index.md b/docs/content/en/_index.md index ce75fcc5b88..7dceb1bf342 100644 --- a/docs/content/en/_index.md +++ b/docs/content/en/_index.md @@ -40,7 +40,7 @@ The open-source edition is [available on GitHub](https://github.com/DefectDojo/django-DefectDojo). A running example is available on [our demo server](https://demo.defectdojo.org), -using the credentials `admin` / `defectdojo@demo#appsec`. Note: The demo +using the credentials `admin` / `1Defectdojo@demo#appsec`. Note: The demo server is refreshed regularly and provisioned with some sample data. 
### DefectDojo Pro and Enterprise diff --git a/docs/content/en/contributing/documentation.md b/docs/content/en/contributing/documentation.md index 95313d28344..7410f822eb3 100644 --- a/docs/content/en/contributing/documentation.md +++ b/docs/content/en/contributing/documentation.md @@ -14,7 +14,7 @@ Static files for the website are built with github actions and are published in th 2. Install JavaScript packages - To build or update your site’s CSS resources, you also need PostCSS to create the final assets. If you need to install it, you must have a recent version of NodeJS installed on your machine so you can use npm, the Node package manager. By default, npm installs tools under the directory where you run npm install: + To build or update your site's CSS resources, you also need PostCSS to create the final assets. If you need to install it, you must have a recent version of NodeJS installed on your machine so you can use npm, the Node package manager. By default, npm installs tools under the directory where you run npm install: {{< highlight bash >}} cd docs diff --git a/docs/content/en/contributing/how-to-write-a-parser.md b/docs/content/en/contributing/how-to-write-a-parser.md index 89545111344..5652f0dbc59 100644 --- a/docs/content/en/contributing/how-to-write-a-parser.md +++ b/docs/content/en/contributing/how-to-write-a-parser.md @@ -15,10 +15,10 @@ All commands assume that you're located at the root of the django-DefectDojo clo - Checkout `dev` and make sure you're up to date with the latest changes. - It's advised that you create a dedicated branch for your development, such as `git checkout -b parser-name`. -It is easiest to use the docker-compose deployment as it has hot-reload capbility for uWSGI. -Set up your environment to use the debug environment: +It is easiest to use the docker compose deployment as it has hot-reload capability for uWSGI. +Set up your environment to use the dev environment: -`$ docker/setEnv.sh debug` +`$ docker/setEnv.sh dev` Please have a look at [DOCKER.md](https://github.com/DefectDojo/django-DefectDojo/blob/master/readme-docs/DOCKER.md) for more details. @@ -27,7 +27,7 @@ Please have a look at [DOCKER.md](https://github.com/DefectDojo/django-DefectDoj You will want to build your docker images locally, and eventually pass in your local user's `uid` to be able to write to the image (handy for database migration files). Assuming your user's `uid` is `1000`, then: {{< highlight bash >}} -$ docker-compose build --build-arg uid=1000 +$ docker compose build --build-arg uid=1000 {{< /highlight >}} ## Which files do you need to modify? @@ -94,7 +94,7 @@ class MyToolParser(object): ## API Parsers -DefectDojo has a limited number of API parsers. While we won’t remove these connectors, adding API connectors has been problematic and thus we cannot accept new API parsers / connectors from the community at this time for supportability reasonsing. To maintain a high quality API connector, it is necessary to have a license to the tool. To get that license requires partnership with the author or vendor. We're close to announcing a new program to help address this and bring API connectors to DefectDojo. +DefectDojo has a limited number of API parsers. While we won't remove these connectors, adding API connectors has been problematic and thus we cannot accept new API parsers / connectors from the community at this time for supportability reasons. To maintain a high quality API connector, it is necessary to have a license to the tool.
To get that license requires partnership with the author or vendor. We're close to announcing a new program to help address this and bring API connectors to DefectDojo. ## Template Generator @@ -279,7 +279,7 @@ This ensures the file is closed at the end of the with statement, even if an exc ### Test database -To test your unit tests locally, you first need to grant some rights. Get your MySQL root password from the docker-compose logs, login as root and issue the following commands: +To test your unit tests locally, you first need to grant some rights. Get your MySQL root password from the docker compose logs, login as root and issue the following commands: {{< highlight mysql >}} MYSQL> grant all privileges on test_defectdojo.* to defectdojo@'%'; @@ -291,17 +291,29 @@ MYSQL> flush privileges; This local command will launch the unit test for your new parser {{< highlight bash >}} -$ docker-compose exec uwsgi bash -c 'python manage.py test unittests.tools.. -v2' +$ docker compose exec uwsgi bash -c 'python manage.py test unittests.tools.. -v2' +{{< /highlight >}} + +or like this: + +{{< highlight bash >}} +$ ./dc-unittest.sh --test-case unittests.tools.. {{< /highlight >}} Example for the blackduck hub parser: {{< highlight bash >}} -$ docker-compose exec uwsgi bash -c 'python manage.py test unittests.tools.test_blackduck_csv_parser.TestBlackduckHubParser -v2' +$ docker compose exec uwsgi bash -c 'python manage.py test unittests.tools.test_blackduck_csv_parser.TestBlackduckHubParser -v2' +{{< /highlight >}} + +or like this: + +{{< highlight bash >}} +$ ./dc-unittest.sh --test-case unittests.tools.test_blackduck_csv_parser.TestBlackduckHubParser {{< /highlight >}} {{% alert title="Information" color="info" %}} -If you want to run all unit tests, simply run `$ docker-compose exec uwsgi bash -c 'python manage.py test unittests -v2'` +If you want to run all unit tests, simply run `$ docker compose exec uwsgi bash -c 'python manage.py test unittests -v2'` {{% /alert %}} ### Endpoint validation @@ -330,7 +342,7 @@ In the event where you'd have to change the model, e.g. to increase a database c * Create a new migration file in dojo/db_migrations by running and including as part of your PR {{< highlight bash >}} - $ docker-compose exec uwsgi bash -c 'python manage.py makemigrations -v2' + $ docker compose exec uwsgi bash -c 'python manage.py makemigrations -v2' {{< /highlight >}} ### Accept a different type of file to upload diff --git a/docs/content/en/getting_started/architecture.md b/docs/content/en/getting_started/architecture.md index 676d8184024..fe53d0ef3f1 100644 --- a/docs/content/en/getting_started/architecture.md +++ b/docs/content/en/getting_started/architecture.md @@ -20,8 +20,8 @@ dynamic content. ## Message Broker -The application server sends tasks to a [Message Broker](https://docs.celeryproject.org/en/stable/getting-started/brokers/index.html) -for asynchronous execution. +The application server sends tasks to a [Message Broker](https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/index.html) +for asynchronous execution. Currently, only [Redis](https://github.com/redis/redis) is supported as a broker. 
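The broker hand-off described above is plain Celery usage. As a hedged sketch only: the module name, broker URL, and task below are illustrative placeholders, not DefectDojo's actual configuration.

```python
# Sketch: an app server enqueues a task into Redis, and a Celery worker
# consumes it asynchronously, mirroring the architecture described above.
from celery import Celery

app = Celery("dojo_sketch", broker="redis://localhost:6379/0")

@app.task
def deduplicate_finding(finding_id: int) -> None:
    # Runs in the worker process, outside the HTTP request/response cycle.
    print(f"processing finding {finding_id}")

# .delay() only serializes the call onto the broker and returns immediately;
# a worker started with `celery -A dojo_sketch worker` picks it up.
deduplicate_finding.delay(42)
```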
## Celery Worker diff --git a/docs/content/en/getting_started/installation.md b/docs/content/en/getting_started/installation.md index a127f36e492..8f6affa702e 100644 --- a/docs/content/en/getting_started/installation.md +++ b/docs/content/en/getting_started/installation.md @@ -14,11 +14,11 @@ See instructions in [DOCKER.md](}} -$ docker-compose exec uwsgi /bin/bash -c 'python manage.py jira_status_reconciliation' +$ docker compose exec uwsgi /bin/bash -c 'python manage.py jira_status_reconciliation' {{< /highlight >}} DEBUG output can be obtained via `-v 3`, but only after increasing the logging to DEBUG level in your settings.dist.py or local_settings.py file {{< highlight bash >}} -$ docker-compose exec uwsgi /bin/bash -c 'python manage.py jira_status_reconciliation -v 3' +$ docker compose exec uwsgi /bin/bash -c 'python manage.py jira_status_reconciliation -v 3' {{< /highlight >}} At the end of the command a semicolon-separated CSV summary will be printed. This can be captured by redirecting stdout to a file: {{< highlight bash >}} -$ docker-compose exec uwsgi /bin/bash -c 'python manage.py jira_status_reconciliation > jira_reconciliation.csv' +$ docker compose exec uwsgi /bin/bash -c 'python manage.py jira_status_reconciliation > jira_reconciliation.csv' {{< /highlight >}} diff --git a/docs/content/en/integrations/languages.md b/docs/content/en/integrations/languages.md index 17a322c8f90..a78ed137e69 100644 --- a/docs/content/en/integrations/languages.md +++ b/docs/content/en/integrations/languages.md @@ -2,7 +2,7 @@ title: "Languages and lines of code" description: "You can import an analysis of languages used in a project, including lines of code." draft: false -weight: 9 +weight: 10 --- ## Import of languages for a project diff --git a/docs/content/en/integrations/ldap-authentication.md b/docs/content/en/integrations/ldap-authentication.md index 17697043736..307f1029a0a 100644 --- a/docs/content/en/integrations/ldap-authentication.md +++ b/docs/content/en/integrations/ldap-authentication.md @@ -116,7 +116,7 @@ Read the docs for Django Authentication with LDAP here: https://django-auth-ldap #### docker-compose.yml -In order to pass the variables to the settings.dist.py file via docker, it's a good idea to add these to the docker-compose file. +In order to pass the variables to the settings.dist.py file via docker, it's a good idea to add these to the docker compose file. You can do this by adding the following variables to the environment section for the uwsgi image: ```yaml diff --git a/docs/content/en/integrations/notification_webhooks/_index.md b/docs/content/en/integrations/notification_webhooks/_index.md new file mode 100644 index 00000000000..cbe9294041e --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/_index.md @@ -0,0 +1,79 @@ +--- +title: "Notification Webhooks (experimental)" +description: "How to set up and use webhooks" +weight: 7 +chapter: true +--- + +Webhooks are HTTP requests coming from the DefectDojo instance towards a user-defined webserver which expects this kind of incoming traffic. + +## Transition graph + +It is not unusual that in some cases a webhook cannot be delivered. This is usually connected to network issues, server misconfiguration, or upgrades running on the server. DefectDojo needs to react to these outages and might temporarily or permanently disable related endpoints. The following graph shows how it might change the status of the webhook definition based on HTTP responses (or manual user interaction).
+ +```mermaid +flowchart TD + + START{{Endpoint created}} + ALL{All states} + STATUS_ACTIVE([STATUS_ACTIVE]) + STATUS_INACTIVE_TMP + STATUS_INACTIVE_PERMANENT + STATUS_ACTIVE_TMP([STATUS_ACTIVE_TMP]) + END{{Endpoint removed}} + + START ==> STATUS_ACTIVE + STATUS_ACTIVE --HTTP 200 or 201 --> STATUS_ACTIVE + STATUS_ACTIVE --HTTP 5xx<br/>or HTTP 429<br/>or Timeout--> STATUS_INACTIVE_TMP + STATUS_ACTIVE --Any HTTP 4xx response<br/>or any other HTTP response<br/>or non-HTTP error--> STATUS_INACTIVE_PERMANENT + STATUS_INACTIVE_TMP -.After 60s.-> STATUS_ACTIVE_TMP + STATUS_ACTIVE_TMP --HTTP 5xx<br/>or HTTP 429<br/>or Timeout<br/>within 24h<br/>from the first error-->STATUS_INACTIVE_TMP + STATUS_ACTIVE_TMP -.After 24h.-> STATUS_ACTIVE + STATUS_ACTIVE_TMP --HTTP 200 or 201 --> STATUS_ACTIVE_TMP + STATUS_ACTIVE_TMP --HTTP 5xx<br/>or HTTP 429<br/>or Timeout<br/>within 24h from the first error<br/>or any other HTTP response or error--> STATUS_INACTIVE_PERMANENT + ALL ==Activation by user==> STATUS_ACTIVE + ALL ==Deactivation by user==> STATUS_INACTIVE_PERMANENT + ALL ==Removal of endpoint by user==> END +``` + +Notes: + +1. Transitions: + - bold: manual changes by user + - dotted: automated by celery + - others: based on responses on webhooks +1. Nodes: + - Stadium-shaped: Active - subsequent webhooks can be sent + - Rectangles: Inactive - webhook delivery will fail (and will not be retried) + - Hexagonal: Initial and final states + - Rhombus: All states (meta node to make the graph more readable) + +## Body and Headers + +The body of each request is JSON which contains data about related events, like the names and IDs of affected elements. +Examples of bodies are on pages related to each event (see below). + +Each request contains the following headers. They might be useful for better handling of events by the server receiving them. + +```yaml +User-Agent: DefectDojo- +X-DefectDojo-Event: +X-DefectDojo-Instance: +``` +## Disclaimer + +This functionality is new and in experimental mode. This means the functionality might introduce breaking changes in following DefectDojo releases and might not be considered final. + +However, the community is open to feedback to make this functionality better and get it stable as soon as possible. + +## Roadmap + +There are a couple of known issues that are expected to be resolved as soon as the core functionality is considered ready. + +- Support events - Not only adding products, product types, engagements, tests, or upload of new scans but also events around SLA +- User webhook - right now only admins can define webhooks; in the future, users will also be able to define their own +- Improvement in UI - add filtering and pagination of webhook endpoints + +## Events + + diff --git a/docs/content/en/integrations/notification_webhooks/engagement_added.md b/docs/content/en/integrations/notification_webhooks/engagement_added.md new file mode 100644 index 00000000000..36e31586a50 --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/engagement_added.md @@ -0,0 +1,39 @@ +--- +title: "Event: engagement_added" +weight: 3 +chapter: true +--- + +## Event HTTP header +```yaml +X-DefectDojo-Event: engagement_added +``` + +## Event HTTP body +```json +{ + "description": "", + "title": "", + "engagement": { + "id": 7, + "name": "notif eng", + "url_api": "http://localhost:8080/api/v2/engagements/7/", + "url_ui": "http://localhost:8080/engagement/7" + }, + "product": { + "id": 4, + "name": "notif prod", + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4" + }, + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "url_api": "http://localhost:8080/api/v2/engagements/7/", + "url_ui": "http://localhost:8080/engagement/7", + "user": null +} +``` diff --git a/docs/content/en/integrations/notification_webhooks/product_added.md b/docs/content/en/integrations/notification_webhooks/product_added.md new file mode 100644 index 00000000000..dea3cd27f2a --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/product_added.md @@ -0,0 +1,33 @@ +--- +title: "Event: product_added" +weight: 2 +chapter: true +--- + +## Event HTTP header +```yaml +X-DefectDojo-Event: product_added +``` + +## Event HTTP body +```json +{ + "description": "", + "title": "", + "product": { + "id": 4, + "name": "notif
prod", + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4" + }, + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4", + "user": null +} +``` diff --git a/docs/content/en/integrations/notification_webhooks/product_type_added.md b/docs/content/en/integrations/notification_webhooks/product_type_added.md new file mode 100644 index 00000000000..e5db4139297 --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/product_type_added.md @@ -0,0 +1,27 @@ +--- +title: "Event: product_type_added" +weight: 1 +chapter: true +--- + +## Event HTTP header +```yaml +X-DefectDojo-Event: product_type_added +``` + +## Event HTTP body +```json +{ + "description": "", + "title": "", + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4", + "user": null +} +``` diff --git a/docs/content/en/integrations/notification_webhooks/scan_added.md b/docs/content/en/integrations/notification_webhooks/scan_added.md new file mode 100644 index 00000000000..ea1a6bffa3d --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/scan_added.md @@ -0,0 +1,91 @@ +--- +title: "Event: scan_added and scan_added_empty" +weight: 5 +chapter: true +--- + +Event `scan_added_empty` describes a situation when reimport did not affect the existing test (no finding has been created or closed). 
+ +## Event HTTP header for scan_added +```yaml +X-DefectDojo-Event: scan_added +``` + +## Event HTTP header for scan_added_empty +```yaml +X-DefectDojo-Event: scan_added_empty +``` + +## Event HTTP body +```json +{ + "description": "", + "title": "", + "engagement": { + "id": 7, + "name": "notif eng", + "url_api": "http://localhost:8080/api/v2/engagements/7/", + "url_ui": "http://localhost:8080/engagement/7" + }, + "finding_count": 4, + "findings": { + "mitigated": [ + { + "id": 233, + "severity": "Medium", + "title": "Mitigated Finding", + "url_api": "http://localhost:8080/api/v2/findings/233/", + "url_ui": "http://localhost:8080/finding/233" + } + ], + "new": [ + { + "id": 232, + "severity": "Critical", + "title": "New Finding", + "url_api": "http://localhost:8080/api/v2/findings/232/", + "url_ui": "http://localhost:8080/finding/232" + } + ], + "reactivated": [ + { + "id": 234, + "severity": "Low", + "title": "Reactivated Finding", + "url_api": "http://localhost:8080/api/v2/findings/234/", + "url_ui": "http://localhost:8080/finding/234" + } + ], + "untouched": [ + { + "id": 235, + "severity": "Info", + "title": "Untouched Finding", + "url_api": "http://localhost:8080/api/v2/findings/235/", + "url_ui": "http://localhost:8080/finding/235" + } + ] + }, + "product": { + "id": 4, + "name": "notif prod", + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4" + }, + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "test": { + "id": 90, + "title": "notif test", + "url_api": "http://localhost:8080/api/v2/tests/90/", + "url_ui": "http://localhost:8080/test/90" + }, + "url_api": "http://localhost:8080/api/v2/tests/90/", + "url_ui": "http://localhost:8080/test/90", + "user": null +} +``` diff --git a/docs/content/en/integrations/notification_webhooks/test_added.md b/docs/content/en/integrations/notification_webhooks/test_added.md new file mode 100644 index 00000000000..bf6d71dc6f5 --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/test_added.md @@ -0,0 +1,45 @@ +--- +title: "Event: test_added" +weight: 4 +chapter: true +--- + +## Event HTTP header +```yaml +X-DefectDojo-Event: test_added +``` + +## Event HTTP body +```json +{ + "description": "", + "title": "", + "engagement": { + "id": 7, + "name": "notif eng", + "url_api": "http://localhost:8080/api/v2/engagements/7/", + "url_ui": "http://localhost:8080/engagement/7" + }, + "product": { + "id": 4, + "name": "notif prod", + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4" + }, + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "test": { + "id": 90, + "title": "notif test", + "url_api": "http://localhost:8080/api/v2/tests/90/", + "url_ui": "http://localhost:8080/test/90" + }, + "url_api": "http://localhost:8080/api/v2/tests/90/", + "url_ui": "http://localhost:8080/test/90", + "user": null +} +``` diff --git a/docs/content/en/integrations/notifications.md b/docs/content/en/integrations/notifications.md index d5af295f0eb..803388797cd 100644 --- a/docs/content/en/integrations/notifications.md +++ b/docs/content/en/integrations/notifications.md @@ -18,6 +18,7 @@ The following notification methods currently exist: - Email - Slack - Microsoft Teams + - Webhooks - Alerts within 
DefectDojo (default) You can set these notifications on a global scope (if you have @@ -124,4 +125,8 @@ However, there is a specific use-case when the user decides to disable notificat The scope of this setting is customizable (see environmental variable `DD_NOTIFICATIONS_SYSTEM_LEVEL_TRUMP`). -For more information about this behavior see the [related pull request #9699](https://github.com/DefectDojo/django-DefectDojo/pull/9699/) \ No newline at end of file +For more information about this behavior see the [related pull request #9699](https://github.com/DefectDojo/django-DefectDojo/pull/9699/) + +## Webhooks (experimental) + +DefectDojo also supports webhooks that follow the same events as other notifications (you can be notified in the same situations). Details about setup are described on the [related page](../notification_webhooks/). diff --git a/docs/content/en/integrations/parsers/file/appcheck_web_application_scanner.md b/docs/content/en/integrations/parsers/file/appcheck_web_application_scanner.md new file mode 100644 index 00000000000..5d003c8c4ec --- /dev/null +++ b/docs/content/en/integrations/parsers/file/appcheck_web_application_scanner.md @@ -0,0 +1,8 @@ +--- +title: "AppCheck Web Application Scanner" +toc_hide: true +--- +Accepts AppCheck Web Application Scanner output in .json format. + +### Sample Scan Data +Sample AppCheck Web Application Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/appcheck_web_application_scanner). \ No newline at end of file diff --git a/docs/content/en/integrations/parsers/file/aws_inspector2.md b/docs/content/en/integrations/parsers/file/aws_inspector2.md new file mode 100644 index 00000000000..d7507b61688 --- /dev/null +++ b/docs/content/en/integrations/parsers/file/aws_inspector2.md @@ -0,0 +1,24 @@ +--- +title: "AWS Inspector2 Scanner" +toc_hide: true +--- + +### File Types +AWS Inspector2 reports can be imported in JSON format. The Inspector2 name comes from API calls to the "modern" Inspector API - `aws inspector2` - as opposed to Classic Inspector (the previous version of the service). This is an example of how such a report can be generated: `aws inspector2 list-findings --filter-criteria '{"resourceId":[{"comparison":"EQUALS","value":"i-instance_id_here"}]}' --region us-east-1 > inspector2_findings.json` + + +This parser can help retrieve findings from a delegated admin account for AWS Inspector or from a standalone AWS account. The parser is developed mostly for a scenario where findings are obtained for a specific resource like an ECR image or an instance, and uploaded to a test in a DefectDojo engagement that represents a branch from a git repository. + + +A minimal valid JSON file with no findings: + +```json +{ + "findings": [] +} +``` + +The detailed API response format can be found [here](https://docs.aws.amazon.com/inspector/v2/APIReference/API_Finding.html). + +### Sample Scan Data +Sample AWS Inspector2 findings can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/aws_inspector2).
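The same export can also be scripted instead of using the CLI one-liner above. A hedged boto3 sketch follows: the instance id and region are placeholders, and the pagination call assumes the public inspector2 `list_findings` API.

```python
# Sketch: collect Inspector2 findings for one resource and write them in the
# {"findings": [...]} shape that the parser described above accepts.
import json

import boto3

client = boto3.client("inspector2", region_name="us-east-1")
findings = []
for page in client.get_paginator("list_findings").paginate(
    filterCriteria={
        "resourceId": [{"comparison": "EQUALS", "value": "i-instance_id_here"}],
    },
):
    findings.extend(page["findings"])

with open("inspector2_findings.json", "w") as f:
    # default=str renders the datetime fields the SDK returns as objects.
    json.dump({"findings": findings}, f, default=str)
```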
diff --git a/docs/content/en/integrations/parsers/file/blackduck_binary_analysis.md b/docs/content/en/integrations/parsers/file/blackduck_binary_analysis.md index a51cea701a3..4d272b2d5be 100644 --- a/docs/content/en/integrations/parsers/file/blackduck_binary_analysis.md +++ b/docs/content/en/integrations/parsers/file/blackduck_binary_analysis.md @@ -6,12 +6,12 @@ toc_hide: true #### **What** #### Black Duck Binary Analysis gives you visibility into open source and third-party dependencies that have been compiled into executables, libraries, containers, and firmware. You can analyze individual files using an intuitive user interface or Black Duck multifactor open source detection, which automates the scanning of binary artifacts. -Using a combination of static and string analysis techniques coupled with fuzzy matching against the Black Duck KnowledgeBase, Black Duck Binary Analysis quickly and reliably identifies components, even if they’ve been modified. +Using a combination of static and string analysis techniques coupled with fuzzy matching against the Black Duck KnowledgeBase, Black Duck Binary Analysis quickly and reliably identifies components, even if they've been modified. For more info, check out Black Duck Binary Analysis [here](https://www.synopsys.com/software-integrity/software-composition-analysis-tools/binary-analysis.html). #### **Why** #### -Open source vulnerabilities aren’t the only security issues that might be lurking in application binaries. +Open source vulnerabilities aren't the only security issues that might be lurking in application binaries. Black Duck Binary Analysis can also detect if sensitive information like email addresses, authorization tokens, compiler switches, and passwords are exposed, and it identifies when mobile applications request excessive permissions—all of which puts your organization and users' personal data at risk. diff --git a/docs/content/en/integrations/parsers/file/invicti.md b/docs/content/en/integrations/parsers/file/invicti.md new file mode 100644 index 00000000000..c0ffda1a48e --- /dev/null +++ b/docs/content/en/integrations/parsers/file/invicti.md @@ -0,0 +1,9 @@ +--- +title: "Invicti" +toc_hide: true +--- +Vulnerabilities List - JSON report + +### Sample Scan Data + +Sample Invicti scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/invicti). diff --git a/docs/content/en/integrations/parsers/file/krakend_audit.md b/docs/content/en/integrations/parsers/file/krakend_audit.md new file mode 100644 index 00000000000..9598ce343b8 --- /dev/null +++ b/docs/content/en/integrations/parsers/file/krakend_audit.md @@ -0,0 +1,11 @@ +--- +title: "KrakenD Audit Scan" +toc_hide: true +--- +Import KrakenD Audit Scan results in JSON format. You can use the following command to audit the KrakenD configuration which then can be uploaded to DefectDojo: +``` +krakend audit -c krakend.json -f "{{ marshal . }}" >> recommendations.json +``` + +### Sample Scan Data +Sample KrakenD Audit scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/krakend_audit). 
\ No newline at end of file diff --git a/docs/content/en/integrations/parsers/file/legitify.md b/docs/content/en/integrations/parsers/file/legitify.md new file mode 100644 index 00000000000..bb9b2970aee --- /dev/null +++ b/docs/content/en/integrations/parsers/file/legitify.md @@ -0,0 +1,9 @@ +--- +title: "Legitify" +toc_hide: true +--- +### File Types +This DefectDojo parser accepts JSON files (in flattened format) from Legitify. For further details regarding the results, please consult the relevant [documentation](https://github.com/Legit-Labs/legitify?tab=readme-ov-file#output-options). + +### Sample Scan Data +Sample scan data for testing purposes can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/legitify). \ No newline at end of file diff --git a/docs/content/en/integrations/parsers/file/mobsf_scorecard.md b/docs/content/en/integrations/parsers/file/mobsf_scorecard.md new file mode 100644 index 00000000000..947228d9402 --- /dev/null +++ b/docs/content/en/integrations/parsers/file/mobsf_scorecard.md @@ -0,0 +1,8 @@ +--- +title: "MobSF Scorecard Scanner" +toc_hide: true +--- +Export a JSON file using the API, api/v1/report_json. + +### Sample Scan Data +Sample MobSF Scorecard Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/mobsf_scorecard). \ No newline at end of file diff --git a/docs/content/en/integrations/parsers/file/netsparker.md b/docs/content/en/integrations/parsers/file/netsparker.md index 7e46af07b12..0be436e185b 100644 --- a/docs/content/en/integrations/parsers/file/netsparker.md +++ b/docs/content/en/integrations/parsers/file/netsparker.md @@ -4,5 +4,8 @@ toc_hide: true --- Vulnerabilities List - JSON report +[Netsparker has now become Invicti](https://www.invicti.com/blog/news/netsparker-is-now-invicti-signaling-a-new-era-for-modern-appsec/). Please plan to migrate automation scripts to use the [Invicti Scan](../invicti.md). + ### Sample Scan Data + Sample Netsparker scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/netsparker).
\ No newline at end of file diff --git a/docs/content/en/integrations/parsers/file/ptart.md b/docs/content/en/integrations/parsers/file/ptart.md new file mode 100644 index 00000000000..5ce56967493 --- /dev/null +++ b/docs/content/en/integrations/parsers/file/ptart.md @@ -0,0 +1,14 @@ +--- +title: "PTART Reports" +toc_hide: true +--- + +### What is PTART? +PTART is a Pentest and Security Auditing Reporting Tool developed by the Michelin CERT (https://github.com/certmichelin/PTART). + +### Importing Reports +Reports can be exported to JSON format from the PTART web UI, and imported into DefectDojo by using the "PTART Report" importer. + +### Sample Scan Data +Sample scan data for testing purposes can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/ptart). + diff --git a/docs/content/en/integrations/parsers/file/qualys_hacker_guardian.md b/docs/content/en/integrations/parsers/file/qualys_hacker_guardian.md new file mode 100644 index 00000000000..e938970a385 --- /dev/null +++ b/docs/content/en/integrations/parsers/file/qualys_hacker_guardian.md @@ -0,0 +1,9 @@ +--- +title: "Qualys Hacker Guardian Scan" +toc_hide: true +--- +Qualys Hacker Guardian CSV export + +### Sample Scan Data + +Sample Qualys Hacker Guardian scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/qualys_hacker_guardian). \ No newline at end of file diff --git a/docs/content/en/integrations/parsers/file/threat_composer.md b/docs/content/en/integrations/parsers/file/threat_composer.md new file mode 100644 index 00000000000..a5097f90066 --- /dev/null +++ b/docs/content/en/integrations/parsers/file/threat_composer.md @@ -0,0 +1,9 @@ +--- +title: "Threat Composer" +toc_hide: true +--- +### File Types +This DefectDojo parser accepts JSON files from Threat Composer. The tool supports the [export](https://github.com/awslabs/threat-composer/tree/main?#features) of a JSON report from the browser's local storage to a local file. + +### Sample Scan Data +Sample scan data for testing purposes can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/threat_composer). \ No newline at end of file diff --git a/docs/content/en/integrations/rate_limiting.md b/docs/content/en/integrations/rate_limiting.md index 0cac784c5f5..1ea76ace5b3 100644 --- a/docs/content/en/integrations/rate_limiting.md +++ b/docs/content/en/integrations/rate_limiting.md @@ -2,7 +2,7 @@ title: "Rate Limiting" description: "Configurable rate limiting on the login page to mitigate brute force attacks" draft: false -weight: 9 +weight: 11 --- diff --git a/docs/content/en/usage/features.md b/docs/content/en/usage/features.md index f1020ffd4c0..7fad563b138 100644 --- a/docs/content/en/usage/features.md +++ b/docs/content/en/usage/features.md @@ -244,7 +244,7 @@ The environment variable will override the settings in `settings.dist.py`, repla The available algorithms are: -DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL +DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL (value for `DD_DEDUPLICATION_ALGORITHM_PER_PARSER`: `unique_id_from_tool`) : The deduplication occurs based on finding.unique_id_from_tool which is a unique technical id existing in the source tool. Few scanners populate this @@ -266,12 +266,12 @@ DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL able to recognise that findings found in previous scans are actually the same as the new findings. -DEDUPE_ALGO_HASH_CODE +DEDUPE_ALGO_HASH_CODE (value for `DD_DEDUPLICATION_ALGORITHM_PER_PARSER`: `hash_code`) : The deduplication occurs based on finding.hash_code. The hash_code itself is configurable for each scanner in parameter `HASHCODE_FIELDS_PER_SCANNER`. -DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE +DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE (value for `DD_DEDUPLICATION_ALGORITHM_PER_PARSER`: `unique_id_from_tool_or_hash_code`) : A finding is a duplicate with another if they have the same unique_id_from_tool OR the same hash_code. @@ -284,7 +284,7 @@ DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE cross-parser deduplication -DEDUPE_ALGO_LEGACY +DEDUPE_ALGO_LEGACY (value for `DD_DEDUPLICATION_ALGORITHM_PER_PARSER`: `legacy`) : This is the algorithm that was in place before the configuration per parser was made possible, and it is also the default one for backward compatibility reasons. @@ -357,7 +357,7 @@ to the hashcode configuration or calculation logic. We will mention this in the To regenerate the hashcodes, use the `dedupe` management command: {{< highlight bash >}} -docker-compose exec uwsgi ./manage.py dedupe --hash_code_only +docker compose exec uwsgi ./manage.py dedupe --hash_code_only {{< / highlight >}} This will only regenerate the hashcodes, but will not run any deduplication logic on existing findings.
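Returning to the per-parser configuration above: besides the `DD_DEDUPLICATION_ALGORITHM_PER_PARSER` environment variable, the same choice can be made in a settings file. A hedged sketch of a `local_settings.py` snippet follows, assuming the standard split-settings include (so the dictionaries already exist in scope); the scanner name and field list are placeholders.

```python
# local_settings.py sketch: pick the hash_code algorithm for one parser and
# define which finding fields feed its hash_code.
DEDUPLICATION_ALGORITHM_PER_PARSER["My Tool Scan"] = DEDUPE_ALGO_HASH_CODE  # noqa: F821
HASHCODE_FIELDS_PER_SCANNER["My Tool Scan"] = ["title", "cwe", "file_path"]  # noqa: F821
```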
@@ -365,14 +365,14 @@ If you want to run deduplication again on existing findings to make sure any dup hashcode config are marked as such, run: {{< highlight bash >}} -docker-compose exec uwsgi ./manage.py dedupe +docker compose exec uwsgi ./manage.py dedupe {{< / highlight >}} The deduplication part of this command will run the deduplication for each finding in a celery task. If you want to run the deduplication in the foreground process, use: {{< highlight bash >}} -docker-compose exec uwsgi ./manage.py dedupe --dedupe_sync +docker compose exec uwsgi ./manage.py dedupe --dedupe_sync {{< / highlight >}} Please note the deduplication process is resource intensive and can take a long time to complete @@ -502,10 +502,10 @@ You can of course change this default by modifying that stanza. ### Launching from the CLI You can also invoke the SLA notification function from the CLI. For -example, if run from docker-compose: +example, if run from docker compose: {{< highlight bash >}} -$ docker-compose exec uwsgi /bin/bash -c 'python manage.py sla_notifications' +$ docker compose exec uwsgi /bin/bash -c 'python manage.py sla_notifications' {{< / highlight >}} ## Reports diff --git a/docs/package-lock.json b/docs/package-lock.json index 56ef63cc01b..182df8260ac 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -6,7 +6,7 @@ "": { "devDependencies": { "autoprefixer": "10.4.20", - "postcss": "8.4.41", + "postcss": "8.4.49", "postcss-cli": "11.0.0" } }, @@ -585,9 +585,9 @@ } }, "node_modules/picocolors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", - "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", "dev": true }, "node_modules/picomatch": { @@ -612,9 +612,9 @@ } }, "node_modules/postcss": { - "version": "8.4.41", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.41.tgz", - "integrity": "sha512-TesUflQ0WKZqAvg52PWL6kHgLKP6xB6heTOdoYM0Wt2UHyxNa4K25EZZMgKns3BH1RLVbZCREPpLY0rhnNoHVQ==", + "version": "8.4.49", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.49.tgz", + "integrity": "sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==", "dev": true, "funding": [ { @@ -632,8 +632,8 @@ ], "dependencies": { "nanoid": "^3.3.7", - "picocolors": "^1.0.1", - "source-map-js": "^1.2.0" + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" }, "engines": { "node": "^10 || ^12 || >=14" @@ -834,9 +834,9 @@ } }, "node_modules/source-map-js": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", - "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", "dev": true, "engines": { "node": ">=0.10.0" @@ -1372,9 +1372,9 @@ "dev": true }, "picocolors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", - "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", + "version": "1.1.1", + 
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", "dev": true }, "picomatch": { @@ -1390,14 +1390,14 @@ "dev": true }, "postcss": { - "version": "8.4.41", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.41.tgz", - "integrity": "sha512-TesUflQ0WKZqAvg52PWL6kHgLKP6xB6heTOdoYM0Wt2UHyxNa4K25EZZMgKns3BH1RLVbZCREPpLY0rhnNoHVQ==", + "version": "8.4.49", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.49.tgz", + "integrity": "sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==", "dev": true, "requires": { "nanoid": "^3.3.7", - "picocolors": "^1.0.1", - "source-map-js": "^1.2.0" + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" } }, "postcss-cli": { @@ -1504,9 +1504,9 @@ "dev": true }, "source-map-js": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", - "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", "dev": true }, "string-width": { diff --git a/docs/package.json b/docs/package.json index 9eb98f0f32b..15c781ee3f9 100644 --- a/docs/package.json +++ b/docs/package.json @@ -1,6 +1,6 @@ { "devDependencies": { - "postcss": "8.4.41", + "postcss": "8.4.49", "autoprefixer": "10.4.20", "postcss-cli": "11.0.0" } diff --git a/dojo/__init__.py b/dojo/__init__.py index 3b67d86fe5f..be4cc157e16 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. 
from .celery import app as celery_app # noqa: F401 -__version__ = "2.38.0-dev" +__version__ = "2.41.0-dev" __url__ = "https://github.com/DefectDojo/django-DefectDojo" __docs__ = "https://documentation.defectdojo.com" diff --git a/dojo/admin.py b/dojo/admin.py index a2452ce1e54..c40d39e3c23 100644 --- a/dojo/admin.py +++ b/dojo/admin.py @@ -22,29 +22,25 @@ class QuestionChildAdmin(PolymorphicChildModelAdmin): - """ - Base admin class for all child models of Question - """ + + """Base admin class for all child models of Question""" base_model = Question class TextQuestionAdmin(QuestionChildAdmin): - """ - ModelAdmin for a TextQuestion - """ + + """ModelAdmin for a TextQuestion""" class ChoiceQuestionAdmin(QuestionChildAdmin): - """ - ModelAdmin for a ChoiceQuestion - """ + + """ModelAdmin for a ChoiceQuestion""" class QuestionParentAdmin(PolymorphicParentModelAdmin): - """ - Question parent model admin - """ + + """Question parent model admin""" base_model = Question child_models = ( @@ -60,29 +56,25 @@ class QuestionParentAdmin(PolymorphicParentModelAdmin): class AnswerChildAdmin(PolymorphicChildModelAdmin): - """ - Base admin class for all child Answer models - """ + + """Base admin class for all child Answer models""" base_model = Answer class TextAnswerAdmin(AnswerChildAdmin): - """ - ModelAdmin for TextAnswer - """ + + """ModelAdmin for TextAnswer""" class ChoiceAnswerAdmin(AnswerChildAdmin): - """ - ModelAdmin for ChoiceAnswer - """ + + """ModelAdmin for ChoiceAnswer""" class AnswerParentAdmin(PolymorphicParentModelAdmin): - """ - The parent model admin for answer - """ + + """The parent model admin for answer""" list_display = ( "answered_survey", diff --git a/dojo/announcement/views.py b/dojo/announcement/views.py index 6b0cb16bc3c..26160c3236b 100644 --- a/dojo/announcement/views.py +++ b/dojo/announcement/views.py @@ -81,12 +81,11 @@ def dismiss_announcement(request): extra_tags="alert-success", ) return HttpResponseRedirect("dashboard") - else: - messages.add_message( - request, - messages.ERROR, - _("Failed to remove announcement."), - extra_tags="alert-danger", - ) - return render(request, "dojo/dismiss_announcement.html") + messages.add_message( + request, + messages.ERROR, + _("Failed to remove announcement."), + extra_tags="alert-danger", + ) + return render(request, "dojo/dismiss_announcement.html") return render(request, "dojo/dismiss_announcement.html") diff --git a/dojo/api_v2/exception_handler.py b/dojo/api_v2/exception_handler.py index 513c98004b7..8f395026b03 100644 --- a/dojo/api_v2/exception_handler.py +++ b/dojo/api_v2/exception_handler.py @@ -2,6 +2,7 @@ from django.core.exceptions import ValidationError from django.db.models.deletion import RestrictedError +from rest_framework.exceptions import ParseError from rest_framework.response import Response from rest_framework.status import ( HTTP_400_BAD_REQUEST, @@ -20,7 +21,11 @@ def custom_exception_handler(exc, context): # to get the standard error response. response = exception_handler(exc, context) - if isinstance(exc, RestrictedError): + if isinstance(exc, ParseError) and "JSON parse error" in str(exc): + response = Response() + response.status_code = HTTP_400_BAD_REQUEST + response.data = {"message": "JSON request content is malformed"} + elif isinstance(exc, RestrictedError): # An object cannot be deleted because it has dependent objects. 
response = Response() response.status_code = HTTP_409_CONFLICT diff --git a/dojo/api_v2/mixins.py b/dojo/api_v2/mixins.py index e32683c3742..6c6b4792757 100644 --- a/dojo/api_v2/mixins.py +++ b/dojo/api_v2/mixins.py @@ -29,8 +29,7 @@ def delete_preview(self, request, pk=None): def flatten(elem): if isinstance(elem, list): return itertools.chain.from_iterable(map(flatten, elem)) - else: - return [elem] + return [elem] rels = [ { diff --git a/dojo/api_v2/permissions.py b/dojo/api_v2/permissions.py index f7669826830..fe508c92b1b 100644 --- a/dojo/api_v2/permissions.py +++ b/dojo/api_v2/permissions.py @@ -35,8 +35,7 @@ def check_post_permission(request, post_model, post_pk, post_permission): raise ParseError(msg) object = get_object_or_404(post_model, pk=request.data.get(post_pk)) return user_has_permission(request.user, object, post_permission) - else: - return True + return True def check_object_permission( @@ -49,14 +48,13 @@ def check_object_permission( ): if request.method == "GET": return user_has_permission(request.user, object, get_permission) - elif request.method == "PUT" or request.method == "PATCH": + if request.method == "PUT" or request.method == "PATCH": return user_has_permission(request.user, object, put_permission) - elif request.method == "DELETE": + if request.method == "DELETE": return user_has_permission(request.user, object, delete_permission) - elif request.method == "POST": + if request.method == "POST": return user_has_permission(request.user, object, post_permission) - else: - return False + return False class UserHasAppAnalysisPermission(permissions.BasePermission): @@ -113,12 +111,11 @@ def has_permission(self, request, view): return user_has_configuration_permission( request.user, "auth.view_group", ) - elif request.method == "POST": + if request.method == "POST": return user_has_configuration_permission( request.user, "auth.add_group", ) - else: - return True + return True def has_object_permission(self, request, view, obj): if request.method == "GET": @@ -130,14 +127,13 @@ def has_object_permission(self, request, view, obj): ) and user_has_permission( request.user, obj, Permissions.Group_View, ) - else: - return check_object_permission( - request, - obj, - Permissions.Group_View, - Permissions.Group_Edit, - Permissions.Group_Delete, - ) + return check_object_permission( + request, + obj, + Permissions.Group_View, + Permissions.Group_Edit, + Permissions.Group_Delete, + ) class UserHasDojoGroupMemberPermission(permissions.BasePermission): @@ -188,8 +184,7 @@ def has_permission(self, request, view): ) ) return has_permission_result - else: - return True + return True def has_object_permission(self, request, view, obj): has_permission_result = True @@ -293,9 +288,8 @@ def has_permission(self, request, view): return check_post_permission( request, Product, "product", Permissions.Engagement_Add, ) - else: - # related object only need object permission - return True + # related object only need object permission + return True def has_object_permission(self, request, view, obj): if UserHasEngagementPermission.path_engagement_post.match( @@ -308,15 +302,14 @@ def has_object_permission(self, request, view, obj): Permissions.Engagement_Edit, Permissions.Engagement_Delete, ) - else: - return check_object_permission( - request, - obj, - Permissions.Engagement_View, - Permissions.Engagement_Edit, - Permissions.Engagement_Edit, - Permissions.Engagement_Edit, - ) + return check_object_permission( + request, + obj, + Permissions.Engagement_View, + Permissions.Engagement_Edit, + 
Permissions.Engagement_Edit, + Permissions.Engagement_Edit, + ) class UserHasRiskAcceptancePermission(permissions.BasePermission): @@ -334,9 +327,8 @@ def has_permission(self, request, view): return check_post_permission( request, Product, "product", Permissions.Risk_Acceptance, ) - else: - # related object only need object permission - return True + # related object only need object permission + return True def has_object_permission(self, request, view, obj): if UserHasRiskAcceptancePermission.path_risk_acceptance_post.match( @@ -351,15 +343,14 @@ def has_object_permission(self, request, view, obj): Permissions.Risk_Acceptance, Permissions.Risk_Acceptance, ) - else: - return check_object_permission( - request, - obj, - Permissions.Risk_Acceptance, - Permissions.Risk_Acceptance, - Permissions.Risk_Acceptance, - Permissions.Risk_Acceptance, - ) + return check_object_permission( + request, + obj, + Permissions.Risk_Acceptance, + Permissions.Risk_Acceptance, + Permissions.Risk_Acceptance, + Permissions.Risk_Acceptance, + ) class UserHasFindingPermission(permissions.BasePermission): @@ -382,9 +373,8 @@ def has_permission(self, request, view): return check_post_permission( request, Test, "test", Permissions.Finding_Add, ) - else: - # related object only need object permission - return True + # related object only need object permission + return True def has_object_permission(self, request, view, obj): if ( @@ -402,15 +392,14 @@ def has_object_permission(self, request, view, obj): Permissions.Finding_Edit, Permissions.Finding_Delete, ) - else: - return check_object_permission( - request, - obj, - Permissions.Finding_View, - Permissions.Finding_Edit, - Permissions.Finding_Edit, - Permissions.Finding_Edit, - ) + return check_object_permission( + request, + obj, + Permissions.Finding_View, + Permissions.Finding_Edit, + Permissions.Finding_Edit, + Permissions.Finding_Edit, + ) class UserHasImportPermission(permissions.BasePermission): @@ -435,7 +424,7 @@ def has_permission(self, request, view): return user_has_permission( request.user, engagement, Permissions.Import_Scan_Result, ) - elif engagement_id := converted_dict.get("engagement_id"): + if engagement_id := converted_dict.get("engagement_id"): # engagement_id doesn't exist msg = f'Engagement "{engagement_id}" does not exist' raise serializers.ValidationError(msg) @@ -452,19 +441,19 @@ def has_permission(self, request, view): converted_dict.get("product_type"), "Need engagement_id or product_name + engagement_name to perform import", ) - else: - # the engagement doesn't exist, so we need to check if the user has - # requested and is allowed to use auto_create - return check_auto_create_permission( - request.user, - converted_dict.get("product"), - converted_dict.get("product_name"), - converted_dict.get("engagement"), - converted_dict.get("engagement_name"), - converted_dict.get("product_type"), - converted_dict.get("product_type_name"), - "Need engagement_id or product_name + engagement_name to perform import", - ) + return None + # the engagement doesn't exist, so we need to check if the user has + # requested and is allowed to use auto_create + return check_auto_create_permission( + request.user, + converted_dict.get("product"), + converted_dict.get("product_name"), + converted_dict.get("engagement"), + converted_dict.get("engagement_name"), + converted_dict.get("product_type"), + converted_dict.get("product_type_name"), + "Need engagement_id or product_name + engagement_name to perform import", + ) class 
UserHasMetaImportPermission(permissions.BasePermission): @@ -490,13 +479,12 @@ def has_permission(self, request, view): return user_has_permission( request.user, product, Permissions.Import_Scan_Result, ) - elif product_id := converted_dict.get("product_id"): + if product_id := converted_dict.get("product_id"): # product_id doesn't exist msg = f'Product "{product_id}" does not exist' raise serializers.ValidationError(msg) - else: - msg = "Need product_id or product_name to perform import" - raise serializers.ValidationError(msg) + msg = "Need product_id or product_name to perform import" + raise serializers.ValidationError(msg) class UserHasProductPermission(permissions.BasePermission): @@ -556,8 +544,7 @@ def has_permission(self, request, view): return user_has_global_permission( request.user, Permissions.Product_Type_Add, ) - else: - return True + return True def has_object_permission(self, request, view, obj): return check_object_permission( @@ -631,7 +618,7 @@ def has_permission(self, request, view): return user_has_permission( request.user, test, Permissions.Import_Scan_Result, ) - elif test_id := converted_dict.get("test_id"): + if test_id := converted_dict.get("test_id"): # test_id doesn't exist msg = f'Test "{test_id}" does not exist' raise serializers.ValidationError(msg) @@ -648,19 +635,19 @@ def has_permission(self, request, view): converted_dict.get("product_type"), "Need test_id or product_name + engagement_name + scan_type to perform reimport", ) - else: - # the test doesn't exist, so we need to check if the user has - # requested and is allowed to use auto_create - return check_auto_create_permission( - request.user, - converted_dict.get("product"), - converted_dict.get("product_name"), - converted_dict.get("engagement"), - converted_dict.get("engagement_name"), - converted_dict.get("product_type"), - converted_dict.get("product_type_name"), - "Need test_id or product_name + engagement_name + scan_type to perform reimport", - ) + return None + # the test doesn't exist, so we need to check if the user has + # requested and is allowed to use auto_create + return check_auto_create_permission( + request.user, + converted_dict.get("product"), + converted_dict.get("product_name"), + converted_dict.get("engagement"), + converted_dict.get("engagement_name"), + converted_dict.get("product_type"), + converted_dict.get("product_type_name"), + "Need test_id or product_name + engagement_name + scan_type to perform reimport", + ) class UserHasTestPermission(permissions.BasePermission): @@ -676,9 +663,8 @@ def has_permission(self, request, view): return check_post_permission( request, Engagement, "engagement", Permissions.Test_Add, ) - else: - # related object only need object permission - return True + # related object only need object permission + return True def has_object_permission(self, request, view, obj): if UserHasTestPermission.path_tests_post.match( @@ -691,15 +677,14 @@ def has_object_permission(self, request, view, obj): Permissions.Test_Edit, Permissions.Test_Delete, ) - else: - return check_object_permission( - request, - obj, - Permissions.Test_View, - Permissions.Test_Edit, - Permissions.Test_Edit, - Permissions.Test_Edit, - ) + return check_object_permission( + request, + obj, + Permissions.Test_View, + Permissions.Test_Edit, + Permissions.Test_Edit, + Permissions.Test_Edit, + ) class UserHasTestImportPermission(permissions.BasePermission): @@ -776,8 +761,7 @@ def has_permission(self, request, view): ) ) return has_permission_result - else: - return True + return True def 
has_object_permission(self, request, view, obj): has_permission_result = True @@ -840,8 +824,7 @@ def has_permission(self, request, view): ) ) return has_permission_result - else: - return True + return True def has_object_permission(self, request, view, obj): has_permission_result = True @@ -934,9 +917,8 @@ def raise_no_auto_create_import_validation_error( if product_type_name: msg = f'Product "{product_name}" does not exist in Product_Type "{product_type_name}"' raise serializers.ValidationError(msg) - else: - msg = f'Product "{product_name}" does not exist' - raise serializers.ValidationError(msg) + msg = f'Product "{product_name}" does not exist' + raise serializers.ValidationError(msg) if engagement_name and not engagement: msg = f'Engagement "{engagement_name}" does not exist in Product "{product_name}"' @@ -1021,12 +1003,11 @@ def check_auto_create_permission( # new product type can be created with current user as owner, so # all objects in it can be created as well return True - else: - if not user_has_permission( - user, product_type, Permissions.Product_Type_Add_Product, - ): - msg = f'No permission to create products in product_type "{product_type}"' - raise PermissionDenied(msg) + if not user_has_permission( + user, product_type, Permissions.Product_Type_Add_Product, + ): + msg = f'No permission to create products in product_type "{product_type}"' + raise PermissionDenied(msg) # product can be created, so objects in it can be created as well return True diff --git a/dojo/api_v2/prefetch/prefetcher.py b/dojo/api_v2/prefetch/prefetcher.py index 79a4b0e7314..1c45e309dce 100644 --- a/dojo/api_v2/prefetch/prefetcher.py +++ b/dojo/api_v2/prefetch/prefetcher.py @@ -3,20 +3,28 @@ from rest_framework.serializers import ModelSerializer +from dojo.models import FileUpload + from . import utils # Reduce the scope of search for serializers. SERIALIZER_DEFS_MODULE = "dojo.api_v2.serializers" +preferred_serializers = { + FileUpload: "FileSerializer", +} + class _Prefetcher: @staticmethod def _build_serializers(): - """Returns a map model -> serializer where model is a django model and serializer is the corresponding + """ + Returns a map model -> serializer where model is a django model and serializer is the corresponding serializer used to serialize the model Returns: dict[model, serializer]: map of model to their serializer + """ def _is_model_serializer(obj): @@ -31,7 +39,11 @@ def _is_model_serializer(obj): for _, serializer in available_serializers: model = serializer.Meta.model - serializers[model] = serializer + if model in preferred_serializers: + if serializer.__name__ == preferred_serializers[model]: + serializers[model] = serializer + else: + serializers[model] = serializer # We add object->None to have a more uniform processing later on serializers[object] = None @@ -42,13 +54,15 @@ def __init__(self): self._prefetch_data = {} def _find_serializer(self, field_type): - """Find the best suited serializer for the given type. + """ + Find the best suited serializer for the given type. 
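
The preferred_serializers map added to the prefetcher resolves an ambiguity: when several ModelSerializers declare the same model, the last one discovered used to win silently. A self-contained sketch of the new selection logic; the classes below are toy stand-ins for the real models and serializers (FileUploadFullSerializer is hypothetical, for illustration):

    class FileUpload: ...
    class Product: ...

    class FileSerializer: ...
    class FileUploadFullSerializer: ...  # hypothetical competing serializer
    class ProductSerializer: ...

    preferred_serializers = {FileUpload: "FileSerializer"}

    def build_serializers(available):
        serializers = {}
        for model, serializer in available:
            if model in preferred_serializers:
                # Only the named serializer may claim this model.
                if serializer.__name__ == preferred_serializers[model]:
                    serializers[model] = serializer
            else:
                # Unchanged behaviour: the last serializer found wins.
                serializers[model] = serializer
        return serializers

    pairs = [
        (FileUpload, FileUploadFullSerializer),
        (FileUpload, FileSerializer),
        (Product, ProductSerializer),
    ]
    assert build_serializers(pairs)[FileUpload] is FileSerializer
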
Args: field_type (django.db.models.fields): the field type for which we need to find a serializer Returns: rest_framework.serializers.ModelSerializer: The serializer if one has been found or None + """ # If the type is represented in the map then return the serializer if field_type in self._serializers: @@ -62,11 +76,13 @@ def _find_serializer(self, field_type): return self._find_serializer(parent_class) def _prefetch(self, entry, fields_to_fetch): - """Apply prefetching for the given field on the given entry + """ + Apply prefetching for the given field on the given entry Args: entry (ModelInstance): Instance of a model as returned by a django queryset field_to_fetch (list[string]): fields to prefetch + """ for field_to_fetch in fields_to_fetch: # Get the field from the instance diff --git a/dojo/api_v2/prefetch/schema.py b/dojo/api_v2/prefetch/schema.py index 535e01e4e6c..ef5cbbf389f 100644 --- a/dojo/api_v2/prefetch/schema.py +++ b/dojo/api_v2/prefetch/schema.py @@ -18,7 +18,8 @@ def _get_path_to_GET_serializer_map(generator): def get_serializer_ref_name(serializer): - """Get serializer's ref_name + """ + Get serializer's ref_name inspired by https://github.com/axnsan12/drf-yasg/blob/78031f0c189585c30fccb5005a6899f2d34289a9/src/drf_yasg/utils.py#L416 :param serializer: Serializer instance @@ -37,14 +38,14 @@ def get_serializer_ref_name(serializer): def prefetch_postprocessing_hook(result, generator, request, public): - """OpenAPI v3 (drf-spectacular) Some endpoints are using the PrefetchListMixin and PrefetchRetrieveMixin. + """ + OpenAPI v3 (drf-spectacular) Some endpoints are using the PrefetchListMixin and PrefetchRetrieveMixin. These have nothing to do with Django prefetch_related. The endpoints have an @extend_schema configured with an extra parameter 'prefetch' This parameter contains an array of relations to prefetch. These prefetched models will be returned in an additional property in the response. The below processor ensures the result schema matches this. """ - serializer_classes = _get_path_to_GET_serializer_map(generator) paths = result.get("paths", {}) diff --git a/dojo/api_v2/prefetch/utils.py b/dojo/api_v2/prefetch/utils.py index de7ea2b3834..eefb1b642ec 100644 --- a/dojo/api_v2/prefetch/utils.py +++ b/dojo/api_v2/prefetch/utils.py @@ -2,7 +2,8 @@ def _is_many_to_many_relation(field): - """Check if a field specified a many-to-many relationship as defined by django. + """ + Check if a field specified a many-to-many relationship as defined by django. This is the case if the field is an instance of the ManyToManyDescriptor as generated by the django framework @@ -11,12 +12,14 @@ def _is_many_to_many_relation(field): Returns: bool: true if the field is a many-to-many relationship + """ return isinstance(field, related.ManyToManyDescriptor) def _is_one_to_one_relation(field): - """Check if a field specified a one-to-one relationship as defined by django. + """ + Check if a field specified a one-to-one relationship as defined by django. This is the case if the field is an instance of the ForwardManyToOne as generated by the django framework @@ -25,16 +28,19 @@ def _is_one_to_one_relation(field): Returns: bool: true if the field is a one-to-one relationship + """ return isinstance(field, related.ForwardManyToOneDescriptor) def _get_prefetchable_fields(serializer): - """Get the fields that are prefetchable according to the serializer description. + """ + Get the fields that are prefetchable according to the serializer description. 
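
For context on _find_serializer: it recurses to the parent class when the exact field type has no registered serializer, and the object -> None entry the prefetcher registers guarantees termination. The same lookup reduced to plain Python, as a sketch:

    def find_handler(registry, field_type):
        # registry maps type -> handler; the object -> None entry ensures the
        # recursion terminates at the top of the MRO.
        if field_type in registry:
            return registry[field_type]
        return find_handler(registry, field_type.__mro__[1])

    registry = {object: None}  # uniform fallback, as in the prefetcher

    class ModelBase: ...
    class ConcreteModel(ModelBase): ...

    registry[ModelBase] = "ModelBaseSerializer"
    print(find_handler(registry, ConcreteModel))  # ModelBaseSerializer
    print(find_handler(registry, int))            # None
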
Method mainly used by for automatic schema generation. Args: serializer (Serializer): [description] + """ def _is_field_prefetchable(field): diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py index a0908adb21d..de0e6a49dee 100644 --- a/dojo/api_v2/serializers.py +++ b/dojo/api_v2/serializers.py @@ -1,9 +1,8 @@ +import collections import json import logging -import os import re from datetime import datetime -from typing import List import six import tagulous @@ -77,6 +76,7 @@ Note_Type, NoteHistory, Notes, + Notification_Webhooks, Notifications, Product, Product_API_Scan_Configuration, @@ -226,9 +226,7 @@ def to_internal_value(self, data): substrings = re.findall(r'(?:"[^"]*"|[^",]+)', s) data_safe.extend(substrings) - internal_value = tagulous.utils.render_tags(data_safe) - - return internal_value + return tagulous.utils.render_tags(data_safe) def to_representation(self, value): if not isinstance(value, list): @@ -283,10 +281,10 @@ def _pop_tags(self, validated_data): return (to_be_tagged, validated_data) -class RequestResponseDict(list): +class RequestResponseDict(collections.UserList): def __init__(self, *args, **kwargs): pretty_print = kwargs.pop("pretty_print", True) - list.__init__(self, *args, **kwargs) + collections.UserList.__init__(self, *args, **kwargs) self.pretty_print = pretty_print def __add__(self, rhs): @@ -304,8 +302,7 @@ def __str__(self): return json.dumps( self, sort_keys=True, indent=4, separators=(",", ": "), ) - else: - return json.dumps(self) + return json.dumps(self) class RequestResponseSerializerField(serializers.ListSerializer): @@ -420,6 +417,51 @@ class Meta: fields = "__all__" +class MetadataSerializer(serializers.Serializer): + name = serializers.CharField(max_length=120) + value = serializers.CharField(max_length=300) + + +class MetaMainSerializer(serializers.Serializer): + id = serializers.IntegerField(read_only=True) + + product = serializers.PrimaryKeyRelatedField( + queryset=Product.objects.all(), + required=False, + default=None, + allow_null=True, + ) + endpoint = serializers.PrimaryKeyRelatedField( + queryset=Endpoint.objects.all(), + required=False, + default=None, + allow_null=True, + ) + finding = serializers.PrimaryKeyRelatedField( + queryset=Finding.objects.all(), + required=False, + default=None, + allow_null=True, + ) + metadata = MetadataSerializer(many=True) + + def validate(self, data): + product_id = data.get("product", None) + endpoint_id = data.get("endpoint", None) + finding_id = data.get("finding", None) + metadata = data.get("metadata") + + for item in metadata: + # this will only verify that one and only one of product, endpoint, or finding is passed... + DojoMeta(product=product_id, + endpoint=endpoint_id, + finding=finding_id, + name=item.get("name"), + value=item.get("value")).clean() + + return data + + class ProductMetaSerializer(serializers.ModelSerializer): class Meta: model = DojoMeta @@ -429,6 +471,7 @@ class Meta: class UserSerializer(serializers.ModelSerializer): date_joined = serializers.DateTimeField(read_only=True) last_login = serializers.DateTimeField(read_only=True, allow_null=True) + email = serializers.EmailField(required=True) password = serializers.CharField( write_only=True, style={"input_type": "password"}, @@ -549,14 +592,13 @@ def validate(self, data): msg = "Only superusers are allowed to add or edit superusers." 
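
A note on the to_internal_value change above: the re.findall(r'(?:"[^"]*"|[^",]+)', s) tokenizer keeps double-quoted tags, which may contain commas, intact while splitting on bare commas. A small runnable illustration; the strip/filter step is mine for readability, and the real field feeds the matches on to tagulous:

    import re

    def split_tags(s):
        tokens = re.findall(r'(?:"[^"]*"|[^",]+)', s)
        return [t.strip() for t in tokens if t.strip()]

    print(split_tags('api, "needs triage, high", backend'))
    # ['api', '"needs triage, high"', 'backend']
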
raise ValidationError(msg) - if ( - self.context["request"].method in ["PATCH", "PUT"] - and "password" in data - ): + if self.context["request"].method in ["PATCH", "PUT"] and "password" in data: msg = "Update of password though API is not allowed" raise ValidationError(msg) - else: - return super().validate(data) + if self.context["request"].method == "POST" and "password" not in data and settings.REQUIRE_PASSWORD_ON_USER: + msg = "Passwords must be supplied for new users" + raise ValidationError(msg) + return super().validate(data) class UserContactInfoSerializer(serializers.ModelSerializer): @@ -806,21 +848,10 @@ class Meta: def validate(self, data): if file := data.get("file"): - ext = os.path.splitext(file.name)[1] # [0] returns path+filename - valid_extensions = settings.FILE_UPLOAD_TYPES - if ext.lower() not in valid_extensions: - if accepted_extensions := f"{', '.join(valid_extensions)}": - msg = ( - "Unsupported extension. Supported extensions are as " - f"follows: {accepted_extensions}" - ) - else: - msg = ( - "File uploads are prohibited due to the list of acceptable " - "file extensions being empty" - ) - raise ValidationError(msg) + # the clean will validate the file extensions and raise a Validation error if the extensions are not accepted + FileUpload(title=file.name, file=file).clean() return data + return None class RawFileSerializer(serializers.ModelSerializer): @@ -1073,8 +1104,7 @@ def to_representation(self, data): "title": file.title, }, ) - new_data = {"engagement_id": engagement.id, "files": new_files} - return new_data + return {"engagement_id": engagement.id, "files": new_files} class EngagementCheckListSerializer(serializers.ModelSerializer): @@ -1146,8 +1176,7 @@ def run_validators(self, initial_data): if "finding, endpoint must make a unique set" in str(exc): msg = "This endpoint-finding relation already exists" raise serializers.ValidationError(msg) from exc - else: - raise + raise def create(self, validated_data): endpoint = validated_data.get("endpoint") @@ -1160,8 +1189,7 @@ def create(self, validated_data): if "finding, endpoint must make a unique set" in str(ie): msg = "This endpoint-finding relation already exists" raise serializers.ValidationError(msg) - else: - raise + raise status.mitigated = validated_data.get("mitigated", False) status.false_positive = validated_data.get("false_positive", False) status.out_of_scope = validated_data.get("out_of_scope", False) @@ -1177,8 +1205,7 @@ def update(self, instance, validated_data): if "finding, endpoint must make a unique set" in str(ie): msg = "This endpoint-finding relation already exists" raise serializers.ValidationError(msg) - else: - raise + raise class EndpointSerializer(TaggitSerializer, serializers.ModelSerializer): @@ -1247,7 +1274,7 @@ def validate(self, data): ) ) ) or ( - self.context["request"].method in ["POST"] and endpoint.count() > 0 + self.context["request"].method == "POST" and endpoint.count() > 0 ): msg = ( "It appears as though an endpoint with this data already " @@ -1411,7 +1438,7 @@ class TestTypeSerializer(TaggitSerializer, serializers.ModelSerializer): class Meta: model = Test_Type - fields = "__all__" + exclude = ("dynamically_generated",) class TestToNotesSerializer(serializers.Serializer): @@ -1439,8 +1466,7 @@ def to_representation(self, data): "title": file.title, }, ) - new_data = {"test_id": test.id, "files": new_files} - return new_data + return {"test_id": test.id, "files": new_files} class TestImportFindingActionSerializer(serializers.ModelSerializer): @@ -1467,7 
+1493,8 @@ class RiskAcceptanceSerializer(serializers.ModelSerializer): def create(self, validated_data): instance = super().create(validated_data) - add_findings_to_risk_acceptance(instance, instance.accepted_findings.all()) + user = getattr(self.context.get("request", None), "user", None) + add_findings_to_risk_acceptance(user, instance, instance.accepted_findings.all()) return instance def update(self, instance, validated_data): @@ -1481,11 +1508,12 @@ def update(self, instance, validated_data): findings_to_remove = Finding.objects.filter(id__in=[x.id for x in findings_to_remove]) # Make the update in the database instance = super().update(instance, validated_data) + user = getattr(self.context.get("request", None), "user", None) # Add the new findings - add_findings_to_risk_acceptance(instance, findings_to_add) + add_findings_to_risk_acceptance(user, instance, findings_to_add) # Remove the ones that were not present in the payload for finding in findings_to_remove: - remove_finding_from_risk_acceptance(instance, finding) + remove_finding_from_risk_acceptance(user, instance, finding) return instance @extend_schema_field(serializers.CharField()) @@ -1521,7 +1549,7 @@ def get_engagement(self, obj): ) def validate(self, data): - def validate_findings_have_same_engagement(finding_objects: List[Finding]): + def validate_findings_have_same_engagement(finding_objects: list[Finding]): engagements = finding_objects.values_list("test__engagement__id", flat=True).distinct().count() if engagements > 1: msg = "You are not permitted to add findings from multiple engagements" @@ -1696,8 +1724,7 @@ def get_related_fields(self, obj): return FindingRelatedFieldsSerializer( required=False, ).to_representation(obj) - else: - return None + return None def get_display_status(self, obj) -> str: return obj.status() @@ -1734,15 +1761,14 @@ def update(self, instance, validated_data): # If we need to push to JIRA, an extra save call is needed. # Also if we need to update the mitigation date of the finding. - # TODO try to combine create and save, but for now I'm just fixing a + # TODO: try to combine create and save, but for now I'm just fixing a # bug and don't want to change to much if push_to_jira: instance.save(push_to_jira=push_to_jira) # not sure why we are returning a tag_object, but don't want to change # too much now as we're just fixing a bug - tag_object = self._save_tags(instance, to_be_tagged) - return tag_object + return self._save_tags(instance, to_be_tagged) def validate(self, data): if self.context["request"].method == "PATCH": @@ -1761,10 +1787,10 @@ def validate(self, data): is_risk_accepted = data.get("risk_accepted", False) if (is_active or is_verified) and is_duplicate: - msg = "Duplicate findings cannot be" " verified or active" + msg = "Duplicate findings cannot be verified or active" raise serializers.ValidationError(msg) if is_false_p and is_verified: - msg = "False positive findings cannot " "be verified." + msg = "False positive findings cannot be verified." raise serializers.ValidationError(msg) if is_risk_accepted and not self.instance.risk_accepted: @@ -1871,15 +1897,14 @@ def create(self, validated_data): ) # If we need to push to JIRA, an extra save call is needed. 
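
The risk-acceptance changes start threading the acting user into add_findings_to_risk_acceptance and remove_finding_from_risk_acceptance. The defensive getattr chain matters because a serializer can be instantiated without a request in its context, for example from scripts or tests. A sketch of just that extraction:

    from types import SimpleNamespace

    def acting_user(context):
        # context may lack a request entirely (serializer built outside a
        # view); both missing request and missing user degrade to None.
        return getattr(context.get("request", None), "user", None)

    print(acting_user({}))                                          # None
    print(acting_user({"request": SimpleNamespace(user="alice")}))  # alice
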
- # TODO try to combine create and save, but for now I'm just fixing a + # TODO: try to combine create and save, but for now I'm just fixing a # bug and don't want to change to much if push_to_jira or new_finding: new_finding.save(push_to_jira=push_to_jira) # not sure why we are returning a tag_object, but don't want to change # too much now as we're just fixing a bug - tag_object = self._save_tags(new_finding, to_be_tagged) - return tag_object + return self._save_tags(new_finding, to_be_tagged) def validate(self, data): if "reporter" not in data: @@ -1936,6 +1961,8 @@ class Meta: exclude = ("cve",) def create(self, validated_data): + to_be_tagged, validated_data = self._pop_tags(validated_data) + # Save vulnerability ids and pop them if "vulnerability_id_template_set" in validated_data: vulnerability_id_set = validated_data.pop( @@ -1958,6 +1985,7 @@ def create(self, validated_data): ) new_finding_template.save() + self._save_tags(new_finding_template, to_be_tagged) return new_finding_template def update(self, instance, validated_data): @@ -2046,12 +2074,12 @@ def validate(self, data): def get_findings_count(self, obj) -> int: return obj.findings_count - # TODO, maybe extend_schema_field is needed here? - def get_findings_list(self, obj) -> List[int]: + # TODO: maybe extend_schema_field is needed here? + def get_findings_list(self, obj) -> list[int]: return obj.open_findings_list -class ImportScanSerializer(serializers.Serializer): +class CommonImportScanSerializer(serializers.Serializer): scan_date = serializers.DateField( required=False, help_text="Scan completion date will be used on all findings.", @@ -2068,8 +2096,8 @@ class ImportScanSerializer(serializers.Serializer): verified = serializers.BooleanField( help_text="Override the verified setting from the tool.", ) - scan_type = serializers.ChoiceField(choices=get_choices_sorted()) - # TODO why do we allow only existing endpoints? + + # TODO: why do we allow only existing endpoints? endpoint_to_add = serializers.PrimaryKeyRelatedField( queryset=Endpoint.objects.all(), required=False, @@ -2089,35 +2117,15 @@ class ImportScanSerializer(serializers.Serializer): required=False, help_text="Resource link to source code", ) - engagement = serializers.PrimaryKeyRelatedField( - queryset=Engagement.objects.all(), required=False, - ) + test_title = serializers.CharField(required=False) auto_create_context = serializers.BooleanField(required=False) deduplication_on_engagement = serializers.BooleanField(required=False) lead = serializers.PrimaryKeyRelatedField( allow_null=True, default=None, queryset=User.objects.all(), ) - tags = TagListSerializerField( - required=False, allow_empty=True, help_text="Add tags that help describe this scan.", - ) - close_old_findings = serializers.BooleanField( - required=False, - default=False, - help_text="Select if old findings no longer present in the report get closed as mitigated when importing. " - "If service has been set, only the findings for this service will be closed.", - ) - close_old_findings_product_scope = serializers.BooleanField( - required=False, - default=False, - help_text="Select if close_old_findings applies to all findings of the same type in the product. 
" - "By default, it is false meaning that only old findings of the same type in the engagement are in scope.", - ) push_to_jira = serializers.BooleanField(default=False) environment = serializers.CharField(required=False) - version = serializers.CharField( - required=False, help_text="Version that was scanned.", - ) build_id = serializers.CharField( required=False, help_text="ID of the build that was scanned.", ) @@ -2151,9 +2159,6 @@ class ImportScanSerializer(serializers.Serializer): # extra fields populated in response # need to use the _id suffix as without the serializer framework gets # confused - test = serializers.IntegerField( - read_only=True, - ) # left for backwards compatibility test_id = serializers.IntegerField(read_only=True) engagement_id = serializers.IntegerField(read_only=True) product_id = serializers.IntegerField(read_only=True) @@ -2168,10 +2173,75 @@ class ImportScanSerializer(serializers.Serializer): required=False, ) - def set_context( + def get_importer( + self, + **kwargs: dict, + ) -> BaseImporter: + """ + Returns a new instance of an importer that extends + the BaseImporter class + """ + return DefaultImporter(**kwargs) + + def process_scan( self, data: dict, - ) -> dict: + context: dict, + ) -> None: + """ + Process the scan with all of the supplied data fully massaged + into the format we are expecting + + Raises exceptions in the event of an error + """ + try: + importer = self.get_importer(**context) + context["test"], _, _, _, _, _, _ = importer.process_scan( + context.pop("scan", None), + ) + # Update the response body with some new data + if test := context.get("test"): + data["test"] = test.id + data["test_id"] = test.id + data["engagement_id"] = test.engagement.id + data["product_id"] = test.engagement.product.id + data["product_type_id"] = test.engagement.product.prod_type.id + data["statistics"] = {"after": test.statistics} + # convert to exception otherwise django rest framework will swallow them as 400 error + # exceptions are already logged in the importer + except SyntaxError as se: + raise Exception(se) + except ValueError as ve: + raise Exception(ve) + + def validate(self, data: dict) -> dict: + scan_type = data.get("scan_type") + file = data.get("file") + if not file and requires_file(scan_type): + msg = f"Uploading a Report File is required for {scan_type}" + raise serializers.ValidationError(msg) + if file and is_scan_file_too_large(file): + msg = f"Report file is too large. Maximum supported size is {settings.SCAN_FILE_MAX_SIZE} MB" + raise serializers.ValidationError(msg) + tool_type = requires_tool_type(scan_type) + if tool_type: + api_scan_configuration = data.get("api_scan_configuration") + if ( + api_scan_configuration + and tool_type + != api_scan_configuration.tool_configuration.tool_type.name + ): + msg = f"API scan configuration must be of tool type {tool_type}" + raise serializers.ValidationError(msg) + return data + + def validate_scan_date(self, value: str) -> None: + if value and value > timezone.localdate(): + msg = "The scan_date cannot be in the future!" 
+ raise serializers.ValidationError(msg) + return value + + def setup_common_context(self, data: dict) -> dict: """ Process all of the user supplied inputs to massage them into the correct format the importer is expecting to see @@ -2179,9 +2249,17 @@ def set_context( context = dict(data) # update some vars context["scan"] = data.pop("file", None) - context["environment"] = Development_Environment.objects.get( - name=data.get("environment", "Development"), - ) + + if context.get("auto_create_context"): + environment = Development_Environment.objects.get_or_create(name=data.get("environment", "Development"))[0] + else: + try: + environment = Development_Environment.objects.get(name=data.get("environment", "Development")) + except: + msg = "Environment named " + data.get("environment") + " does not exist." + raise ValidationError(msg) + + context["environment"] = environment # Set the active/verified status based upon the overrides if "active" in self.initial_data: context["active"] = data.get("active") @@ -2212,6 +2290,51 @@ def set_context( if context.get("scan_date") else None ) + + # engagement end date was not being used at all and so target_end would also turn into None + # in this case, do not want to change target_end unless engagement_end exists + eng_end_date = context.get("engagement_end_date", None) + if eng_end_date: + context["target_end"] = context.get("engagement_end_date") + + return context + + +class ImportScanSerializer(CommonImportScanSerializer): + scan_type = serializers.ChoiceField(choices=get_choices_sorted()) + engagement = serializers.PrimaryKeyRelatedField( + queryset=Engagement.objects.all(), required=False, + ) + tags = TagListSerializerField( + required=False, allow_empty=True, help_text="Add tags that help describe this scan.", + ) + close_old_findings = serializers.BooleanField( + required=False, + default=False, + help_text="Select if old findings no longer present in the report get closed as mitigated when importing. " + "If service has been set, only the findings for this service will be closed.", + ) + close_old_findings_product_scope = serializers.BooleanField( + required=False, + default=False, + help_text="Select if close_old_findings applies to all findings of the same type in the product. 
" + "By default, it is false meaning that only old findings of the same type in the engagement are in scope.", + ) + version = serializers.CharField( + required=False, help_text="Version that was scanned.", + ) + # extra fields populated in response + # need to use the _id suffix as without the serializer framework gets + # confused + test = serializers.IntegerField( + read_only=True, + ) # left for backwards compatibility + + def set_context( + self, + data: dict, + ) -> dict: + context = self.setup_common_context(data) # Process the auto create context inputs self.process_auto_create_create_context(context) @@ -2238,47 +2361,6 @@ def process_auto_create_create_context( # Raise an explicit drf exception here raise ValidationError(str(e)) - def get_importer( - self, - **kwargs: dict, - ) -> BaseImporter: - """ - Returns a new instance of an importer that extends - the BaseImporter class - """ - return DefaultImporter(**kwargs) - - def process_scan( - self, - data: dict, - context: dict, - ) -> None: - """ - Process the scan with all of the supplied data fully massaged - into the format we are expecting - - Raises exceptions in the event of an error - """ - try: - importer = self.get_importer(**context) - context["test"], _, _, _, _, _, _ = importer.process_scan( - context.pop("scan", None), - ) - # Update the response body with some new data - if test := context.get("test"): - data["test"] = test.id - data["test_id"] = test.id - data["engagement_id"] = test.engagement.id - data["product_id"] = test.engagement.product.id - data["product_type_id"] = test.engagement.product.prod_type.id - data["statistics"] = {"after": test.statistics} - # convert to exception otherwise django rest framework will swallow them as 400 error - # exceptions are already logged in the importer - except SyntaxError as se: - raise Exception(se) - except ValueError as ve: - raise Exception(ve) - def save(self, push_to_jira=False): # Go through the validate method data = self.validated_data @@ -2289,50 +2371,9 @@ def save(self, push_to_jira=False): # Import the scan with all of the supplied data self.process_scan(data, context) - def validate(self, data: dict) -> dict: - scan_type = data.get("scan_type") - file = data.get("file") - if not file and requires_file(scan_type): - msg = f"Uploading a Report File is required for {scan_type}" - raise serializers.ValidationError(msg) - if file and is_scan_file_too_large(file): - msg = f"Report file is too large. Maximum supported size is {settings.SCAN_FILE_MAX_SIZE} MB" - raise serializers.ValidationError(msg) - tool_type = requires_tool_type(scan_type) - if tool_type: - api_scan_configuration = data.get("api_scan_configuration") - if ( - api_scan_configuration - and tool_type - != api_scan_configuration.tool_configuration.tool_type.name - ): - msg = f"API scan configuration must be of tool type {tool_type}" - raise serializers.ValidationError(msg) - return data - - def validate_scan_date(self, value: str) -> None: - if value and value > timezone.localdate(): - msg = "The scan_date cannot be in the future!" 
- raise serializers.ValidationError(msg) - return value +class ReImportScanSerializer(TaggitSerializer, CommonImportScanSerializer): -class ReImportScanSerializer(TaggitSerializer, serializers.Serializer): - scan_date = serializers.DateField( - required=False, - help_text="Scan completion date will be used on all findings.", - ) - minimum_severity = serializers.ChoiceField( - choices=SEVERITY_CHOICES, - default="Info", - help_text="Minimum severity level to be imported", - ) - active = serializers.BooleanField( - help_text="Override the active setting from the tool.", - ) - verified = serializers.BooleanField( - help_text="Override the verified setting from the tool.", - ) help_do_not_reactivate = "Select if the import should ignore active findings from the report, useful for triage-less scanners. Will keep existing findings closed, without reactivating them. For more information check the docs." do_not_reactivate = serializers.BooleanField( default=False, required=False, help_text=help_do_not_reactivate, @@ -2340,35 +2381,11 @@ class ReImportScanSerializer(TaggitSerializer, serializers.Serializer): scan_type = serializers.ChoiceField( choices=get_choices_sorted(), required=True, ) - endpoint_to_add = serializers.PrimaryKeyRelatedField( - queryset=Endpoint.objects.all(), - required=False, - default=None, - help_text="Enter the ID of an Endpoint that is associated with the target Product. New Findings will be added to that Endpoint.", - ) - file = serializers.FileField(allow_empty_file=True, required=False) - product_type_name = serializers.CharField(required=False) - product_name = serializers.CharField(required=False) - engagement_name = serializers.CharField(required=False) - engagement_end_date = serializers.DateField( - required=False, - help_text="End Date for Engagement. Default is current time + 365 days. Required format year-month-day", - ) - source_code_management_uri = serializers.URLField( - max_length=600, - required=False, - help_text="Resource link to source code", - ) test = serializers.PrimaryKeyRelatedField( required=False, queryset=Test.objects.all(), ) - test_title = serializers.CharField(required=False) - auto_create_context = serializers.BooleanField(required=False) - deduplication_on_engagement = serializers.BooleanField(required=False) - - push_to_jira = serializers.BooleanField(default=False) # Close the old findings if the parameter is not provided. This is to - # mentain the old API behavior after reintroducing the close_old_findings parameter + # maintain the old API behavior after reintroducing the close_old_findings parameter # also for ReImport. close_old_findings = serializers.BooleanField( required=False, @@ -2386,113 +2403,18 @@ class ReImportScanSerializer(TaggitSerializer, serializers.Serializer): required=False, help_text="Version that will be set on existing Test object. Leave empty to leave existing value in place.", ) - build_id = serializers.CharField( - required=False, help_text="ID of the build that was scanned.", - ) - branch_tag = serializers.CharField( - required=False, help_text="Branch or Tag that was scanned.", - ) - commit_hash = serializers.CharField( - required=False, help_text="Commit that was scanned.", - ) - api_scan_configuration = serializers.PrimaryKeyRelatedField( - allow_null=True, - default=None, - queryset=Product_API_Scan_Configuration.objects.all(), - ) - service = serializers.CharField( - required=False, - help_text="A service is a self-contained piece of functionality within a Product. 
" - "This is an optional field which is used in deduplication and closing of old findings when set. " - "This affects the whole engagement/product depending on your deduplication scope.", - ) - environment = serializers.CharField(required=False) - lead = serializers.PrimaryKeyRelatedField( - allow_null=True, default=None, queryset=User.objects.all(), - ) tags = TagListSerializerField( required=False, allow_empty=True, help_text="Modify existing tags that help describe this scan. (Existing test tags will be overwritten)", ) - group_by = serializers.ChoiceField( - required=False, - choices=Finding_Group.GROUP_BY_OPTIONS, - help_text="Choose an option to automatically group new findings by the chosen option.", - ) - create_finding_groups_for_all_findings = serializers.BooleanField( - help_text="If set to false, finding groups will only be created when there is more than one grouped finding", - required=False, - default=True, - ) - - # extra fields populated in response - # need to use the _id suffix as without the serializer framework gets - # confused - test_id = serializers.IntegerField(read_only=True) - engagement_id = serializers.IntegerField( - read_only=True, - ) # need to use the _id suffix as without the serializer framework gets confused - product_id = serializers.IntegerField(read_only=True) - product_type_id = serializers.IntegerField(read_only=True) - - statistics = ImportStatisticsSerializer(read_only=True, required=False) - apply_tags_to_findings = serializers.BooleanField( - help_text="If set to True, the tags will be applied to the findings", - required=False, - ) - apply_tags_to_endpoints = serializers.BooleanField( - help_text="If set to True, the tags will be applied to the endpoints", - required=False, - ) - def set_context( self, data: dict, ) -> dict: - """ - Process all of the user supplied inputs to massage them into the correct - format the importer is expecting to see - """ - context = dict(data) - # update some vars - context["scan"] = data.get("file", None) - context["environment"] = Development_Environment.objects.get( - name=data.get("environment", "Development"), - ) - # Set the active/verified status based upon the overrides - if "active" in self.initial_data: - context["active"] = data.get("active") - else: - context["active"] = None - if "verified" in self.initial_data: - context["verified"] = data.get("verified") - else: - context["verified"] = None - # Change the way that endpoints are sent to the importer - if endpoints_to_add := data.get("endpoint_to_add"): - context["endpoints_to_add"] = [endpoints_to_add] - else: - context["endpoint_to_add"] = None - # Convert the tags to a list if needed. At this point, the - # TaggitListSerializer has already removed commas supplied - # by the user, so this operation will consistently return - # a list to be used by the importer - if tags := context.get("tags"): - if isinstance(tags, str): - context["tags"] = tags.split(", ") - # have to make the scan_date_time timezone aware otherwise uploads via - # the API would fail (but unit tests for api upload would pass...) 
- context["scan_date"] = ( - timezone.make_aware( - datetime.combine(context.get("scan_date"), datetime.min.time()), - ) - if context.get("scan_date") - else None - ) - return context + return self.setup_common_context(data) def process_auto_create_create_context( self, @@ -2516,16 +2438,6 @@ def process_auto_create_create_context( # Raise an explicit drf exception here raise ValidationError(str(e)) - def get_importer( - self, - **kwargs: dict, - ) -> BaseImporter: - """ - Returns a new instance of an importer that extends - the BaseImporter class - """ - return DefaultImporter(**kwargs) - def get_reimporter( self, **kwargs: dict, @@ -2604,33 +2516,6 @@ def save(self, push_to_jira=False): # Import the scan with all of the supplied data self.process_scan(auto_create_manager, data, context) - def validate(self, data): - scan_type = data.get("scan_type") - file = data.get("file") - if not file and requires_file(scan_type): - msg = f"Uploading a Report File is required for {scan_type}" - raise serializers.ValidationError(msg) - if file and is_scan_file_too_large(file): - msg = f"Report file is too large. Maximum supported size is {settings.SCAN_FILE_MAX_SIZE} MB" - raise serializers.ValidationError(msg) - tool_type = requires_tool_type(scan_type) - if tool_type: - api_scan_configuration = data.get("api_scan_configuration") - if ( - api_scan_configuration - and tool_type - != api_scan_configuration.tool_configuration.tool_type.name - ): - msg = f"API scan configuration must be of tool type {tool_type}" - raise serializers.ValidationError(msg) - return data - - def validate_scan_date(self, value): - if value and value > timezone.localdate(): - msg = "The scan_date cannot be in the future!" - raise serializers.ValidationError(msg) - return value - class EndpointMetaImporterSerializer(serializers.Serializer): file = serializers.FileField(required=True) @@ -2795,8 +2680,7 @@ def to_representation(self, data): "title": file.title, }, ) - new_data = {"finding_id": finding.id, "files": new_files} - return new_data + return {"finding_id": finding.id, "files": new_files} class FindingCloseSerializer(serializers.ModelSerializer): @@ -2859,6 +2743,11 @@ class ReportGenerateSerializer(serializers.Serializer): ) +class EngagementUpdateJiraEpicSerializer(serializers.Serializer): + epic_name = serializers.CharField(required=False, max_length=200) + epic_priority = serializers.CharField(required=False, allow_null=True) + + class TagSerializer(serializers.Serializer): tags = TagListSerializerField(required=True) @@ -3053,10 +2942,9 @@ class QuestionnaireQuestionSerializer(serializers.ModelSerializer): def to_representation(self, instance): if isinstance(instance, TextQuestion): return TextQuestionSerializer(instance=instance).data - elif isinstance(instance, ChoiceQuestion): + if isinstance(instance, ChoiceQuestion): return ChoiceQuestionSerializer(instance=instance).data - else: - return QuestionSerializer(instance=instance).data + return QuestionSerializer(instance=instance).data class Meta: model = Question @@ -3093,10 +2981,9 @@ class QuestionnaireAnswerSerializer(serializers.ModelSerializer): def to_representation(self, instance): if isinstance(instance, TextAnswer): return TextAnswerSerializer(instance=instance).data - elif isinstance(instance, ChoiceAnswer): + if isinstance(instance, ChoiceAnswer): return ChoiceAnswerSerializer(instance=instance).data - else: - return AnswerSerializer(instance=instance).data + return AnswerSerializer(instance=instance).data class Meta: model = Answer @@ -3170,5 +3057,10 
@@ def create(self, validated_data): if 'duplicate key value violates unique constraint "dojo_announcement_pkey"' in str(e): msg = "No more than one Announcement is allowed" raise serializers.ValidationError(msg) - else: - raise + raise + + +class NotificationWebhooksSerializer(serializers.ModelSerializer): + class Meta: + model = Notification_Webhooks + fields = "__all__" diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py index c956dac283d..1a9eab86416 100644 --- a/dojo/api_v2/views.py +++ b/dojo/api_v2/views.py @@ -111,6 +111,7 @@ Network_Locations, Note_Type, Notes, + Notification_Webhooks, Notifications, Product, Product_API_Scan_Configuration, @@ -172,6 +173,33 @@ logger = logging.getLogger(__name__) +def schema_with_prefetch() -> dict: + return { + "list": extend_schema( + parameters=[ + OpenApiParameter( + "prefetch", + OpenApiTypes.STR, + OpenApiParameter.QUERY, + required=False, + description="List of fields for which to prefetch model instances and add those to the response", + ), + ], + ), + "retrieve": extend_schema( + parameters=[ + OpenApiParameter( + "prefetch", + OpenApiTypes.STR, + OpenApiParameter.QUERY, + required=False, + description="List of fields for which to prefetch model instances and add those to the response", + ), + ], + ), + } + + class DojoOpenApiJsonRenderer(OpenApiJsonRenderer2): def get_indent(self, accepted_media_type, renderer_context): if accepted_media_type and "indent" in accepted_media_type: @@ -180,7 +208,7 @@ def get_indent(self, accepted_media_type, renderer_context): class DojoSpectacularAPIView(SpectacularAPIView): - renderer_classes = [DojoOpenApiJsonRenderer] + SpectacularAPIView.renderer_classes + renderer_classes = [DojoOpenApiJsonRenderer, *SpectacularAPIView.renderer_classes] class DojoModelViewSet( @@ -211,30 +239,7 @@ def get_queryset(self): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class DojoGroupViewSet( PrefetchDojoModelViewSet, ): @@ -252,30 +257,7 @@ def get_queryset(self): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class DojoGroupMemberViewSet( PrefetchDojoModelViewSet, ): @@ -301,6 +283,7 @@ def partial_update(self, request, pk=None): # Authorization: superuser +@extend_schema_view(**schema_with_prefetch()) class GlobalRoleViewSet( PrefetchDojoModelViewSet, ): @@ -315,6 +298,8 @@ def get_queryset(self): # Authorization: object-based +# @extend_schema_view(**schema_with_prefetch()) +# Nested models with prefetch make the 
response schema too long for Swagger UI class EndPointViewSet( PrefetchDojoModelViewSet, ): @@ -370,6 +355,8 @@ def generate_report(self, request, pk=None): # Authorization: object-based +# @extend_schema_view(**schema_with_prefetch()) +# Nested models with prefetch make the response schema too long for Swagger UI class EndpointStatusViewSet( PrefetchDojoModelViewSet, ): @@ -398,6 +385,8 @@ def get_queryset(self): # Authorization: object-based +# @extend_schema_view(**schema_with_prefetch()) +# Nested models with prefetch make the response schema too long for Swagger UI class EngagementViewSet( PrefetchDojoModelViewSet, ra_api.AcceptedRisksMixin, @@ -650,7 +639,39 @@ def download_file(self, request, file_id, pk=None): # send file return generate_file_response(file_object) + @extend_schema( + request=serializers.EngagementUpdateJiraEpicSerializer, + responses={status.HTTP_200_OK: serializers.EngagementUpdateJiraEpicSerializer}, + ) + @action( + detail=True, methods=["post"], permission_classes=[IsAuthenticated], + ) + def update_jira_epic(self, request, pk=None): + engagement = self.get_object() + try: + if engagement.has_jira_issue: + jira_helper.update_epic(engagement, **request.data) + response = Response( + {"info": "Jira Epic update query sent"}, + status=status.HTTP_200_OK, + ) + else: + jira_helper.add_epic(engagement, **request.data) + response = Response( + {"info": "Jira Epic create query sent"}, + status=status.HTTP_200_OK, + ) + return response + except ValidationError: + return Response( + {"error": "Bad Request!"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + +# @extend_schema_view(**schema_with_prefetch()) +# Nested models with prefetch make the response schema too long for Swagger UI class RiskAcceptanceViewSet( PrefetchDojoModelViewSet, ): @@ -668,7 +689,7 @@ def destroy(self, request, pk=None): instance = self.get_object() # Remove any findings on the risk acceptance for finding in instance.accepted_findings.all(): - remove_finding_from_risk_acceptance(instance, finding) + remove_finding_from_risk_acceptance(request.user, instance, finding) # return the response of the object being deleted return super().destroy(request, pk=pk) @@ -716,6 +737,7 @@ def download_proof(self, request, pk=None): # These are technologies in the UI and the API! 
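
The new update_jira_epic action branches on engagement.has_jira_issue to decide between updating and creating the epic. The core branch, extracted as a plain function so it runs standalone; the Response wrapping and the ValidationError handler are elided, and jira_helper is passed in rather than imported:

    from types import SimpleNamespace

    def update_or_create_epic(engagement, jira_helper, **fields):
        if engagement.has_jira_issue:
            jira_helper.update_epic(engagement, **fields)
            return {"info": "Jira Epic update query sent"}
        jira_helper.add_epic(engagement, **fields)
        return {"info": "Jira Epic create query sent"}

    fake_jira = SimpleNamespace(
        update_epic=lambda eng, **kw: None,
        add_epic=lambda eng, **kw: None,
    )
    eng = SimpleNamespace(has_jira_issue=False)
    print(update_or_create_epic(eng, fake_jira, epic_name="Q3 pentest"))
    # {'info': 'Jira Epic create query sent'}
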
# Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class AppAnalysisViewSet( PrefetchDojoModelViewSet, ): @@ -734,6 +756,7 @@ def get_queryset(self): # Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class CredentialsViewSet( PrefetchDojoModelViewSet, ): @@ -747,6 +770,8 @@ def get_queryset(self): # Authorization: configuration +# @extend_schema_view(**schema_with_prefetch()) +# Nested models with prefetch make the response schema too long for Swagger UI class CredentialsMappingViewSet( PrefetchDojoModelViewSet, ): @@ -878,8 +903,7 @@ def get_queryset(self): def get_serializer_class(self): if self.request and self.request.method == "POST": return serializers.FindingCreateSerializer - else: - return serializers.FindingSerializer + return serializers.FindingSerializer @extend_schema( methods=["POST"], @@ -1226,10 +1250,9 @@ def remove_tags(self, request, pk=None): {"success": "Tag(s) Removed"}, status=status.HTTP_204_NO_CONTENT, ) - else: - return Response( - delete_tags.errors, status=status.HTTP_400_BAD_REQUEST, - ) + return Response( + delete_tags.errors, status=status.HTTP_400_BAD_REQUEST, + ) @extend_schema( responses={ @@ -1367,10 +1390,9 @@ def _add_metadata(self, request, finding): ) return Response(data=metadata_data.data, status=status.HTTP_200_OK) - else: - return Response( - metadata_data.errors, status=status.HTTP_400_BAD_REQUEST, - ) + return Response( + metadata_data.errors, status=status.HTTP_400_BAD_REQUEST, + ) def _remove_metadata(self, request, finding): name = request.query_params.get("name", None) @@ -1457,13 +1479,13 @@ def metadata(self, request, pk=None): if request.method == "GET": return self._get_metadata(request, finding) - elif request.method == "POST": + if request.method == "POST": return self._add_metadata(request, finding) - elif request.method == "PUT": + if request.method == "PUT": return self._edit_metadata(request, finding) - elif request.method == "PATCH": + if request.method == "PATCH": return self._edit_metadata(request, finding) - elif request.method == "DELETE": + if request.method == "DELETE": return self._remove_metadata(request, finding) return Response( @@ -1486,6 +1508,8 @@ def get_queryset(self): # Authorization: object-based +# @extend_schema_view(**schema_with_prefetch()) +# Nested models with prefetch make the response schema too long for Swagger UI class JiraIssuesViewSet( PrefetchDojoModelViewSet, ): @@ -1511,6 +1535,7 @@ def get_queryset(self): # Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class JiraProjectViewSet( PrefetchDojoModelViewSet, ): @@ -1522,6 +1547,7 @@ class JiraProjectViewSet( "jira_instance", "product", "engagement", + "enabled", "component", "project_key", "push_all_issues", @@ -1573,6 +1599,7 @@ def get_queryset(self): # Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class ProductAPIScanConfigurationViewSet( PrefetchDojoModelViewSet, ): @@ -1599,30 +1626,8 @@ def get_queryset(self): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the 
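
The batch metadata endpoint above splits into process_post, which creates rows, and process_patch, which resolves each row by (product, finding, endpoint, name) and fails loudly when the name is missing. The update half over a dict-backed store, framework-free and purely illustrative:

    def patch_metadata(store, scope, metadata):
        # store maps (scope, name) -> value; scope stands in for the
        # product/finding/endpoint triple used by the real endpoint.
        for item in metadata:
            key = (scope, item["name"])
            if key not in store:
                raise ValueError(f"Metadata {item['name']} not found for object.")
            store[key] = item["value"]

    store = {(("product", 1), "owner"): "team-a"}
    patch_metadata(store, ("product", 1), [{"name": "owner", "value": "team-b"}])
    assert store[(("product", 1), "owner")] == "team-b"
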
response", - ), - ], - ), -) +# @extend_schema_view(**schema_with_prefetch()) +# Nested models with prefetch make the response schema too long for Swagger UI class DojoMetaViewSet( PrefetchDojoModelViewSet, ): @@ -1645,31 +1650,63 @@ class DojoMetaViewSet( def get_queryset(self): return get_authorized_dojo_meta(Permissions.Product_View) + @extend_schema( + methods=["post", "patch"], + request=serializers.MetaMainSerializer, + responses={status.HTTP_200_OK: serializers.MetaMainSerializer}, + filters=False, + ) + @action( + detail=False, methods=["post", "patch"], pagination_class=None, + ) + def batch(self, request, pk=None): + serialized_data = serializers.MetaMainSerializer(data=request.data) + if serialized_data.is_valid(raise_exception=True): + if request.method == "POST": + self.process_post(request.data) + if request.method == "PATCH": + self.process_patch(request.data) + + return Response(status=status.HTTP_201_CREATED, data=serialized_data.data) + + def process_post(self: object, data: dict): + product = Product.objects.filter(id=data.get("product")).first() + finding = Finding.objects.filter(id=data.get("finding")).first() + endpoint = Endpoint.objects.filter(id=data.get("endpoint")).first() + metalist = data.get("metadata") + for metadata in metalist: + try: + DojoMeta.objects.create( + product=product, + finding=finding, + endpoint=endpoint, + name=metadata.get("name"), + value=metadata.get("value"), + ) + except (IntegrityError) as ex: # this should not happen as the data was validated in the batch call + raise ValidationError(str(ex)) + + def process_patch(self: object, data: dict): + product = Product.objects.filter(id=data.get("product")).first() + finding = Finding.objects.filter(id=data.get("finding")).first() + endpoint = Endpoint.objects.filter(id=data.get("endpoint")).first() + metalist = data.get("metadata") + for metadata in metalist: + dojometa = DojoMeta.objects.filter(product=product, finding=finding, endpoint=endpoint, name=metadata.get("name")) + if dojometa: + try: + dojometa.update( + name=metadata.get("name"), + value=metadata.get("value"), + ) + except (IntegrityError) as ex: + raise ValidationError(str(ex)) + else: + msg = f"Metadata {metadata.get('name')} not found for object." 
+ raise ValidationError(msg) -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) + +@extend_schema_view(**schema_with_prefetch()) class ProductViewSet( prefetch.PrefetchListMixin, prefetch.PrefetchRetrieveMixin, @@ -1745,30 +1782,7 @@ def generate_report(self, request, pk=None): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class ProductMemberViewSet( PrefetchDojoModelViewSet, ): @@ -1796,30 +1810,7 @@ def partial_update(self, request, pk=None): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class ProductGroupViewSet( PrefetchDojoModelViewSet, ): @@ -1847,30 +1838,7 @@ def partial_update(self, request, pk=None): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class ProductTypeViewSet( PrefetchDojoModelViewSet, ): @@ -1955,30 +1923,7 @@ def generate_report(self, request, pk=None): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) 
class ProductTypeMemberViewSet( PrefetchDojoModelViewSet, ): @@ -2020,30 +1965,7 @@ def partial_update(self, request, pk=None): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class ProductTypeGroupViewSet( PrefetchDojoModelViewSet, ): @@ -2071,6 +1993,8 @@ def partial_update(self, request, pk=None): # Authorization: object-based +# @extend_schema_view(**schema_with_prefetch()) +# Nested models with prefetch make the response schema too long for Swagger UI class StubFindingsViewSet( PrefetchDojoModelViewSet, ): @@ -2091,8 +2015,7 @@ def get_queryset(self): def get_serializer_class(self): if self.request and self.request.method == "POST": return serializers.StubFindingCreateSerializer - else: - return serializers.StubFindingSerializer + return serializers.StubFindingSerializer # Authorization: authenticated, configuration @@ -2109,6 +2032,8 @@ def get_queryset(self): # Authorization: object-based +# @extend_schema_view(**schema_with_prefetch()) +# Nested models with prefetch make the response schema too long for Swagger UI class TestsViewSet( PrefetchDojoModelViewSet, ra_api.AcceptedRisksMixin, @@ -2144,8 +2069,7 @@ def get_serializer_class(self): if self.action == "accept_risks": return ra_api.AcceptedRiskSerializer return serializers.TestCreateSerializer - else: - return serializers.TestSerializer + return serializers.TestSerializer @extend_schema( request=serializers.ReportGenerateOptionSerializer, @@ -2316,30 +2240,8 @@ def get_queryset(self): return Test_Type.objects.all().order_by("id") -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +# @extend_schema_view(**schema_with_prefetch()) +# Nested models with prefetch make the response schema too long for Swagger UI class TestImportViewSet( PrefetchDojoModelViewSet, ): @@ -2398,6 +2300,7 @@ def get_queryset(self): # Authorization: configurations +@extend_schema_view(**schema_with_prefetch()) class ToolConfigurationsViewSet( PrefetchDojoModelViewSet, ): @@ -2418,6 +2321,7 @@ def get_queryset(self): # Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class ToolProductSettingsViewSet( PrefetchDojoModelViewSet, ): @@ -2502,30 +2406,7 @@ def destroy(self, request, *args, **kwargs): # Authorization: superuser -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - 
parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class UserContactInfoViewSet( PrefetchDojoModelViewSet, ): @@ -2575,6 +2456,7 @@ def get(self, request, format=None): # Authorization: authenticated users, DjangoModelPermissions class ImportScanView(mixins.CreateModelMixin, viewsets.GenericViewSet): + """ Imports a scan report into an engagement or product. @@ -2624,7 +2506,7 @@ def perform_create(self, serializer): # have been created yet push_to_jira = serializer.validated_data.get("push_to_jira") if get_system_setting("enable_jira"): - jira_driver = (engagement if engagement else product if product else None) + jira_driver = engagement or (product or None) if jira_project := (jira_helper.get_jira_project(jira_driver) if jira_driver else None): push_to_jira = push_to_jira or jira_project.push_all_issues logger.debug(f"push_to_jira: {push_to_jira}") @@ -2638,6 +2520,7 @@ def get_queryset(self): class EndpointMetaImporterView( mixins.CreateModelMixin, viewsets.GenericViewSet, ): + """ Imports a CSV file into a product to propagate arbitrary meta and tags on endpoints. @@ -2680,30 +2563,7 @@ def get_queryset(self): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class LanguageViewSet( PrefetchDojoModelViewSet, ): @@ -2736,6 +2596,7 @@ def get_queryset(self): # Authorization: object-based class ReImportScanView(mixins.CreateModelMixin, viewsets.GenericViewSet): + """ Reimports a scan report into an existing test. @@ -2793,9 +2654,7 @@ def perform_create(self, serializer): # have been created yet push_to_jira = serializer.validated_data.get("push_to_jira") if get_system_setting("enable_jira"): - jira_driver = ( - test if test else engagement if engagement else product if product else None - ) + jira_driver = test or (engagement or (product or None)) if jira_project := (jira_helper.get_jira_project(jira_driver) if jira_driver else None): push_to_jira = push_to_jira or jira_project.push_all_issues logger.debug(f"push_to_jira: {push_to_jira}") @@ -3138,7 +2997,8 @@ def report_generate(request, obj, options): class SystemSettingsViewSet( mixins.ListModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet, ): - """Basic control over System Settings. Use 'id' 1 for PUT, PATCH operations""" + + """Basic control over System Settings. 
Use 'id' 1 for PUT, PATCH operations"""

     permission_classes = (permissions.IsSuperUser, DjangoModelPermissions)
     serializer_class = serializers.SystemSettingsSerializer
@@ -3149,30 +3009,7 @@ def get_queryset(self):


 # Authorization: superuser
-@extend_schema_view(
-    list=extend_schema(
-        parameters=[
-            OpenApiParameter(
-                "prefetch",
-                OpenApiTypes.STR,
-                OpenApiParameter.QUERY,
-                required=False,
-                description="List of fields for which to prefetch model instances and add those to the response",
-            ),
-        ],
-    ),
-    retrieve=extend_schema(
-        parameters=[
-            OpenApiParameter(
-                "prefetch",
-                OpenApiTypes.STR,
-                OpenApiParameter.QUERY,
-                required=False,
-                description="List of fields for which to prefetch model instances and add those to the response",
-            ),
-        ],
-    ),
-)
+@extend_schema_view(**schema_with_prefetch())
 class NotificationsViewSet(
     PrefetchDojoModelViewSet,
 ):
@@ -3186,6 +3023,7 @@ def get_queryset(self):
         return Notifications.objects.all().order_by("id")


+@extend_schema_view(**schema_with_prefetch())
 class EngagementPresetsViewset(
     PrefetchDojoModelViewSet,
 ):
@@ -3304,7 +3142,31 @@ class QuestionnaireEngagementSurveyViewSet(
     def get_queryset(self):
         return Engagement_Survey.objects.all().order_by("id")

+    @extend_schema(
+        request=OpenApiTypes.NONE,
+        parameters=[
+            OpenApiParameter(
+                "engagement_id", OpenApiTypes.INT, OpenApiParameter.PATH,
+            ),
+        ],
+        responses={status.HTTP_200_OK: serializers.QuestionnaireAnsweredSurveySerializer},
+    )
+    @action(
+        detail=True, methods=["post"], url_path=r"link_engagement/(?P<engagement_id>\d+)",
+    )
+    def link_engagement(self, request, pk, engagement_id):
+        # Get the answered survey
+        engagement_survey = self.get_object()
+        # Safely get the engagement
+        engagement = get_object_or_404(Engagement.objects, pk=engagement_id)
+        # Link the engagement
+        answered_survey, _ = Answered_Survey.objects.get_or_create(engagement=engagement, survey=engagement_survey)
+        # Send a favorable response
+        serialized_answered_survey = serializers.QuestionnaireAnsweredSurveySerializer(answered_survey)
+        return Response(serialized_answered_survey.data)
+
+@extend_schema_view(**schema_with_prefetch())
 class QuestionnaireAnsweredSurveyViewSet(
     prefetch.PrefetchListMixin,
     prefetch.PrefetchRetrieveMixin,
@@ -3334,3 +3196,13 @@ class AnnouncementViewSet(

     def get_queryset(self):
         return Announcement.objects.all().order_by("id")
+
+
+class NotificationWebhooksViewSet(
+    PrefetchDojoModelViewSet,
+):
+    serializer_class = serializers.NotificationWebhooksSerializer
+    queryset = Notification_Webhooks.objects.all()
+    filter_backends = (DjangoFilterBackend,)
+    filterset_fields = "__all__"
+    permission_classes = (permissions.IsSuperUser, DjangoModelPermissions)  # TODO: add permission also for other users
diff --git a/dojo/apps.py b/dojo/apps.py
index a7994d0828e..4d4d07af50e 100644
--- a/dojo/apps.py
+++ b/dojo/apps.py
@@ -71,19 +71,19 @@ def ready(self):
         # Load any signals here that will be ready for runtime
         # Importing the signals file is good enough if using the receiver decorator
-        import dojo.announcement.signals  # noqa: F401
-        import dojo.benchmark.signals  # noqa: F401
-        import dojo.cred.signals  # noqa: F401
-        import dojo.endpoint.signals  # noqa: F401
-        import dojo.engagement.signals  # noqa: F401
-        import dojo.finding_group.signals  # noqa: F401
-        import dojo.notes.signals  # noqa: F401
-        import dojo.product.signals  # noqa: F401
-        import dojo.product_type.signals  # noqa: F401
-        import dojo.risk_acceptance.signals  # noqa: F401
-        import dojo.sla_config.helpers  # noqa: F401
-        import dojo.tags_signals  #
noqa: F401 - import dojo.test.signals # noqa: F401 + import dojo.announcement.signals + import dojo.benchmark.signals + import dojo.cred.signals + import dojo.endpoint.signals + import dojo.engagement.signals + import dojo.finding_group.signals + import dojo.notes.signals + import dojo.product.signals + import dojo.product_type.signals + import dojo.risk_acceptance.signals + import dojo.sla_config.helpers + import dojo.tags_signals + import dojo.test.signals import dojo.tool_product.signals # noqa: F401 @@ -92,12 +92,11 @@ def get_model_fields_with_extra(model, extra_fields=()): def get_model_fields(default_fields, extra_fields=()): - combined = default_fields + extra_fields - return combined + return default_fields + extra_fields def get_model_default_fields(model): return tuple( field.name for field in model._meta.fields if - isinstance(field, (models.CharField, models.TextField)) + isinstance(field, models.CharField | models.TextField) ) diff --git a/dojo/authorization/authorization.py b/dojo/authorization/authorization.py index a542d7c6e01..8f013b60061 100644 --- a/dojo/authorization/authorization.py +++ b/dojo/authorization/authorization.py @@ -66,7 +66,7 @@ def user_has_permission(user, obj, permission): if role_has_permission(product_type_group.role.id, permission): return True return False - elif ( + if ( isinstance(obj, Product) and permission.value >= Permissions.Product_View.value ): @@ -87,51 +87,51 @@ def user_has_permission(user, obj, permission): if role_has_permission(product_group.role.id, permission): return True return False - elif ( + if ( isinstance(obj, Engagement) and permission in Permissions.get_engagement_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Test) and permission in Permissions.get_test_permissions() ): return user_has_permission(user, obj.engagement.product, permission) - elif ( + if ( isinstance(obj, Finding) or isinstance(obj, Stub_Finding) ) and permission in Permissions.get_finding_permissions(): return user_has_permission( user, obj.test.engagement.product, permission, ) - elif ( + if ( isinstance(obj, Finding_Group) and permission in Permissions.get_finding_group_permissions() ): return user_has_permission( user, obj.test.engagement.product, permission, ) - elif ( + if ( isinstance(obj, Endpoint) and permission in Permissions.get_endpoint_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Languages) and permission in Permissions.get_language_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, App_Analysis) and permission in Permissions.get_technology_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Product_API_Scan_Configuration) and permission in Permissions.get_product_api_scan_configuration_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Product_Type_Member) and permission in Permissions.get_product_type_member_permissions() ): @@ -140,9 +140,8 @@ def user_has_permission(user, obj, permission): return obj.user == user or user_has_permission( user, obj.product_type, permission, ) - else: - return user_has_permission(user, obj.product_type, permission) - elif ( + return user_has_permission(user, obj.product_type, permission) + if ( isinstance(obj, Product_Member) and permission in Permissions.get_product_member_permissions() ): @@ -151,19 +150,18 @@ def 
user_has_permission(user, obj, permission): return obj.user == user or user_has_permission( user, obj.product, permission, ) - else: - return user_has_permission(user, obj.product, permission) - elif ( + return user_has_permission(user, obj.product, permission) + if ( isinstance(obj, Product_Type_Group) and permission in Permissions.get_product_type_group_permissions() ): return user_has_permission(user, obj.product_type, permission) - elif ( + if ( isinstance(obj, Product_Group) and permission in Permissions.get_product_group_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Dojo_Group) and permission in Permissions.get_group_permissions() ): @@ -173,7 +171,7 @@ def user_has_permission(user, obj, permission): return group_member is not None and role_has_permission( group_member.role.id, permission, ) - elif ( + if ( isinstance(obj, Dojo_Group_Member) and permission in Permissions.get_group_member_permissions() ): @@ -182,9 +180,8 @@ def user_has_permission(user, obj, permission): return obj.user == user or user_has_permission( user, obj.group, permission, ) - else: - return user_has_permission(user, obj.group, permission) - elif ( + return user_has_permission(user, obj.group, permission) + if ( isinstance(obj, Cred_Mapping) and permission in Permissions.get_credential_permissions() ): @@ -202,9 +199,9 @@ def user_has_permission(user, obj, permission): return user_has_permission( user, obj.finding.test.engagement.product, permission, ) - else: - msg = f"No authorization implemented for class {type(obj).__name__} and permission {permission}" - raise NoAuthorizationImplementedError(msg) + return None + msg = f"No authorization implemented for class {type(obj).__name__} and permission {permission}" + raise NoAuthorizationImplementedError(msg) def user_has_global_permission(user, permission): diff --git a/dojo/authorization/authorization_decorators.py b/dojo/authorization/authorization_decorators.py index 3063d0821d1..1f1bc9dbcb9 100644 --- a/dojo/authorization/authorization_decorators.py +++ b/dojo/authorization/authorization_decorators.py @@ -12,7 +12,6 @@ def user_is_authorized(model, permission, arg, lookup="pk", func=None): """Decorator for functions that ensures the user has permission on an object.""" - if func is None: return functools.partial( user_is_authorized, model, permission, arg, lookup, @@ -41,7 +40,6 @@ def _wrapped(request, *args, **kwargs): def user_has_global_permission(permission, func=None): """Decorator for functions that ensures the user has a (global) permission""" - if func is None: return functools.partial(user_has_global_permission, permission) @@ -54,10 +52,7 @@ def _wrapped(request, *args, **kwargs): def user_is_configuration_authorized(permission, func=None): - """ - Decorator for views that checks whether a user has a particular permission enabled. - """ - + """Decorator for views that checks whether a user has a particular permission enabled.""" if func is None: return functools.partial(user_is_configuration_authorized, permission) diff --git a/dojo/authorization/roles_permissions.py b/dojo/authorization/roles_permissions.py index 779463258ff..530008a2f7a 100644 --- a/dojo/authorization/roles_permissions.py +++ b/dojo/authorization/roles_permissions.py @@ -517,9 +517,7 @@ def get_roles_with_permissions(): def get_global_roles_with_permissions(): - """ - Extra permissions for global roles, on top of the permissions granted to the "normal" roles above. 
- """ + """Extra permissions for global roles, on top of the permissions granted to the "normal" roles above.""" return { Roles.Maintainer: {Permissions.Product_Type_Add}, Roles.Owner: {Permissions.Product_Type_Add}, diff --git a/dojo/context_processors.py b/dojo/context_processors.py index 12168d9ea64..782cf767ce2 100644 --- a/dojo/context_processors.py +++ b/dojo/context_processors.py @@ -25,6 +25,7 @@ def globalize_vars(request): "SAML2_LOGOUT_URL": settings.SAML2_LOGOUT_URL, "DOCUMENTATION_URL": settings.DOCUMENTATION_URL, "API_TOKENS_ENABLED": settings.API_TOKENS_ENABLED, + "API_TOKEN_AUTH_ENDPOINT_ENABLED": settings.API_TOKEN_AUTH_ENDPOINT_ENABLED, } diff --git a/dojo/cred/queries.py b/dojo/cred/queries.py index 4dd14385a06..28419772328 100644 --- a/dojo/cred/queries.py +++ b/dojo/cred/queries.py @@ -44,8 +44,6 @@ def get_authorized_cred_mappings(permission, queryset=None): product__member=Exists(authorized_product_roles), product__prod_type__authorized_group=Exists(authorized_product_type_groups), product__authorized_group=Exists(authorized_product_groups)) - cred_mappings = cred_mappings.filter( + return cred_mappings.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - - return cred_mappings diff --git a/dojo/cred/views.py b/dojo/cred/views.py index bddf5dd6a2e..2fc373c3ac9 100644 --- a/dojo/cred/views.py +++ b/dojo/cred/views.py @@ -38,7 +38,7 @@ def new_cred(request): return render(request, "dojo/new_cred.html", {"tform": tform}) -@user_is_authorized(Product, Permissions.Product_View, "pid") +@user_is_authorized(Product, Permissions.Product_Edit, "pid") def all_cred_product(request, pid): prod = get_object_or_404(Product, id=pid) creds = Cred_Mapping.objects.filter(product=prod).order_by("cred_id__name") @@ -641,10 +641,8 @@ def delete_cred_controller(request, destination_url, id, ttid): if destination_url == "cred": return HttpResponseRedirect(reverse(destination_url)) - else: - return HttpResponseRedirect(reverse(destination_url, args=(id, ))) - else: - tform = CredMappingForm(instance=cred) + return HttpResponseRedirect(reverse(destination_url, args=(id, ))) + tform = CredMappingForm(instance=cred) add_breadcrumb(title="Delete Credential", top_level=False, request=request) product_tab = None diff --git a/dojo/db_migrations/0214_test_type_dynamically_generated.py b/dojo/db_migrations/0214_test_type_dynamically_generated.py new file mode 100644 index 00000000000..80219377e7f --- /dev/null +++ b/dojo/db_migrations/0214_test_type_dynamically_generated.py @@ -0,0 +1,18 @@ +# Generated by Django 5.0.8 on 2024-09-04 19:23 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0213_system_settings_enable_ui_table_based_searching'), + ] + + operations = [ + migrations.AddField( + model_name='test_type', + name='dynamically_generated', + field=models.BooleanField(default=False, help_text='Set to True for test types that are created at import time'), + ), + ] diff --git a/dojo/db_migrations/0215_webhooks_notifications.py b/dojo/db_migrations/0215_webhooks_notifications.py new file mode 100644 index 00000000000..cc65ce43f1b --- /dev/null +++ b/dojo/db_migrations/0215_webhooks_notifications.py @@ -0,0 +1,130 @@ +# Generated by Django 5.0.8 on 2024-08-16 17:07 + +import django.db.models.deletion +import multiselectfield.db.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + 
dependencies = [
+        ('dojo', '0214_test_type_dynamically_generated'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='system_settings',
+            name='enable_webhooks_notifications',
+            field=models.BooleanField(default=False, verbose_name='Enable Webhook notifications'),
+        ),
+        migrations.AddField(
+            model_name='system_settings',
+            name='webhooks_notifications_timeout',
+            field=models.IntegerField(default=10, help_text='How many seconds DefectDojo will wait for a response from the webhook endpoint'),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='auto_close_engagement',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='close_engagement',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='code_review',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='engagement_added',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='jira_update',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='JIRA sync happens in the background, errors will be shown as notifications/alerts so make sure to subscribe', max_length=33, verbose_name='JIRA problems'),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='other',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='product_added',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='product_type_added',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='review_requested',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='risk_acceptance_expiration',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Get notified of (upcoming) Risk Acceptance expiries', max_length=33, verbose_name='Risk Acceptance Expiration'),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='scan_added',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Triggered whenever a (re-)import has been done that created/updated/closed findings.', max_length=33),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='scan_added_empty',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=[], help_text='Triggered whenever a (re-)import has been done (even if it created/updated/closed no findings).', max_length=33),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='sla_breach',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Get notified of (upcoming) SLA breaches', max_length=33, verbose_name='SLA breach'),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='sla_breach_combined',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Get notified of (upcoming) SLA breaches (a message per project)', max_length=33, verbose_name='SLA breach (combined)'),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='stale_engagement',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='test_added',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='upcoming_engagement',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33),
+        ),
+        migrations.AlterField(
+            model_name='notifications',
+            name='user_mentioned',
+            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33),
+        ),
+        migrations.CreateModel(
+            name='Notification_Webhooks',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('name', models.CharField(default='', help_text='Name of the incoming webhook', max_length=100, unique=True)),
+                ('url', models.URLField(default='', help_text='The full URL of the incoming webhook')),
+                ('header_name', models.CharField(blank=True, default='', help_text='Name of the header required for interacting with the webhook endpoint', max_length=100, null=True)),
+                ('header_value', models.CharField(blank=True, default='', help_text='Content of the header required for interacting with the webhook endpoint', max_length=100, null=True)),
+                ('status', models.CharField(choices=[('active', 'Active'), ('active_tmp', 'Active but 5xx (or similar) error detected'), ('inactive_tmp', 'Temporarily inactive because of 5xx (or similar) error'), ('inactive_permanent', 'Permanently inactive')], default='active', editable=False, help_text='Status of the incoming webhook', max_length=20)),
+                ('first_error', models.DateTimeField(blank=True, editable=False, help_text='If the endpoint is active, when the error first occurred', null=True)),
+                ('last_error', models.DateTimeField(blank=True, editable=False, help_text='If the endpoint is active, when the error last occurred', null=True)),
+                ('note', models.CharField(blank=True, default='', editable=False, help_text='Description of the latest error', max_length=1000, null=True)),
+                ('owner', models.ForeignKey(blank=True, help_text='Owner/receiver of the notification; if empty, it is processed as a system notification', null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.dojo_user')),
+            ],
+        ),
+    ]
diff --git a/dojo/db_migrations/0216_alter_jira_project_push_all_issues.py b/dojo/db_migrations/0216_alter_jira_project_push_all_issues.py
new file mode 100644
index 00000000000..fe9378b77de
--- /dev/null
+++ b/dojo/db_migrations/0216_alter_jira_project_push_all_issues.py
@@ -0,0 +1,18 @@
+# Generated by Django 5.0.8 on 2024-10-03 23:23
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('dojo', '0215_webhooks_notifications'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='jira_project',
+            name='push_all_issues',
+            field=models.BooleanField(blank=True, default=False, help_text='Automatically create JIRA tickets for verified findings.
Once linked, the JIRA ticket will continue to sync, regardless of status in DefectDojo.'), + ), + ] diff --git a/dojo/db_migrations/0217_jira_project_enabled.py b/dojo/db_migrations/0217_jira_project_enabled.py new file mode 100644 index 00000000000..6bde35303ba --- /dev/null +++ b/dojo/db_migrations/0217_jira_project_enabled.py @@ -0,0 +1,18 @@ +# Generated by Django 5.0.8 on 2024-10-10 17:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0216_alter_jira_project_push_all_issues'), + ] + + operations = [ + migrations.AddField( + model_name='jira_project', + name='enabled', + field=models.BooleanField(blank=True, default=True, help_text='When disabled, Findings will no longer be pushed to Jira, even if they have already been pushed previously.', verbose_name='Enable Connection With Jira Project'), + ), + ] diff --git a/dojo/db_migrations/0218_system_settings_enforce_verified_status_and_more.py b/dojo/db_migrations/0218_system_settings_enforce_verified_status_and_more.py new file mode 100644 index 00000000000..beec72caffa --- /dev/null +++ b/dojo/db_migrations/0218_system_settings_enforce_verified_status_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 5.0.9 on 2024-10-22 19:53 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0217_jira_project_enabled'), + ] + + operations = [ + migrations.AddField( + model_name='system_settings', + name='enforce_verified_status', + field=models.BooleanField(default=True, help_text='When enabled, features such as product grading, jira integration, metrics, and reports will only interact with verified findings.', verbose_name='Enforce Verified Status'), + ), + migrations.AlterField( + model_name='jira_project', + name='push_all_issues', + field=models.BooleanField(blank=True, default=False, help_text='Automatically create JIRA tickets for verified findings, assuming enforce_verified_status is True, or for all findings otherwise. 
Once linked, the JIRA ticket will continue to sync, regardless of status in DefectDojo.'), + ), + ] diff --git a/dojo/decorators.py b/dojo/decorators.py index 129106c74de..8f356b0f623 100644 --- a/dojo/decorators.py +++ b/dojo/decorators.py @@ -43,8 +43,7 @@ def __wrapper__(*args, **kwargs): countdown = kwargs.pop("countdown", 0) if we_want_async(*args, func=func, **kwargs): return func.apply_async(args=args, kwargs=kwargs, countdown=countdown) - else: - return func(*args, **kwargs) + return func(*args, **kwargs) return __wrapper__ @@ -78,8 +77,7 @@ def __wrapper__(*args, **kwargs): if _func is None: # decorator called without parameters return dojo_model_to_id_internal - else: - return dojo_model_to_id_internal(_func) + return dojo_model_to_id_internal(_func) # decorator with parameters needs another wrapper layer @@ -123,8 +121,7 @@ def __wrapper__(*args, **kwargs): if _func is None: # decorator called without parameters return dojo_model_from_id_internal - else: - return dojo_model_from_id_internal(_func) + return dojo_model_from_id_internal(_func) def get_parameter_froms_args_kwargs(args, kwargs, parameter): @@ -147,22 +144,6 @@ def get_parameter_froms_args_kwargs(args, kwargs, parameter): return model_or_id -def on_exception_log_kwarg(func): - def wrapper(self, *args, **kwargs): - try: - return func(self, *args, **kwargs) - - except Exception: - logger.info(f"exception occured at url: {self.driver.current_url}") - logger.info(f"page source: {self.driver.page_source}") - f = open("/tmp/selenium_page_source.html", "w", encoding="utf-8") - f.writelines(self.driver.page_source) - # time.sleep(30) - raise - - return wrapper - - def dojo_ratelimit(key="ip", rate=None, method=UNSAFE, block=False): def decorator(fn): @wraps(fn) diff --git a/dojo/endpoint/queries.py b/dojo/endpoint/queries.py index 581feefc13b..684eeab7b1a 100644 --- a/dojo/endpoint/queries.py +++ b/dojo/endpoint/queries.py @@ -53,12 +53,10 @@ def get_authorized_endpoints(permission, queryset=None, user=None): product__member=Exists(authorized_product_roles), product__prod_type__authorized_group=Exists(authorized_product_type_groups), product__authorized_group=Exists(authorized_product_groups)) - endpoints = endpoints.filter( + return endpoints.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - return endpoints - def get_authorized_endpoint_status(permission, queryset=None, user=None): @@ -101,8 +99,6 @@ def get_authorized_endpoint_status(permission, queryset=None, user=None): endpoint__product__member=Exists(authorized_product_roles), endpoint__product__prod_type__authorized_group=Exists(authorized_product_type_groups), endpoint__product__authorized_group=Exists(authorized_product_groups)) - endpoint_status = endpoint_status.filter( + return endpoint_status.filter( Q(endpoint__product__prod_type__member=True) | Q(endpoint__product__member=True) | Q(endpoint__product__prod_type__authorized_group=True) | Q(endpoint__product__authorized_group=True)) - - return endpoint_status diff --git a/dojo/endpoint/utils.py b/dojo/endpoint/utils.py index be1c63fb0c0..d5c378e5e97 100644 --- a/dojo/endpoint/utils.py +++ b/dojo/endpoint/utils.py @@ -79,17 +79,16 @@ def endpoint_get_or_create(**kwargs): count = qs.count() if count == 0: return Endpoint.objects.get_or_create(**kwargs) - elif count == 1: - return qs.order_by("id").first(), False - else: - logger.warning( - f"Endpoints in your database are broken. 
" - f"Please access {reverse('endpoint_migrate')} and migrate them to new format or remove them.", - ) - # Get the oldest endpoint first, and return that instead - # a datetime is not captured on the endpoint model, so ID - # will have to work here instead + if count == 1: return qs.order_by("id").first(), False + logger.warning( + f"Endpoints in your database are broken. " + f"Please access {reverse('endpoint_migrate')} and migrate them to new format or remove them.", + ) + # Get the oldest endpoint first, and return that instead + # a datetime is not captured on the endpoint model, so ID + # will have to work here instead + return qs.order_by("id").first(), False def clean_hosts_run(apps, change): @@ -325,7 +324,7 @@ def endpoint_meta_import(file, product, create_endpoints, create_tags, create_me 'The column "hostname" must be present to map host to Endpoint.', extra_tags="alert-danger") return HttpResponseRedirect(reverse("import_endpoint_meta", args=(product.id, ))) - elif origin == "API": + if origin == "API": msg = 'The column "hostname" must be present to map host to Endpoint.' raise ValidationError(msg) @@ -361,14 +360,14 @@ def endpoint_meta_import(file, product, create_endpoints, create_tags, create_me for tag in existing_tags: if item[0] not in tag: continue - else: - # found existing. Update it - existing_tags.remove(tag) - break + # found existing. Update it + existing_tags.remove(tag) + break existing_tags += [item[0] + ":" + item[1]] # if tags are not supposed to be added, this value remain unchanged endpoint.tags = existing_tags endpoint.save() + return None def remove_broken_endpoint_statuses(apps): diff --git a/dojo/endpoint/views.py b/dojo/endpoint/views.py index a411c9e70d4..5a85a0a6468 100644 --- a/dojo/endpoint/views.py +++ b/dojo/endpoint/views.py @@ -6,6 +6,7 @@ from django.conf import settings from django.contrib import messages from django.contrib.admin.utils import NestedObjects +from django.core.exceptions import PermissionDenied from django.db import DEFAULT_DB_ALIAS from django.db.models import Count, Q, QuerySet from django.http import HttpResponseRedirect @@ -97,9 +98,8 @@ def get_endpoint_ids(endpoints): key = f"{e.host}-{e.product.id}" if key in hosts: continue - else: - hosts.append(key) - ids.append(e.id) + hosts.append(key) + ids.append(e.id) return ids @@ -178,7 +178,7 @@ def view_endpoint_host(request, eid): return process_endpoint_view(request, eid, host_view=True) -@user_is_authorized(Endpoint, Permissions.Endpoint_View, "eid") +@user_is_authorized(Endpoint, Permissions.Endpoint_Edit, "eid") def edit_endpoint(request, eid): endpoint = get_object_or_404(Endpoint, id=eid) @@ -306,8 +306,7 @@ def add_meta_data(request, eid): extra_tags="alert-success") if "add_another" in request.POST: return HttpResponseRedirect(reverse("add_endpoint_meta_data", args=(eid,))) - else: - return HttpResponseRedirect(reverse("view_endpoint", args=(eid,))) + return HttpResponseRedirect(reverse("view_endpoint", args=(eid,))) else: form = DojoMetaDataForm() @@ -326,12 +325,12 @@ def edit_meta_data(request, eid): endpoint = Endpoint.objects.get(id=eid) if request.method == "POST": - for key, value in request.POST.items(): + for key, orig_value in request.POST.items(): if key.startswith("cfv_"): cfv_id = int(key.split("_")[1]) cfv = get_object_or_404(DojoMeta, id=cfv_id) - value = value.strip() + value = orig_value.strip() if value: cfv.value = value cfv.save() @@ -468,6 +467,9 @@ def prefetch_for_endpoints(endpoints): def migrate_endpoints_view(request): + if not 
request.user.is_superuser: + raise PermissionDenied + view_name = "Migrate endpoints" html_log = clean_hosts_run(apps=apps, change=(request.method == "POST")) diff --git a/dojo/engagement/queries.py b/dojo/engagement/queries.py index 9d8e9b6ae41..97eeb31bdfa 100644 --- a/dojo/engagement/queries.py +++ b/dojo/engagement/queries.py @@ -39,8 +39,6 @@ def get_authorized_engagements(permission): product__member=Exists(authorized_product_roles), product__prod_type__authorized_group=Exists(authorized_product_type_groups), product__authorized_group=Exists(authorized_product_groups)).order_by("id") - engagements = engagements.filter( + return engagements.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - - return engagements diff --git a/dojo/engagement/services.py b/dojo/engagement/services.py index f11963867a4..18aed9e425b 100644 --- a/dojo/engagement/services.py +++ b/dojo/engagement/services.py @@ -16,7 +16,7 @@ def close_engagement(eng): eng.save() if jira_helper.get_jira_project(eng): - jira_helper.close_epic(eng, True) + jira_helper.close_epic(eng, push_to_jira=True) def reopen_engagement(eng): diff --git a/dojo/engagement/signals.py b/dojo/engagement/signals.py index e5429455f25..7b95d6fe87b 100644 --- a/dojo/engagement/signals.py +++ b/dojo/engagement/signals.py @@ -16,7 +16,7 @@ def engagement_post_save(sender, instance, created, **kwargs): if created: title = _('Engagement created for "%(product)s": %(name)s') % {"product": instance.product, "name": instance.name} create_notification(event="engagement_added", title=title, engagement=instance, product=instance.product, - url=reverse("view_engagement", args=(instance.id,))) + url=reverse("view_engagement", args=(instance.id,)), url_api=reverse("engagement-detail", args=(instance.id,))) @receiver(pre_save, sender=Engagement) @@ -28,7 +28,7 @@ def engagement_pre_save(sender, instance, **kwargs): title=_("Closure of %s") % instance.name, description=_('The engagement "%s" was closed') % (instance.name), engagement=instance, url=reverse("engagement_all_findings", args=(instance.id, ))) - elif instance.status in ["In Progress"] and old.status not in ["Not Started"]: + elif instance.status == "In Progress" and old.status != "Not Started": create_notification(event="engagement_reopened", title=_("Reopening of %s") % instance.name, engagement=instance, diff --git a/dojo/engagement/views.py b/dojo/engagement/views.py index 138ae89238e..9cfab608896 100644 --- a/dojo/engagement/views.py +++ b/dojo/engagement/views.py @@ -7,7 +7,6 @@ from functools import reduce from tempfile import NamedTemporaryFile from time import strftime -from typing import List, Tuple from django.conf import settings from django.contrib import messages @@ -17,10 +16,11 @@ from django.db import DEFAULT_DB_ALIAS from django.db.models import Count, Q from django.db.models.query import Prefetch, QuerySet -from django.http import FileResponse, HttpRequest, HttpResponse, HttpResponseRedirect, QueryDict, StreamingHttpResponse +from django.http import HttpRequest, HttpResponse, HttpResponseRedirect, QueryDict, StreamingHttpResponse from django.shortcuts import get_object_or_404, render from django.urls import Resolver404, reverse from django.utils import timezone +from django.utils.translation import gettext as _ from django.views import View from django.views.decorators.cache import cache_page from django.views.decorators.vary import vary_on_cookie @@ -67,6 +67,7 @@ TypedNoteForm, 
UploadThreatForm, ) +from dojo.importers.base_importer import BaseImporter from dojo.importers.default_importer import DefaultImporter from dojo.models import ( Check_List, @@ -98,6 +99,7 @@ add_success_message_to_response, async_delete, calculate_grade, + generate_file_response_from_file_path, get_cal_event, get_page_items, get_return_url, @@ -164,15 +166,13 @@ def get_filtered_engagements(request, view): filter_string_matching = get_system_setting("filter_string_matching", False) filter_class = EngagementDirectFilterWithoutObjectLookups if filter_string_matching else EngagementDirectFilter - engagements = filter_class(request.GET, queryset=engagements) - - return engagements + return filter_class(request.GET, queryset=engagements) def get_test_counts(engagements): # Get the test counts per engagement. As a separate query, this is much # faster than annotating the above `engagements` query. - engagement_test_counts = { + return { test["engagement"]: test["test_count"] for test in Test.objects.filter( engagement__in=engagements, @@ -182,7 +182,6 @@ def get_test_counts(engagements): test_count=Count("engagement"), ) } - return engagement_test_counts def engagements(request, view): @@ -302,9 +301,8 @@ def edit_engagement(request, eid): if "_Add Tests" in request.POST: return HttpResponseRedirect( reverse("add_tests", args=(engagement.id, ))) - else: - return HttpResponseRedirect( - reverse("view_engagement", args=(engagement.id, ))) + return HttpResponseRedirect( + reverse("view_engagement", args=(engagement.id, ))) else: logger.debug(form.errors) @@ -394,7 +392,7 @@ def copy_engagement(request, eid): messages.SUCCESS, "Engagement Copied successfully.", extra_tags="alert-success") - create_notification(event="engagement_copied", # TODO - if 'copy' functionality will be supported by API as well, 'create_notification' needs to be migrated to place where it will be able to cover actions from both interfaces + create_notification(event="engagement_copied", # TODO: - if 'copy' functionality will be supported by API as well, 'create_notification' needs to be migrated to place where it will be able to cover actions from both interfaces title=_("Copying of %s") % engagement.name, description=f'The engagement "{engagement.name}" was copied by {request.user}', product=product, @@ -402,12 +400,11 @@ def copy_engagement(request, eid): recipients=[engagement.lead], icon="exclamation-triangle") return redirect_to_return_url_or_else(request, reverse("view_engagements", args=(product.id, ))) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to copy engagement, please try again.", - extra_tags="alert-danger") + messages.add_message( + request, + messages.ERROR, + "Unable to copy engagement, please try again.", + extra_tags="alert-danger") product_tab = Product_Tab(product, title="Copy Engagement", tab="engagements") return render(request, "dojo/copy_object.html", { @@ -425,13 +422,12 @@ def get_template(self): return "dojo/view_eng.html" def get_risks_accepted(self, eng): - risks_accepted = eng.risk_acceptance.all().select_related("owner").annotate(accepted_findings_count=Count("accepted_findings__id")) - return risks_accepted + return eng.risk_acceptance.all().select_related("owner").annotate(accepted_findings_count=Count("accepted_findings__id")) def get_filtered_tests( self, request: HttpRequest, - queryset: List[Test], + queryset: list[Test], engagement: Engagement, ): filter_string_matching = get_system_setting("filter_string_matching", False) @@ -671,10 +667,10 @@ def 
add_tests(request, eid): if "_Add Another Test" in request.POST: return HttpResponseRedirect( reverse("add_tests", args=(eng.id, ))) - elif "_Add Findings" in request.POST: + if "_Add Findings" in request.POST: return HttpResponseRedirect( reverse("add_findings", args=(new_test.id, ))) - elif "_Finished" in request.POST: + if "_Finished" in request.POST: return HttpResponseRedirect( reverse("view_engagement", args=(eng.id, ))) else: @@ -697,9 +693,7 @@ def add_tests(request, eid): class ImportScanResultsView(View): def get_template(self) -> str: - """ - Returns the template that will be presented to the user - """ + """Returns the template that will be presented to the user""" return "dojo/import_scan_results.html" def get_development_environment( @@ -716,12 +710,10 @@ def get_development_environment( def get_engagement_or_product( self, user: Dojo_User, - engagement_id: int = None, - product_id: int = None, - ) -> Tuple[Engagement, Product, Product | Engagement]: - """ - Using the path parameters, either fetch the product or engagement - """ + engagement_id: int | None = None, + product_id: int | None = None, + ) -> tuple[Engagement, Product, Product | Engagement]: + """Using the path parameters, either fetch the product or engagement""" engagement = product = engagement_or_product = None # Get the product if supplied # Get the engagement if supplied @@ -744,13 +736,10 @@ def get_form( request: HttpRequest, **kwargs: dict, ) -> ImportScanForm: - """ - Returns the default import form for importing findings - """ + """Returns the default import form for importing findings""" if request.method == "POST": return ImportScanForm(request.POST, request.FILES, **kwargs) - else: - return ImportScanForm(**kwargs) + return ImportScanForm(**kwargs) def get_credential_form( self, @@ -764,27 +753,24 @@ def get_credential_form( """ if request.method == "POST": return CredMappingForm(request.POST) - else: - # If the engagement is not present, return an empty form - if engagement is None: - return CredMappingForm() - # Otherwise get all creds in the associated engagement - return CredMappingForm( - initial={ - "cred_user_queryset": Cred_Mapping.objects.filter( - engagement=engagement, - ).order_by("cred_id"), - }, - ) + # If the engagement is not present, return an empty form + if engagement is None: + return CredMappingForm() + # Otherwise get all creds in the associated engagement + return CredMappingForm( + initial={ + "cred_user_queryset": Cred_Mapping.objects.filter( + engagement=engagement, + ).order_by("cred_id"), + }, + ) def get_jira_form( self, request: HttpRequest, engagement_or_product: Engagement | Product, - ) -> Tuple[JIRAImportScanForm | None, bool]: - """ - Returns a JiraImportScanForm if jira is enabled - """ + ) -> tuple[JIRAImportScanForm | None, bool]: + """Returns a JiraImportScanForm if jira is enabled""" jira_form = None push_all_jira_issues = False # Determine if jira issues should be pushed automatically @@ -808,7 +794,7 @@ def get_product_tab( self, product: Product, engagement: Engagement, - ) -> Tuple[Product_Tab, dict]: + ) -> tuple[Product_Tab, dict]: """ Determine how the product tab will be rendered, and what tab will be selected as currently active @@ -825,9 +811,9 @@ def get_product_tab( def handle_request( self, request: HttpRequest, - engagement_id: int = None, - product_id: int = None, - ) -> Tuple[HttpRequest, dict]: + engagement_id: int | None = None, + product_id: int | None = None, + ) -> tuple[HttpRequest, dict]: """ Process the common behaviors between request 
types, and then return the request and context dict back to be rendered
@@ -921,15 +907,20 @@ def create_engagement(
         # Return the engagement
         return engagement

+    def get_importer(
+        self,
+        context: dict,
+    ) -> BaseImporter:
+        """Gets the importer to use"""
+        return DefaultImporter(**context)
+
     def import_findings(
         self,
         context: dict,
     ) -> str | None:
-        """
-        Attempt to import with all the supplied information
-        """
+        """Attempt to import with all the supplied information"""
         try:
-            importer_client = DefaultImporter(**context)
+            importer_client = self.get_importer(context)
             context["test"], _, finding_count, closed_finding_count, _, _, _ = importer_client.process_scan(
                 context.pop("scan", None),
             )
@@ -949,9 +940,7 @@ def process_form(
         self,
         request: HttpRequest,
         form: ImportScanForm,
         context: dict,
     ) -> str | None:
-        """
-        Process the form and manipulate the input in any way that is appropriate
-        """
+        """Process the form and manipulate the input in any way that is appropriate"""
         # Update the running context dict with cleaned form input
         context.update({
             "scan": request.FILES.get("file", None),
@@ -1021,9 +1010,7 @@ def process_credentials_form(
         self,
         request: HttpRequest,
         form: CredMappingForm,
         context: dict,
     ) -> str | None:
-        """
-        Process the credentials form by creating
-        """
+        """Process the credentials form by creating a credential mapping"""
         if cred_user := form.cleaned_data["cred_user"]:
             # Select the credential mapping object from the selected list and only allow if the credential is associated with the product
             cred_user = Cred_Mapping.objects.filter(
@@ -1043,18 +1030,14 @@ def success_redirect(
         self,
         context: dict,
     ) -> HttpResponseRedirect:
-        """
-        Redirect the user to a place that indicates a successful import
-        """
+        """Redirect the user to a place that indicates a successful import"""
         return HttpResponseRedirect(reverse("view_test", args=(context.get("test").id, )))

     def failure_redirect(
         self,
         context: dict,
     ) -> HttpResponseRedirect:
-        """
-        Redirect the user to a place that indicates a failed import
-        """
+        """Redirect the user to a place that indicates a failed import"""
         return HttpResponseRedirect(reverse(
             "import_scan_results",
             args=(context.get("engagement", context.get("product")).id, ),
@@ -1063,12 +1046,10 @@ def get(
         self,
         request: HttpRequest,
-        engagement_id: int = None,
-        product_id: int = None,
+        engagement_id: int | None = None,
+        product_id: int | None = None,
     ) -> HttpResponse:
-        """
-        Process GET requests for the Import View
-        """
+        """Process GET requests for the Import View"""
         # process the request and path parameters
         request, context = self.handle_request(
             request,
@@ -1081,12 +1062,10 @@ def post(
         self,
         request: HttpRequest,
-        engagement_id: int = None,
-        product_id: int = None,
+        engagement_id: int | None = None,
+        product_id: int | None = None,
     ) -> HttpResponse:
-        """
-        Process POST requests for the Import View
-        """
+        """Process POST requests for the Import View"""
         # process the request and path parameters
         request, context = self.handle_request(
             request,
@@ -1239,7 +1218,7 @@ def add_risk_acceptance(request, eid, fid=None):

             findings = form.cleaned_data["accepted_findings"]

-            risk_acceptance = ra_helper.add_findings_to_risk_acceptance(risk_acceptance, findings)
+            risk_acceptance = ra_helper.add_findings_to_risk_acceptance(request.user, risk_acceptance, findings)

             messages.add_message(
                 request,
@@ -1349,7 +1328,7 @@ def view_edit_risk_acceptance(request, eid, raid, edit_mode=False):
             finding = get_object_or_404(
                 Finding, pk=request.POST["remove_finding_id"])
-
ra_helper.remove_finding_from_risk_acceptance(risk_acceptance, finding) + ra_helper.remove_finding_from_risk_acceptance(request.user, risk_acceptance, finding) messages.add_message( request, @@ -1380,7 +1359,7 @@ def view_edit_risk_acceptance(request, eid, raid, edit_mode=False): if not errors: findings = add_findings_form.cleaned_data["accepted_findings"] - ra_helper.add_findings_to_risk_acceptance(risk_acceptance, findings) + ra_helper.add_findings_to_risk_acceptance(request.user, risk_acceptance, findings) messages.add_message( request, @@ -1390,8 +1369,7 @@ def view_edit_risk_acceptance(request, eid, raid, edit_mode=False): if not errors: logger.debug("redirecting to return_url") return redirect_to_return_url_or_else(request, reverse("view_risk_acceptance", args=(eid, raid))) - else: - logger.error("errors found") + logger.error("errors found") else: if edit_mode: @@ -1538,8 +1516,7 @@ def upload_threatmodel(request, eid): @user_is_authorized(Engagement, Permissions.Engagement_View, "eid") def view_threatmodel(request, eid): eng = get_object_or_404(Engagement, pk=eid) - response = FileResponse(open(eng.tmodel_path, "rb")) - return response + return generate_file_response_from_file_path(eng.tmodel_path) @user_is_authorized(Engagement, Permissions.Engagement_View, "eid") @@ -1578,9 +1555,7 @@ def get_engagements(request): if not url: msg = "Please use the export button when exporting engagements" raise ValidationError(msg) - else: - if url.startswith("url="): - url = url[4:] + url = url.removeprefix("url=") path_items = list(filter(None, re.split(r"/|\?", url))) diff --git a/dojo/filters.py b/dojo/filters.py index 9916ac31e21..35ceb205938 100644 --- a/dojo/filters.py +++ b/dojo/filters.py @@ -331,8 +331,7 @@ def get_tags_model_from_field_name(field): def get_tags_label_from_model(model): if model: return f"Tags ({model.__name__.title()})" - else: - return "Tags (Unknown)" + return "Tags (Unknown)" def get_finding_filterset_fields(metrics=False, similar=False, filter_string_matching=False): @@ -605,7 +604,7 @@ def __init__(self, *args, **kwargs): class DateRangeFilter(ChoiceFilter): options = { - None: (_("Any date"), lambda qs, name: qs.all()), + None: (_("Any date"), lambda qs, _: qs.all()), 1: (_("Today"), lambda qs, name: qs.filter(**{ f"{name}__year": now().year, f"{name}__month": now().month, @@ -651,7 +650,7 @@ def filter(self, qs, value): class DateRangeOmniFilter(ChoiceFilter): options = { - None: (_("Any date"), lambda qs, name: qs.all()), + None: (_("Any date"), lambda qs, _: qs.all()), 1: (_("Today"), lambda qs, name: qs.filter(**{ f"{name}__year": now().year, f"{name}__month": now().month, @@ -713,7 +712,7 @@ def filter(self, qs, value): class ReportBooleanFilter(ChoiceFilter): options = { - None: (_("Either"), lambda qs, name: qs.all()), + None: (_("Either"), lambda qs, _: qs.all()), 1: (_("Yes"), lambda qs, name: qs.filter(**{ f"{name}": True, })), @@ -780,6 +779,7 @@ def any(self, qs, name): self.start_date = _truncate(start_date - timedelta(days=1)) self.end_date = _truncate(now() + timedelta(days=1)) return qs.all() + return None def current_month(self, qs, name): self.start_date = local_tz.localize( @@ -1420,13 +1420,16 @@ class ApiFindingFilter(DojoFilter): # DateRangeFilter created = DateRangeFilter() date = DateRangeFilter() - on = DateFilter(field_name="date", lookup_expr="exact") - before = DateFilter(field_name="date", lookup_expr="lt") - after = DateFilter(field_name="date", lookup_expr="gt") + discovered_on = DateFilter(field_name="date", lookup_expr="exact") + 
discovered_before = DateFilter(field_name="date", lookup_expr="lt") + discovered_after = DateFilter(field_name="date", lookup_expr="gt") jira_creation = DateRangeFilter(field_name="jira_issue__jira_creation") jira_change = DateRangeFilter(field_name="jira_issue__jira_change") last_reviewed = DateRangeFilter() mitigated = DateRangeFilter() + mitigated_on = DateFilter(field_name="mitigated", lookup_expr="exact") + mitigated_before = DateFilter(field_name="mitigated", lookup_expr="lt") + mitigated_after = DateFilter(field_name="mitigated", lookup_expr="gt") # NumberInFilter cwe = NumberInFilter(field_name="cwe", lookup_expr="in") defect_review_requested_by = NumberInFilter(field_name="defect_review_requested_by", lookup_expr="in") @@ -1543,10 +1546,10 @@ def filter(self, qs, value): class FindingFilterHelper(FilterSet): title = CharFilter(lookup_expr="icontains") - date = DateFromToRangeFilter(field_name="date", label="Date Discovered") - on = DateFilter(field_name="date", lookup_expr="exact", label="On") - before = DateFilter(field_name="date", lookup_expr="lt", label="Before") - after = DateFilter(field_name="date", lookup_expr="gt", label="After") + date = DateRangeFilter(field_name="date", label="Date Discovered") + on = DateFilter(field_name="date", lookup_expr="exact", label="Discovered On") + before = DateFilter(field_name="date", lookup_expr="lt", label="Discovered Before") + after = DateFilter(field_name="date", lookup_expr="gt", label="Discovered After") last_reviewed = DateRangeFilter() last_status_update = DateRangeFilter() cwe = MultipleChoiceFilter(choices=[]) @@ -1554,7 +1557,10 @@ class FindingFilterHelper(FilterSet): severity = MultipleChoiceFilter(choices=SEVERITY_CHOICES) duplicate = ReportBooleanFilter() is_mitigated = ReportBooleanFilter() - mitigated = DateRangeFilter(label="Mitigated Date") + mitigated = DateRangeFilter(field_name="mitigated", label="Mitigated Date") + mitigated_on = DateFilter(field_name="mitigated", lookup_expr="exact", label="Mitigated On") + mitigated_before = DateFilter(field_name="mitigated", lookup_expr="lt", label="Mitigated Before") + mitigated_after = DateFilter(field_name="mitigated", lookup_expr="gt", label="Mitigated After") planned_remediation_date = DateRangeOmniFilter() planned_remediation_version = CharFilter(lookup_expr="icontains", label=_("Planned remediation version")) file_path = CharFilter(lookup_expr="icontains") @@ -1663,6 +1669,9 @@ def set_date_fields(self, *args: list, **kwargs: dict): self.form.fields["on"].widget = date_input_widget self.form.fields["before"].widget = date_input_widget self.form.fields["after"].widget = date_input_widget + self.form.fields["mitigated_on"].widget = date_input_widget + self.form.fields["mitigated_before"].widget = date_input_widget + self.form.fields["mitigated_after"].widget = date_input_widget self.form.fields["cwe"].choices = cwe_options(self.queryset) @@ -1830,7 +1839,7 @@ def set_related_object_fields(self, *args: list, **kwargs: dict): if self.pid is not None: del self.form.fields["test__engagement__product"] del self.form.fields["test__engagement__product__prod_type"] - # TODO add authorized check to be sure + # TODO: add authorized check to be sure self.form.fields["test__engagement"].queryset = Engagement.objects.filter( product_id=self.pid, ).all() @@ -1918,8 +1927,7 @@ def set_hash_codes(self, *args: list, **kwargs: dict): def filter_queryset(self, *args: list, **kwargs: dict): queryset = super().filter_queryset(*args, **kwargs) queryset = 
get_authorized_findings(Permissions.Finding_View, queryset, self.user) - queryset = queryset.exclude(pk=self.finding.pk) - return queryset + return queryset.exclude(pk=self.finding.pk) class SimilarFindingFilter(FindingFilter, SimilarFindingHelper): @@ -2874,6 +2882,7 @@ class Meta: class ReportFindingFilterHelper(FilterSet): title = CharFilter(lookup_expr="icontains", label="Name") date = DateFromToRangeFilter(field_name="date", label="Date Discovered") + date_recent = DateRangeFilter(field_name="date", label="Relative Date") severity = MultipleChoiceFilter(choices=SEVERITY_CHOICES) active = ReportBooleanFilter() is_mitigated = ReportBooleanFilter() @@ -3228,7 +3237,7 @@ class Meta: filter_overrides = { JSONField: { "filter_class": CharFilter, - "extra": lambda f: { + "extra": lambda _: { "lookup_expr": "icontains", }, }, diff --git a/dojo/finding/helper.py b/dojo/finding/helper.py index c6ec5d875bc..1182cb26d68 100644 --- a/dojo/finding/helper.py +++ b/dojo/finding/helper.py @@ -55,7 +55,7 @@ def pre_save_finding_status_change(sender, instance, changed_fields=None, **kwar logger.debug("%i: changed status fields pre_save: %s", instance.id or 0, changed_fields) for field, (old, new) in changed_fields.items(): - logger.debug("%i: %s changed from %s to %s" % (instance.id or 0, field, old, new)) + logger.debug("%i: %s changed from %s to %s", instance.id or 0, field, old, new) user = None if get_current_user() and get_current_user().is_authenticated: user = get_current_user() @@ -248,8 +248,7 @@ def get_group_by_group_name(finding, finding_group_by_option): group_name = finding.component_name elif finding_group_by_option == "component_name+component_version": if finding.component_name or finding.component_version: - group_name = "{}:{}".format((finding.component_name if finding.component_name else "None"), - (finding.component_version if finding.component_version else "None")) + group_name = "{}:{}".format(finding.component_name or "None", finding.component_version or "None") elif finding_group_by_option == "file_path": if finding.file_path: group_name = f"Filepath {finding.file_path}" @@ -567,25 +566,25 @@ def engagement_post_delete(sender, instance, **kwargs): def fix_loop_duplicates(): - """ Due to bugs in the past and even currently when under high parallel load, there can be transitive duplicates. """ + """Due to bugs in the past and even currently when under high parallel load, there can be transitive duplicates.""" """ i.e. A -> B -> C. 
This can lead to problems when deleting findings, performing deduplication, etc. """ candidates = Finding.objects.filter(duplicate_finding__isnull=False, original_finding__isnull=False).order_by("-id") loop_count = len(candidates) if loop_count > 0: - deduplicationLogger.info("Identified %d Findings with Loops" % len(candidates)) + deduplicationLogger.info(f"Identified {len(candidates)} Findings with Loops") for find_id in candidates.values_list("id", flat=True): removeLoop(find_id, 50) new_originals = Finding.objects.filter(duplicate_finding__isnull=True, duplicate=True) for f in new_originals: - deduplicationLogger.info("New Original: %d " % f.id) + deduplicationLogger.info(f"New Original: {f.id}") f.duplicate = False super(Finding, f).save() loop_count = Finding.objects.filter(duplicate_finding__isnull=False, original_finding__isnull=False).count() - deduplicationLogger.info("%d Finding found which still has Loops, please run fix loop duplicates again" % loop_count) + deduplicationLogger.info(f"{loop_count} Findings found which still have Loops, please run fix loop duplicates again") return loop_count diff --git a/dojo/finding/queries.py index 7f213805a49..47386e43f86 100644 --- a/dojo/finding/queries.py +++ b/dojo/finding/queries.py @@ -68,14 +68,12 @@ def get_authorized_findings(permission, queryset=None, user=None): test__engagement__product__member=Exists(authorized_product_roles), test__engagement__product__prod_type__authorized_group=Exists(authorized_product_type_groups), test__engagement__product__authorized_group=Exists(authorized_product_groups)) - findings = findings.filter( + return findings.filter( Q(test__engagement__product__prod_type__member=True) | Q(test__engagement__product__member=True) | Q(test__engagement__product__prod_type__authorized_group=True) | Q(test__engagement__product__authorized_group=True)) - return findings - def get_authorized_stub_findings(permission): user = get_current_user() @@ -101,14 +99,12 @@ def get_authorized_stub_findings(permission): test__engagement__product__member=Exists(authorized_product_roles), test__engagement__product__prod_type__authorized_group=Exists(authorized_product_type_groups), test__engagement__product__authorized_group=Exists(authorized_product_groups)).order_by("id") - findings = findings.filter( + return findings.filter( Q(test__engagement__product__prod_type__member=True) | Q(test__engagement__product__member=True) | Q(test__engagement__product__prod_type__authorized_group=True) | Q(test__engagement__product__authorized_group=True)) - return findings - def get_authorized_vulnerability_ids(permission, queryset=None, user=None): @@ -151,10 +147,8 @@ def get_authorized_vulnerability_ids(permission, queryset=None, user=None): finding__test__engagement__product__member=Exists(authorized_product_roles), finding__test__engagement__product__prod_type__authorized_group=Exists(authorized_product_type_groups), finding__test__engagement__product__authorized_group=Exists(authorized_product_groups)) - vulnerability_ids = vulnerability_ids.filter( + return vulnerability_ids.filter( Q(finding__test__engagement__product__prod_type__member=True) | Q(finding__test__engagement__product__member=True) | Q(finding__test__engagement__product__prod_type__authorized_group=True) | Q(finding__test__engagement__product__authorized_group=True)) - - return vulnerability_ids diff --git a/dojo/finding/views.py index f40cc11ff84..647bcccb43b 100644 --- a/dojo/finding/views.py +++ b/dojo/finding/views.py @@ 
-135,6 +135,9 @@ def prefetch_for_findings(findings, prefetch_type="all", exclude_untouched=True) if isinstance( findings, QuerySet, ): # old code can arrive here with prods being a list because the query was already executed + prefetched_findings = prefetched_findings.prefetch_related( + "reviewers", + ) prefetched_findings = prefetched_findings.prefetch_related("reporter") prefetched_findings = prefetched_findings.prefetch_related( "jira_issue__jira_project__jira_instance", @@ -264,9 +267,9 @@ class BaseListFindings: def __init__( self, filter_name: str = "All", - product_id: int = None, - engagement_id: int = None, - test_id: int = None, + product_id: int | None = None, + engagement_id: int | None = None, + test_id: int | None = None, order_by: str = "numerical_severity", prefetch_type: str = "all", ): @@ -310,31 +313,29 @@ def get_test_id(self): def filter_findings_by_object(self, findings: QuerySet[Finding]): if product_id := self.get_product_id(): return findings.filter(test__engagement__product__id=product_id) - elif engagement_id := self.get_engagement_id(): + if engagement_id := self.get_engagement_id(): return findings.filter(test__engagement=engagement_id) - elif test_id := self.get_test_id(): + if test_id := self.get_test_id(): return findings.filter(test=test_id) - else: - return findings + return findings def filter_findings_by_filter_name(self, findings: QuerySet[Finding]): filter_name = self.get_filter_name() if filter_name == "Open": return findings.filter(finding_helper.OPEN_FINDINGS_QUERY) - elif filter_name == "Verified": + if filter_name == "Verified": return findings.filter(finding_helper.VERIFIED_FINDINGS_QUERY) - elif filter_name == "Out of Scope": + if filter_name == "Out of Scope": return findings.filter(finding_helper.OUT_OF_SCOPE_FINDINGS_QUERY) - elif filter_name == "False Positive": + if filter_name == "False Positive": return findings.filter(finding_helper.FALSE_POSITIVE_FINDINGS_QUERY) - elif filter_name == "Inactive": + if filter_name == "Inactive": return findings.filter(finding_helper.INACTIVE_FINDINGS_QUERY) - elif filter_name == "Accepted": + if filter_name == "Accepted": return findings.filter(finding_helper.ACCEPTED_FINDINGS_QUERY) - elif filter_name == "Closed": + if filter_name == "Closed": return findings.filter(finding_helper.CLOSED_FINDINGS_QUERY) - else: - return findings + return findings def filter_findings_by_form(self, request: HttpRequest, findings: QuerySet[Finding]): # Set up the args for the form @@ -357,9 +358,7 @@ def filter_findings_by_form(self, request: HttpRequest, findings: QuerySet[Findi def get_filtered_findings(self): findings = get_authorized_findings(Permissions.Finding_View).order_by(self.get_order_by()) findings = self.filter_findings_by_object(findings) - findings = self.filter_findings_by_filter_name(findings) - - return findings + return self.filter_findings_by_filter_name(findings) def get_fully_filtered_findings(self, request: HttpRequest): findings = self.get_filtered_findings() @@ -423,7 +422,7 @@ def add_breadcrumbs(self, request: HttpRequest, context: dict): return request, context - def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + def get(self, request: HttpRequest, product_id: int | None = None, engagement_id: int | None = None): # Store the product and engagement ids self.product_id = product_id self.engagement_id = engagement_id @@ -449,43 +448,43 @@ def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = class ListOpenFindings(ListFindings): - 
def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + def get(self, request: HttpRequest, product_id: int | None = None, engagement_id: int | None = None): self.filter_name = "Open" return super().get(request, product_id=product_id, engagement_id=engagement_id) class ListVerifiedFindings(ListFindings): - def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + def get(self, request: HttpRequest, product_id: int | None = None, engagement_id: int | None = None): self.filter_name = "Verified" return super().get(request, product_id=product_id, engagement_id=engagement_id) class ListOutOfScopeFindings(ListFindings): - def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + def get(self, request: HttpRequest, product_id: int | None = None, engagement_id: int | None = None): self.filter_name = "Out of Scope" return super().get(request, product_id=product_id, engagement_id=engagement_id) class ListFalsePositiveFindings(ListFindings): - def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + def get(self, request: HttpRequest, product_id: int | None = None, engagement_id: int | None = None): self.filter_name = "False Positive" return super().get(request, product_id=product_id, engagement_id=engagement_id) class ListInactiveFindings(ListFindings): - def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + def get(self, request: HttpRequest, product_id: int | None = None, engagement_id: int | None = None): self.filter_name = "Inactive" return super().get(request, product_id=product_id, engagement_id=engagement_id) class ListAcceptedFindings(ListFindings): - def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + def get(self, request: HttpRequest, product_id: int | None = None, engagement_id: int | None = None): self.filter_name = "Accepted" return super().get(request, product_id=product_id, engagement_id=engagement_id) class ListClosedFindings(ListFindings): - def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + def get(self, request: HttpRequest, product_id: int | None = None, engagement_id: int | None = None): self.filter_name = "Closed" self.order_by = "-mitigated" return super().get(request, product_id=product_id, engagement_id=engagement_id) @@ -990,10 +989,10 @@ def process_finding_form(self, request: HttpRequest, finding: Finding, context: # Handle risk exception related things if "risk_accepted" in context["form"].cleaned_data and context["form"]["risk_accepted"].value(): if new_finding.test.engagement.product.enable_simple_risk_acceptance: - ra_helper.simple_risk_accept(new_finding, perform_save=False) + ra_helper.simple_risk_accept(request.user, new_finding, perform_save=False) else: if new_finding.risk_accepted: - ra_helper.risk_unaccept(new_finding, perform_save=False) + ra_helper.risk_unaccept(request.user, new_finding, perform_save=False) # Save and add new endpoints finding_helper.add_endpoints(new_finding, context["form"]) # Remove unrelated endpoints @@ -1016,9 +1015,8 @@ def process_finding_form(self, request: HttpRequest, finding: Finding, context: ) return finding, request, True - else: - add_error_message_to_response("The form has errors, please correct them below.") - add_field_errors_to_response(context["form"]) + add_error_message_to_response("The form has errors, please correct them below.") + add_field_errors_to_response(context["form"]) 
return finding, request, False @@ -1073,8 +1071,7 @@ def process_jira_form(self, request: HttpRequest, finding: Finding, context: dic ) return request, True, push_to_jira - else: - add_field_errors_to_response(context["jform"]) + add_field_errors_to_response(context["jform"]) return request, False, False @@ -1089,8 +1086,7 @@ def process_github_form(self, request: HttpRequest, finding: Finding, context: d add_external_issue(finding, "github") return request, True - else: - add_field_errors_to_response(context["gform"]) + add_field_errors_to_response(context["gform"]) return request, False @@ -1269,7 +1265,7 @@ def close_finding(request, fid): status.last_modified = timezone.now() status.save() # Clear the risk acceptance, if present - ra_helper.risk_unaccept(finding) + ra_helper.risk_unaccept(request.user, finding) # Manage the jira status changes push_to_jira = False @@ -1315,10 +1311,9 @@ def close_finding(request, fid): return HttpResponseRedirect( reverse("view_test", args=(finding.test.id,)), ) - else: - return HttpResponseRedirect( - reverse("close_finding", args=(finding.id,)), - ) + return HttpResponseRedirect( + reverse("close_finding", args=(finding.id,)), + ) product_tab = Product_Tab( finding.test.engagement.product, title="Close", tab="findings", @@ -1445,7 +1440,7 @@ def reopen_finding(request, fid): status.last_modified = timezone.now() status.save() # Clear the risk acceptance, if present - ra_helper.risk_unaccept(finding) + ra_helper.risk_unaccept(request.user, finding) # Manage the jira status changes push_to_jira = False @@ -1501,15 +1496,14 @@ def apply_template_cwe(request, fid): extra_tags="alert-success", ) return HttpResponseRedirect(reverse("view_finding", args=(fid,))) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to apply CWE template finding, please try again.", - extra_tags="alert-danger", - ) - else: - raise PermissionDenied + messages.add_message( + request, + messages.ERROR, + "Unable to apply CWE template finding, please try again.", + extra_tags="alert-danger", + ) + return None + raise PermissionDenied @user_is_authorized(Finding, Permissions.Finding_Edit, "fid") @@ -1535,7 +1529,7 @@ def copy_finding(request, fid): extra_tags="alert-success", ) create_notification( - event="finding_copied", # TODO - if 'copy' functionality will be supported by API as well, 'create_notification' needs to be migrated to place where it will be able to cover actions from both interfaces + event="finding_copied", # TODO: - if 'copy' functionality will be supported by API as well, 'create_notification' needs to be migrated to place where it will be able to cover actions from both interfaces title=_("Copying of %s") % finding.title, description=f'The finding "{finding.title}" was copied by {request.user} to {test.title}', product=product, @@ -1548,13 +1542,12 @@ def copy_finding(request, fid): return redirect_to_return_url_or_else( request, reverse("view_test", args=(test.id,)), ) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to copy finding, please try again.", - extra_tags="alert-danger", - ) + messages.add_message( + request, + messages.ERROR, + "Unable to copy finding, please try again.", + extra_tags="alert-danger", + ) product_tab = Product_Tab(product, title="Copy Finding", tab="findings") return render( @@ -1625,7 +1618,7 @@ def simple_risk_accept(request, fid): if not finding.test.engagement.product.enable_simple_risk_acceptance: raise PermissionDenied - ra_helper.simple_risk_accept(finding) + 
ra_helper.simple_risk_accept(request.user, finding) messages.add_message( request, messages.WARNING, "Finding risk accepted.", extra_tags="alert-success", @@ -1639,7 +1632,7 @@ def simple_risk_accept(request, fid): @user_is_authorized(Finding, Permissions.Risk_Acceptance, "fid") def risk_unaccept(request, fid): finding = get_object_or_404(Finding, id=fid) - ra_helper.risk_unaccept(finding) + ra_helper.risk_unaccept(request.user, finding) messages.add_message( request, @@ -1711,7 +1704,7 @@ def request_finding_review(request, fid): logger.debug(f"Asking {reviewers_string} for review") create_notification( - event="review_requested", # TODO - if 'review_requested' functionality will be supported by API as well, 'create_notification' needs to be migrated to place where it will be able to cover actions from both interfaces + event="review_requested", # TODO: - if 'review_requested' functionality will be supported by API as well, 'create_notification' needs to be migrated to place where it will be able to cover actions from both interfaces title="Finding review requested", requested_by=user, note=new_note, @@ -2001,8 +1994,7 @@ def apply_template_to_finding(request, fid, tid): ) return HttpResponseRedirect(reverse("view_finding", args=(finding.id,))) - else: - return HttpResponseRedirect(reverse("view_finding", args=(finding.id,))) + return HttpResponseRedirect(reverse("view_finding", args=(finding.id,))) @user_is_authorized(Test, Permissions.Finding_Add, "tid") @@ -2062,15 +2054,14 @@ def delete_stub_finding(request, fid): extra_tags="alert-success", ) return HttpResponseRedirect(reverse("view_test", args=(tid,))) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to delete potential finding, please try again.", - extra_tags="alert-danger", - ) - else: - raise PermissionDenied + messages.add_message( + request, + messages.ERROR, + "Unable to delete potential finding, please try again.", + extra_tags="alert-danger", + ) + return None + raise PermissionDenied @user_is_authorized(Stub_Finding, Permissions.Finding_Edit, "fid") @@ -2187,13 +2178,12 @@ def promote_to_finding(request, fid): ) return HttpResponseRedirect(reverse("view_test", args=(test.id,))) - else: - form_error = True - add_error_message_to_response( - "The form has errors, please correct them below.", - ) - add_field_errors_to_response(jform) - add_field_errors_to_response(form) + form_error = True + add_error_message_to_response( + "The form has errors, please correct them below.", + ) + add_field_errors_to_response(jform) + add_field_errors_to_response(form) else: form = PromoteFindingForm( initial={ @@ -2355,13 +2345,12 @@ def add_template(request): extra_tags="alert-success", ) return HttpResponseRedirect(reverse("templates")) - else: - messages.add_message( - request, - messages.ERROR, - "Template form has error, please revise and try again.", - extra_tags="alert-danger", - ) + messages.add_message( + request, + messages.ERROR, + "Template form has error, please revise and try again.", + extra_tags="alert-danger", + ) add_breadcrumb(title="Add Template", top_level=False, request=request) return render( request, "dojo/add_template.html", {"form": form, "name": "Add Template"}, @@ -2410,15 +2399,14 @@ def edit_template(request, tid): extra_tags="alert-success", ) return HttpResponseRedirect(reverse("templates")) - else: - messages.add_message( - request, - messages.ERROR, - "Template form has error, please revise and try again.", - extra_tags="alert-danger", - ) + messages.add_message( + request, + 
messages.ERROR, + "Template form has error, please revise and try again.", + extra_tags="alert-danger", + ) - count = apply_cwe_mitigation(True, template, False) + count = apply_cwe_mitigation(apply_to_findings=True, template=template, update=False) add_breadcrumb(title="Edit Template", top_level=False, request=request) return render( request, @@ -2446,15 +2434,14 @@ def delete_template(request, tid): extra_tags="alert-success", ) return HttpResponseRedirect(reverse("templates")) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to delete Template, please revise and try again.", - extra_tags="alert-danger", - ) - else: - raise PermissionDenied + messages.add_message( + request, + messages.ERROR, + "Unable to delete Template, please revise and try again.", + extra_tags="alert-danger", + ) + return None + raise PermissionDenied def download_finding_pic(request, token): @@ -2660,13 +2647,12 @@ def merge_finding_product(request, pid): return HttpResponseRedirect( reverse("edit_finding", args=(finding_to_merge_into.id,)), ) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to merge findings. Findings to merge contained in finding to merge into.", - extra_tags="alert-danger", - ) + messages.add_message( + request, + messages.ERROR, + "Unable to merge findings. Findings to merge contained in finding to merge into.", + extra_tags="alert-danger", + ) else: messages.add_message( request, @@ -2850,9 +2836,9 @@ def finding_bulk_update_all(request, pid=None): ): skipped_risk_accept_count += 1 else: - ra_helper.simple_risk_accept(finding) + ra_helper.simple_risk_accept(request.user, finding) elif form.cleaned_data["risk_unaccept"]: - ra_helper.risk_unaccept(finding) + ra_helper.risk_unaccept(request.user, finding) for prod in prods: calculate_grade(prod) @@ -2861,9 +2847,8 @@ def finding_bulk_update_all(request, pid=None): messages.add_message( request, messages.WARNING, - ("Skipped simple risk acceptance of %i findings, " - "simple risk acceptance is disabled on the related products") - % skipped_risk_accept_count, + (f"Skipped simple risk acceptance of {skipped_risk_accept_count} findings, " + "simple risk acceptance is disabled on the related products"), extra_tags="alert-warning", ) @@ -2962,8 +2947,7 @@ def finding_bulk_update_all(request, pid=None): if grouped: add_success_message_to_response( - "Grouped %d findings into %d (%d newly created) finding groups" - % (grouped, len(finding_groups), groups_created), + f"Grouped {grouped} findings into {len(finding_groups)} ({groups_created} newly created) finding groups", ) if skipped: @@ -3041,15 +3025,10 @@ def finding_bulk_update_all(request, pid=None): success_count += 1 for error_message, error_count in error_counts.items(): - add_error_message_to_response( - "%i finding groups could not be pushed to JIRA: %s" - % (error_count, error_message), - ) + add_error_message_to_response(f"{error_count} finding groups could not be pushed to JIRA: {error_message}") if success_count > 0: - add_success_message_to_response( - "%i finding groups pushed to JIRA successfully" % success_count, - ) + add_success_message_to_response(f"{success_count} finding groups pushed to JIRA successfully") groups_pushed_to_jira = True # refresh from db @@ -3101,15 +3080,10 @@ def finding_bulk_update_all(request, pid=None): success_count += 1 for error_message, error_count in error_counts.items(): - add_error_message_to_response( - "%i findings could not be pushed to JIRA: %s" - % (error_count, error_message), - ) + 
add_error_message_to_response(f"{error_count} findings could not be pushed to JIRA: {error_message}") if success_count > 0: - add_success_message_to_response( - "%i findings pushed to JIRA successfully" % success_count, - ) + add_success_message_to_response(f"{success_count} findings pushed to JIRA successfully") if updated_find_count > 0: messages.add_message( @@ -3148,8 +3122,7 @@ def find_available_notetypes(notes): break else: available_note_types.append(note_type_id) - queryset = Note_Type.objects.filter(id__in=available_note_types).order_by("-id") - return queryset + return Note_Type.objects.filter(id__in=available_note_types).order_by("-id") def get_missing_mandatory_notetypes(finding): @@ -3164,8 +3137,7 @@ def get_missing_mandatory_notetypes(finding): break else: notes_to_be_added.append(note_type_id) - queryset = Note_Type.objects.filter(id__in=notes_to_be_added) - return queryset + return Note_Type.objects.filter(id__in=notes_to_be_added) @user_is_authorized(Finding, Permissions.Finding_Edit, "original_id") @@ -3499,34 +3471,32 @@ def calculate_possible_related_actions_for_similar_finding( else: # similar is not a duplicate yet if finding.duplicate or finding.original_finding.all(): - actions.append( + actions.extend(( { "action": "mark_finding_duplicate", "reason": "Will mark this finding as duplicate of the root finding in this cluster", - }, - ) - actions.append( - { + }, { "action": "set_finding_as_original", - "reason": ("Sets this finding as the Original for the whole cluster. " - "The existing Original will be downgraded to become a member of the cluster and, " - "together with the other members, will be marked as duplicate of the new Original."), + "reason": ( + "Sets this finding as the Original for the whole cluster. " + "The existing Original will be downgraded to become a member of the cluster and, " + "together with the other members, will be marked as duplicate of the new Original." + ), }, - ) + )) else: # similar_finding is not an original/root of a cluster as per earlier if clause - actions.append( + actions.extend(( { "action": "mark_finding_duplicate", "reason": "Will mark this finding as duplicate of the finding on this page.", - }, - ) - actions.append( - { + }, { "action": "set_finding_as_original", - "reason": ("Sets this finding as the Original marking the finding " - "on this page as duplicate of this original."), + "reason": ( + "Sets this finding as the Original marking the finding " + "on this page as duplicate of this original." 
+ ), }, - ) + )) return actions diff --git a/dojo/finding_group/queries.py b/dojo/finding_group/queries.py index aae57f53c83..39b91c02665 100644 --- a/dojo/finding_group/queries.py +++ b/dojo/finding_group/queries.py @@ -46,10 +46,8 @@ def get_authorized_finding_groups(permission, queryset=None, user=None): test__engagement__product__member=Exists(authorized_product_roles), test__engagement__product__prod_type__authorized_group=Exists(authorized_product_type_groups), test__engagement__product__authorized_group=Exists(authorized_product_groups)) - finding_groups = finding_groups.filter( + return finding_groups.filter( Q(test__engagement__product__prod_type__member=True) | Q(test__engagement__product__member=True) | Q(test__engagement__product__prod_type__authorized_group=True) | Q(test__engagement__product__authorized_group=True)) - - return finding_groups diff --git a/dojo/finding_group/views.py b/dojo/finding_group/views.py index 546dae93763..814d88888ed 100644 --- a/dojo/finding_group/views.py +++ b/dojo/finding_group/views.py @@ -74,8 +74,7 @@ def view_finding_group(request, fgid): if jira_issue: # See if the submitted issue was a issue key or the full URL jira_instance = jira_helper.get_jira_project(finding_group).jira_instance - if jira_issue.startswith(jira_instance.url + "/browse/"): - jira_issue = jira_issue[len(jira_instance.url + "/browse/"):] + jira_issue = jira_issue.removeprefix(jira_instance.url + "/browse/") if finding_group.has_jira_issue and not jira_issue == jira_helper.get_jira_key(finding_group): jira_helper.unlink_jira(request, finding_group) diff --git a/dojo/fixtures/defect_dojo_sample_data.json b/dojo/fixtures/defect_dojo_sample_data.json index 2ea53dc83b2..2d0ece6cb16 100644 --- a/dojo/fixtures/defect_dojo_sample_data.json +++ b/dojo/fixtures/defect_dojo_sample_data.json @@ -35210,6 +35210,7 @@ "engagement": null, "component": "", "push_all_issues": false, + "enabled": true, "enable_engagement_epic_mapping": true, "push_notes": false, "product_jira_sla_notification": false, @@ -35227,6 +35228,7 @@ "engagement": null, "component": "", "push_all_issues": true, + "enabled": true, "enable_engagement_epic_mapping": true, "push_notes": true, "product_jira_sla_notification": false, @@ -35244,6 +35246,7 @@ "engagement": null, "component": "", "push_all_issues": false, + "enabled": true, "enable_engagement_epic_mapping": false, "push_notes": false, "product_jira_sla_notification": false, @@ -44225,7 +44228,7 @@ "modified": "2015-03-30T20:06:15Z", "order": 1, "optional": false, - "text": "If you’re managing passwords or keys across multiple servers/endpoints, where and how is that information stored?" + "text": "If you're managing passwords or keys across multiple servers/endpoints, where and how is that information stored?" 
} }, { diff --git a/dojo/fixtures/dojo_testdata.json b/dojo/fixtures/dojo_testdata.json index 62486cb90cf..ae550f8bf81 100644 --- a/dojo/fixtures/dojo_testdata.json +++ b/dojo/fixtures/dojo_testdata.json @@ -227,6 +227,7 @@ "url_prefix": "", "enable_slack_notifications": false, "enable_mail_notifications": false, + "enable_webhooks_notifications": true, "email_from": "no-reply@example.com", "false_positive_history": false, "msteams_url": "", @@ -2926,11 +2927,27 @@ "pk": 1, "model": "dojo.notifications", "fields": { - "product": 1, - "user": 2, - "product_type_added": [ - "slack" - ] + "product": null, + "user": null, + "template": false, + "product_type_added": "webhooks,alert", + "product_added": "webhooks,alert", + "engagement_added": "webhooks,alert", + "test_added": "webhooks,alert", + "scan_added": "webhooks,alert", + "scan_added_empty": "webhooks", + "jira_update": "alert", + "upcoming_engagement": "alert", + "stale_engagement": "alert", + "auto_close_engagement": "alert", + "close_engagement": "alert", + "user_mentioned": "alert", + "code_review": "alert", + "review_requested": "alert", + "other": "alert", + "sla_breach": "alert", + "risk_acceptance_expiration": "alert", + "sla_breach_combined": "alert" } }, { @@ -3045,5 +3062,35 @@ "dismissable": true, "style": "danger" } + }, + { + "model": "dojo.notification_webhooks", + "pk": 1, + "fields": { + "name": "My webhook endpoint", + "url": "http://webhook.endpoint:8080/post", + "header_name": "Auth", + "header_value": "Token xxx", + "status": "active", + "first_error": null, + "last_error": null, + "note": null, + "owner": null + } + }, + { + "model": "dojo.notification_webhooks", + "pk": 2, + "fields": { + "name": "My personal webhook endpoint", + "url": "http://webhook.endpoint:8080/post", + "header_name": "Auth", + "header_value": "Token secret", + "status": "active", + "first_error": null, + "last_error": null, + "note": null, + "owner": 2 + } } ] \ No newline at end of file diff --git a/dojo/fixtures/sla_configurations.json b/dojo/fixtures/sla_configurations.json new file mode 100644 index 00000000000..f90d022581d --- /dev/null +++ b/dojo/fixtures/sla_configurations.json @@ -0,0 +1,35 @@ +[ + { + "model": "dojo.sla_configuration", + "pk": 1, + "fields": { + "name": "Default", + "description": "The Default SLA Configuration. 
Products not using an explicit SLA Configuration will use this one.", + "critical": 7, + "enforce_critical": true, + "high": 30, + "enforce_high": true, + "medium": 90, + "enforce_medium": true, + "low": 120, + "enforce_low": true, + "async_updating": false + } + }, + { + "model": "dojo.sla_configuration", + "fields": { + "name": "No SLA Enforced", + "description": "No SLA is enforced for a product which uses this SLA configuration.", + "critical": 7, + "enforce_critical": false, + "high": 30, + "enforce_high": false, + "medium": 90, + "enforce_medium": false, + "low": 120, + "enforce_low": false, + "async_updating": false + } + } +] diff --git a/dojo/fixtures/unit_metrics_additional_data.json b/dojo/fixtures/unit_metrics_additional_data.json new file mode 100644 index 00000000000..721e47eaac6 --- /dev/null +++ b/dojo/fixtures/unit_metrics_additional_data.json @@ -0,0 +1,482 @@ +[ + { + "pk": 240, + "model": "dojo.finding", + "fields": { + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2018-01-01", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "High", + "false_p": false, + "verified": false, + "severity": "High", + "title": "High Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": true, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 3, + "out_of_scope": false, + "cwe": null, + "file_path": "", + "duplicate_finding": 2, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": true, + "line": null, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "5d368a051fdec959e08315a32ef633ba5711bed6e8e75319ddee2cab4d4608c7", + "last_reviewed": null + } + }, + { + "pk": 241, + "model": "dojo.finding", + "fields": { + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2018-01-01", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "High", + "false_p": false, + "verified": false, + "severity": "High", + "title": "High Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": true, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 3, + "out_of_scope": false, + "cwe": null, + "file_path": "", + "duplicate_finding": 2, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": false, + "line": null, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "5d368a051fdec959e08315a32ef633ba5711bed6e8e75319ddee2cab4d4608c7", + "last_reviewed": null, + "risk_accepted": true + } + }, + { + "pk": 242, + "model": "dojo.finding", + "fields": { + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2018-01-01", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "High", + "false_p": false, + "verified": false, + "severity": "High", + "title": "High Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": true, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 3, + "out_of_scope": 
false, + "cwe": null, + "file_path": "", + "duplicate_finding": 2, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": false, + "line": null, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "5d368a051fdec959e08315a32ef633ba5711bed6e8e75319ddee2cab4d4608c7", + "last_reviewed": null, + "risk_accepted": true + } + }, + { + "pk": 243, + "model": "dojo.finding", + "fields": { + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2017-12-31", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "High", + "false_p": false, + "verified": false, + "severity": "High", + "title": "DUMMY FINDING", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": false, + "mitigation": "MITIGATION", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 3, + "out_of_scope": false, + "cwe": 1, + "file_path": "", + "duplicate_finding": null, + "description": "TEST finding", + "mitigated_by": null, + "reporter": 2, + "mitigated": null, + "active": false, + "line": 100, + "under_review": false, + "defect_review_requested_by": 2, + "review_requested_by": 2, + "thread_id": 1, + "url": "http://www.example.com", + "notes": [ + 1 + ], + "dynamic_finding": false, + "hash_code": "c89d25e445b088ba339908f68e15e3177b78d22f3039d1bfea51c4be251bf4e0", + "last_reviewed": null, + "risk_accepted": true, + "is_mitigated": true + } + }, + { + "pk": 244, + "model": "dojo.finding", + "fields": { + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2017-12-29", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "Low", + "false_p": false, + "verified": true, + "severity": "Low", + "title": "Low Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": false, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 33, + "out_of_scope": false, + "cwe": null, + "file_path": "/dev/urandom", + "duplicate_finding": null, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": true, + "line": 123, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "9aca00affd340c4da02c934e7e3106a45c6ad0911da479daae421b3b28a2c1aa", + "last_reviewed": null + } + }, + { + "pk": 245, + "model": "dojo.finding", + "fields": { + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2017-12-27", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "Low", + "false_p": false, + "verified": false, + "severity": "Low", + "title": "Low Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": true, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 33, + "out_of_scope": false, + "cwe": null, + "file_path": "/dev/urandom", + "duplicate_finding": 22, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": false, + "line": 123, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + 
"dynamic_finding": false, + "hash_code": "9aca00affd340c4da02c934e7e3106a45c6ad0911da479daae421b3b28a2c1aa", + "last_reviewed": null + } + }, + { + "pk": 246, + "model": "dojo.finding", + "fields": { + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2018-01-02", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "Low", + "false_p": false, + "verified": false, + "severity": "Low", + "title": "Low Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": true, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 33, + "out_of_scope": false, + "cwe": null, + "file_path": "/dev/urandom", + "duplicate_finding": 22, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": false, + "line": 123, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "9aca00affd340c4da02c934e7e3106a45c6ad0911da479daae421b3b28a2c1aa", + "last_reviewed": null + } + }, + { + "pk": 247, + "model": "dojo.finding", + "fields": { + "unique_id_from_tool": 12345, + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2018-01-03", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "Low", + "false_p": false, + "verified": false, + "severity": "Low", + "title": "Low Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": true, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 55, + "out_of_scope": false, + "cwe": null, + "file_path": "/dev/urandom", + "duplicate_finding": null, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": false, + "line": 123, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "9aca00affd340c4da02c934e7e3106a45c6ad0911da479daae421b3b28a2c1aa", + "last_reviewed": null + } + }, + { + "pk": 248, + "model": "dojo.finding", + "fields": { + "unique_id_from_tool": 6789, + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2017-12-27", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "Low", + "false_p": false, + "verified": true, + "severity": "Low", + "title": "UID Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": false, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 77, + "out_of_scope": false, + "cwe": null, + "file_path": "/dev/urandom", + "duplicate_finding": null, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": true, + "line": 123, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "6f8d0bf970c14175e597843f4679769a4775742549d90f902ff803de9244c7e1", + "last_reviewed": null, + "is_mitigated": true + } + }, + { + "pk": 249, + "model": "dojo.finding", + "fields": { + "unique_id_from_tool": 6789, + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": 
"2018-01-04", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "Low", + "false_p": false, + "verified": false, + "severity": "Low", + "title": "UID Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": true, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 77, + "out_of_scope": false, + "cwe": null, + "file_path": "/dev/urandom", + "duplicate_finding": 224, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": false, + "line": 123, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "6f8d0bf970c14175e597843f4679769a4775742549d90f902ff803de9244c7e1", + "last_reviewed": null + } + } +] \ No newline at end of file diff --git a/dojo/forms.py b/dojo/forms.py index cbd04765c9b..d56cd1ebad2 100644 --- a/dojo/forms.py +++ b/dojo/forms.py @@ -4,6 +4,7 @@ import re import warnings from datetime import date, datetime +from pathlib import Path import tagulous from crispy_forms.bootstrap import InlineCheckboxes, InlineRadios @@ -31,6 +32,7 @@ import dojo.jira_link.helper as jira_helper from dojo.authorization.roles_permissions import Permissions from dojo.endpoint.utils import endpoint_filter, endpoint_get_or_create, validate_endpoints_to_add +from dojo.engagement.queries import get_authorized_engagements from dojo.finding.queries import get_authorized_findings from dojo.group.queries import get_authorized_groups, get_group_member_roles from dojo.models import ( @@ -71,6 +73,7 @@ JIRA_Project, Note_Type, Notes, + Notification_Webhooks, Notifications, Objects_Product, Product, @@ -138,6 +141,7 @@ def render(self, name, *args, **kwargs): class MonthYearWidget(Widget): + """ A Widget that splits date input into two + + + +{% endblock %} \ No newline at end of file diff --git a/dojo/templates/defectDojo-engagement-survey/surveys.html b/dojo/templates/defectDojo-engagement-survey/surveys.html index 1b9d1421340..ae0487c9752 100644 --- a/dojo/templates/defectDojo-engagement-survey/surveys.html +++ b/dojo/templates/defectDojo-engagement-survey/surveys.html @@ -40,6 +40,13 @@ Assign User {% endif %} + {% if survey.engagement|has_object_permission:"Engagement_Edit" %} +
  • + + Link to a Different Engagement + +
  • + {% endif %}
  • Add a new Notification Webhook +
    {% csrf_token %} + {% include "dojo/form_fields.html" with form=form %} +
    +
    + +
    +
    +
    +{% endblock %} diff --git a/dojo/templates/dojo/add_related.html b/dojo/templates/dojo/add_related.html index f6c74e1c530..682df6045ef 100644 --- a/dojo/templates/dojo/add_related.html +++ b/dojo/templates/dojo/add_related.html @@ -29,7 +29,7 @@ - + diff --git a/dojo/templates/dojo/api_v2_key.html b/dojo/templates/dojo/api_v2_key.html index 71b9dd2d620..6b4d56e9338 100644 --- a/dojo/templates/dojo/api_v2_key.html +++ b/dojo/templates/dojo/api_v2_key.html @@ -15,9 +15,11 @@

    {{ name }}


    + {% if API_TOKEN_AUTH_ENDPOINT_ENABLED %}

    {% trans "Alternatively, you can use /api/v2/api-token-auth/ to get your token. Example:" %}

     curl -X POST -H 'content-type: application/json' {% if request.is_secure %}https{% else %}http{% endif %}://{{ request.META.HTTP_HOST }}/api/v2/api-token-auth/ -d '{"username": "<YOURUSERNAME>", "password": "<YOURPASSWORD>"}'
    + {% endif %}

    {% trans "To use your API Key you need to specify an Authorization header. Example:" %}

     # As a header
    diff --git a/dojo/templates/dojo/custom_html_report_endpoint_list.html b/dojo/templates/dojo/custom_html_report_endpoint_list.html
    index e259a2d4b46..aca9cd3bef9 100644
    --- a/dojo/templates/dojo/custom_html_report_endpoint_list.html
    +++ b/dojo/templates/dojo/custom_html_report_endpoint_list.html
    @@ -151,7 +151,7 @@ 
    References
    {{ finding.references|markdown_render }}
    {% endif %} {% if include_finding_images %} - {% include "dojo/snippets/file_images.html" with size='original' obj=finding format="HTML" %} + {% include "dojo/snippets/file_images.html" with size='original' obj=finding format="INLINE" %} {% endif %} {% if include_finding_notes %} {% with notes=finding.notes.all|get_public_notes %} diff --git a/dojo/templates/dojo/custom_html_report_finding_list.html b/dojo/templates/dojo/custom_html_report_finding_list.html index f92d180c9b3..13f33d03dca 100644 --- a/dojo/templates/dojo/custom_html_report_finding_list.html +++ b/dojo/templates/dojo/custom_html_report_finding_list.html @@ -154,7 +154,7 @@
    References
    {% endif %} {% if include_finding_images %} - {% include "dojo/snippets/file_images.html" with size='original' obj=finding format="HTML" %} + {% include "dojo/snippets/file_images.html" with size='original' obj=finding format="INLINE" %} {% endif %} {% if include_finding_notes %} {% with notes=finding.notes.all|get_public_notes %} diff --git a/dojo/templates/dojo/dashboard.html b/dojo/templates/dojo/dashboard.html index 8e049086094..57df4e1919a 100644 --- a/dojo/templates/dojo/dashboard.html +++ b/dojo/templates/dojo/dashboard.html @@ -208,6 +208,7 @@
    {% trans "View Responses" %} {% trans "Create Engagement" %} + {% trans "Link to Existing Engagement" %} {% endif %} diff --git a/dojo/templates/dojo/delete_notification_webhook.html b/dojo/templates/dojo/delete_notification_webhook.html new file mode 100644 index 00000000000..f196ad94fc9 --- /dev/null +++ b/dojo/templates/dojo/delete_notification_webhook.html @@ -0,0 +1,12 @@ +{% extends "base.html" %} +{% block content %} +

    Delete Notification Webhook

    +
    {% csrf_token %} + {% include "dojo/form_fields.html" with form=form %} +
    +
    + +
    +
    +
    +{% endblock %} diff --git a/dojo/templates/dojo/edit_notification_webhook.html b/dojo/templates/dojo/edit_notification_webhook.html new file mode 100644 index 00000000000..94bd56c2307 --- /dev/null +++ b/dojo/templates/dojo/edit_notification_webhook.html @@ -0,0 +1,15 @@ +{% extends "base.html" %} + {% block content %} + {{ block.super }} +

    Edit Notification Webhook

    +
    {% csrf_token %} + {% include "dojo/form_fields.html" with form=form %} +
    +
    + + +
    +
    +
    + {% endblock %} + \ No newline at end of file diff --git a/dojo/templates/dojo/express_new_jira.html b/dojo/templates/dojo/express_new_jira.html deleted file mode 100644 index 4394c5d6bbc..00000000000 --- a/dojo/templates/dojo/express_new_jira.html +++ /dev/null @@ -1,16 +0,0 @@ -{% extends "base.html"%} -{% block content %} - {{ block.super }} -

    Add a JIRA Configuration Express

    -
    {% csrf_token %} - {% include "dojo/form_fields.html" with form=jform %} -
    -
    - -
    -

    - Finding severity mappings and other options can be edited after express configuration is complete. -
    -
    -
    -{% endblock %} diff --git a/dojo/templates/dojo/findings_list_snippet.html b/dojo/templates/dojo/findings_list_snippet.html index eb0c2d7135f..51e85ab8d1b 100644 --- a/dojo/templates/dojo/findings_list_snippet.html +++ b/dojo/templates/dojo/findings_list_snippet.html @@ -384,6 +384,11 @@

    {% trans "Planned Remediation" %} + {% if filter_name != 'Closed' %} + + {% trans "Reviewers" %} + + {% endif %} {% endblock header %} @@ -699,6 +704,16 @@

    {% if finding.planned_remediation_date %}{{ finding.planned_remediation_date }}{% endif %} + {% if filter_name != 'Closed' %} + + {% if finding.reviewers %} + {% for reviewer in finding.reviewers.all %} + {{reviewer.get_full_name}} + {% if not forloop.last %}
    {% endif %} + {% endfor %} + {% endif %} + + {% endif %} {% endblock body %} {% endfor %} @@ -779,6 +794,9 @@

    {% endif %} { "data": "service" }, { "data": "planned_remediation_date" }, + {% if filter_name != 'Closed' %} + { "data": "reviewers" }, + {% endif %} ]; {% endblock datatables_columns %} diff --git a/dojo/templates/dojo/jira.html b/dojo/templates/dojo/jira.html index a3208648d64..1068cf7c4ca 100644 --- a/dojo/templates/dojo/jira.html +++ b/dojo/templates/dojo/jira.html @@ -19,13 +19,13 @@

    diff --git a/dojo/templates/dojo/new_jira.html b/dojo/templates/dojo/new_jira.html index 232117681cb..6f4cb6e055e 100644 --- a/dojo/templates/dojo/new_jira.html +++ b/dojo/templates/dojo/new_jira.html @@ -6,8 +6,11 @@

    Add a JIRA Configuration

    {% include "dojo/form_fields.html" with form=jform %}
    - + +
    +

    + Finding severity mappings and other options can be edited after configuration is complete.
    -{% endblock %} \ No newline at end of file +{% endblock %} diff --git a/dojo/templates/dojo/new_jira_advanced.html b/dojo/templates/dojo/new_jira_advanced.html new file mode 100644 index 00000000000..2af3a37c600 --- /dev/null +++ b/dojo/templates/dojo/new_jira_advanced.html @@ -0,0 +1,13 @@ +{% extends "base.html"%} +{% block content %} + {{ block.super }} +

    Add a JIRA Configuration (Advanced)

    +
    {% csrf_token %} + {% include "dojo/form_fields.html" with form=jform %} +
    +
    + +
    +
    +
    +{% endblock %} \ No newline at end of file diff --git a/dojo/templates/dojo/notifications.html b/dojo/templates/dojo/notifications.html index 52d87393c45..81fac49d5cc 100644 --- a/dojo/templates/dojo/notifications.html +++ b/dojo/templates/dojo/notifications.html @@ -89,6 +89,9 @@

    {% if 'mail' in enabled_notifications %} {% trans "Mail" %} {% endif %} + {% if 'webhooks' in enabled_notifications %} + {% trans "Webhooks" %} + {% endif %} {% trans "Alert" %} diff --git a/dojo/templates/dojo/product.html b/dojo/templates/dojo/product.html index 1b7f50a73df..0470bd8357a 100644 --- a/dojo/templates/dojo/product.html +++ b/dojo/templates/dojo/product.html @@ -122,18 +122,20 @@

    Edit Custom Fields

  • - -
  • - - Add Scan API Configuration - -
  • + {% endif %} + + {% if prod|has_object_permission:"Product_API_Scan_Configuration_Edit" %}
  • - - View Scan API Configurations - + + Add Scan API Configuration +
  • {% endif %} +
  • + + View Scan API Configurations + +
  • {% if system_settings.enable_product_tracking_files %} {% if prod|has_object_permission:"Product_Tracking_Files_Add" %} diff --git a/dojo/templates/dojo/report_cover_page.html b/dojo/templates/dojo/report_cover_page.html index 8e936cd9618..0130d08f845 100644 --- a/dojo/templates/dojo/report_cover_page.html +++ b/dojo/templates/dojo/report_cover_page.html @@ -6,7 +6,7 @@
     

    - + DefectDojo Logo

    {{ report_title }}

    diff --git a/dojo/templates/dojo/snippets/file_images.html b/dojo/templates/dojo/snippets/file_images.html index 1c7481e9162..8a559282e03 100644 --- a/dojo/templates/dojo/snippets/file_images.html +++ b/dojo/templates/dojo/snippets/file_images.html @@ -9,6 +9,15 @@
    Images

    No images found.

    {% endfor %} {% endwith %} +{% elif format == "INLINE" %} + {% with images=obj|file_images %} +
    Images
    + {% for pic in images %} +

    Finding Image

    + {% empty %} +

    No images found.

    + {% endfor %} + {% endwith %} {% else %} {% with images=obj|file_images %} {% for pic in images %} diff --git a/dojo/templates/dojo/support.html b/dojo/templates/dojo/support.html index fd0a49a095b..45066a551f9 100644 --- a/dojo/templates/dojo/support.html +++ b/dojo/templates/dojo/support.html @@ -14,24 +14,68 @@

     Community Support

     What's included:

     Support from the community via OWASP Slack
-        Community based discussion
+        Community-based discussion
     Join #defectdojo

-    Get DefectDojo Pro
+    Go Pro!
     What's included:
-        Support directly from the creators
-        Additional features
-        Response time SLA
-        Bug fixes
-        Feature enhancements
-        Best practice advice
+        New UI
+
+
+
+        Connectors
+
+
+
+        Insights
+
+
+
+        Data Enrichment
+
+
+
+        Universal Importer
+
+
+
+        Async Functions
+
+
+
+        Support directly from the DefectDojo Team
+
+        Assistance with best practice and implementation

-    Meet The Creators
+    Go Pro Now
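Reviewer note: the remaining hunks wire up 'webhooks' as a notification method and add a page listing configured Notification Webhooks, including their status and first/last error. Delivery itself is not part of these template diffs; a rough sketch of what one delivery might look like, with an assumed JSON payload and header layout:

```python
import requests

def send_webhook_notification(url: str, event: str, message: str, token: str = "") -> None:
    """Hypothetical delivery helper; the payload shape is an assumption."""
    headers = {"Content-Type": "application/json"}
    if token:
        headers["Authorization"] = f"Bearer {token}"
    response = requests.post(
        url,
        json={"event": event, "message": message},
        headers=headers,
        timeout=10,
    )
    # A failure here is the kind of thing the list page surfaces
    # via the webhook's first_error/last_error fields.
    response.raise_for_status()
```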
diff --git a/dojo/templates/dojo/system_settings.html b/dojo/templates/dojo/system_settings.html
index 693abe712f0..02510452e16 100644
--- a/dojo/templates/dojo/system_settings.html
+++ b/dojo/templates/dojo/system_settings.html
@@ -62,7 +62,7 @@

    System Settings

     }
     $(function () {
-        $.each(['slack','msteams','mail', 'grade'], function (index, value) {
+        $.each(['slack','msteams','mail','webhooks','grade'], function (index, value) {
             updatenotificationsgroup(value);
             $('#id_enable_' + value + '_notifications').change(function() { updatenotificationsgroup(value)});
         });
diff --git a/dojo/templates/dojo/view_notification_webhooks.html b/dojo/templates/dojo/view_notification_webhooks.html
new file mode 100644
index 00000000000..6b02c0888d3
--- /dev/null
+++ b/dojo/templates/dojo/view_notification_webhooks.html
@@ -0,0 +1,101 @@
+{% extends "base.html" %}
+{% load navigation_tags %}
+{% load display_tags %}
+{% load i18n %}
+{% load authorization_tags %}
+{% block content %}
+    {{ block.super }}
+
+
+
+
+        Notification Webhook List
+
+
+
+        {% if nwhs %}
+
+            {% include "dojo/paging_snippet.html" with page=nwhs page_size=True %}
+
+
+                    {% dojo_sort request 'Notification Webhook Name' 'name' 'asc' %}
+                    URL
+                    Status
+                    Note
+                    Owner
+            {% for nwh in nwhs %}
+                    {{ nwh.name }}
+                    {{ nwh.url }}
+                    {{ nwh.get_status_display }}
+                    {% if nwh.first_error or nwh.last_error %}
+
+                    {% endif %}
+                    {{ nwh.note }}
+                    {% if nwh.owner %}{{ nwh.owner }}{% else %}System Webhook{% endif %}
+                {% if "dojo.edit_notification_webhook"|has_configuration_permission:request %}
+                {% endif %}
+            {% endfor %}
+
+
+            {% include "dojo/paging_snippet.html" with page=nwhs page_size=True %}
+
+        {% else %}
+            No Notification Webhook found.
+        {% endif %}
+
+
+{% endblock %}
+{% block postscript %}
+    {{ block.super }}
+    {% include "dojo/filter_js_snippet.html" %}
+{% endblock %}
diff --git a/dojo/templates/dojo/view_product_details.html b/dojo/templates/dojo/view_product_details.html
index b9c5a067fe8..3f7ea62ce32 100644
--- a/dojo/templates/dojo/view_product_details.html
+++ b/dojo/templates/dojo/view_product_details.html
@@ -41,19 +41,21 @@

    {% trans "Description" %}

    {% trans "Edit Custom Fields" %} - -
  • - - {% trans "Add API Scan Configuration" %} - -
  • + {% endif %} + + {% if prod|has_object_permission:"Product_API_Scan_Configuration_Add" %}
  • - - {% trans "View API Scan Configurations" %} - + + {% trans "Add API Scan Configuration" %} +
  • {% endif %} +
  • + + {% trans "View API Scan Configurations" %} + +
  • {% if system_settings.enable_product_tracking_files %} {% if prod|has_object_permission:"Product_Tracking_Files_Add" %} @@ -295,7 +297,7 @@

    {% trans "Members" %}

     {% endif %}
-    {% if product_members or product_type_members %}
+    {% if product_members or product_type_members or global_product_members %}
    @@ -348,6 +350,15 @@

    {% trans "Members" %}

         {{ member.role }}
     {% endfor %}
+    {% for member in global_product_members %}
+
+        {{ member.user.get_full_name }}
+        Global role
+        {{ member.role }}
+
+
+
+    {% endfor %}
    @@ -383,7 +394,7 @@

    {% trans "Groups" %}

     {% endif %}
-    {% if product_groups or product_type_groups %}
+    {% if product_groups or product_type_groups or global_product_groups %}
    @@ -435,6 +446,14 @@

    {% trans "Groups" %}

         {{ type_group.role }}
     {% endfor %}
+    {% for type_group in global_product_groups %}
+
+        {{ type_group.group.name }}
+        Global role
+        {{ type_group.role }}
+
+
+    {% endfor %}
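Reviewer note: the member and group hunks above render rows for users and groups whose access comes from a global role rather than direct membership. A sketch of how a view might collect them; `Dojo_User` and the `global_role` relation follow DefectDojo's model naming but should be treated as assumptions here:

```python
from dojo.models import Dojo_User

def get_global_product_members():
    # Users holding any global role can see every product, so they are
    # listed alongside direct members with the "Global role" label above.
    return (
        Dojo_User.objects
        .filter(global_role__role__isnull=False)
        .select_related("global_role__role")
    )
```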
    @@ -461,7 +480,7 @@

    {% trans "Product Type" %} - {{ prod.prod_type|notspecified }} + {{ prod.prod_type }} {% trans "Platform" %} @@ -668,7 +687,7 @@