diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 713480dd33d..ba1ba50d658 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -36,7 +36,7 @@ A clear and concise description of what you expected to happen. - DefectDojo version (see footer) or commit message: [use `git show -s --format="[%ci] %h: %s [%d]"`] **Logs** -Use `docker-compose logs` (or similar, depending on your deployment method) to get the logs and add the relevant sections here showing the error occurring (if applicable). +Use `docker compose logs` (or similar, depending on your deployment method) to get the logs and add the relevant sections here showing the error occurring (if applicable). **Sample scan files** If applicable, add sample scan files to help reproduce your problem. diff --git a/.github/ISSUE_TEMPLATE/support_request.md b/.github/ISSUE_TEMPLATE/support_request.md index 7eda2a58dea..4dc3873471f 100644 --- a/.github/ISSUE_TEMPLATE/support_request.md +++ b/.github/ISSUE_TEMPLATE/support_request.md @@ -36,7 +36,7 @@ A clear and concise description of what you expected to happen. - DefectDojo version (see footer) or commit message: [use `git show -s --format="[%ci] %h: %s [%d]"`] **Logs** -Use `docker-compose logs` (or similar, depending on your deployment method) to get the logs and add the relevant sections here showing the error occurring (if applicable). +Use `docker compose logs` (or similar, depending on your deployment method) to get the logs and add the relevant sections here showing the error occurring (if applicable). **Sample scan files** If applicable, add sample scan files to help reproduce your problem. 
diff --git a/.github/workflows/build-docker-images-for-testing.yml b/.github/workflows/build-docker-images-for-testing.yml index a8a570a9f8c..c5753973ae2 100644 --- a/.github/workflows/build-docker-images-for-testing.yml +++ b/.github/workflows/build-docker-images-for-testing.yml @@ -45,9 +45,7 @@ jobs: tags: defectdojo/defectdojo-${{ matrix.docker-image }}:${{ matrix.os }} file: Dockerfile.${{ matrix.docker-image }}-${{ matrix.os }} outputs: type=docker,dest=${{ matrix.docker-image }}-${{ matrix.os }}_img - cache-from: type=gha,scope=${{ matrix.docker-image }} - cache-to: type=gha,mode=max,scope=${{ matrix.docker-image }} - + # export docker images to be used in next jobs below - name: Upload image ${{ matrix.docker-image }} as artifact timeout-minutes: 10 diff --git a/.github/workflows/release-3-master-into-dev.yml b/.github/workflows/release-3-master-into-dev.yml index b5c8828ee16..cbd287d232e 100644 --- a/.github/workflows/release-3-master-into-dev.yml +++ b/.github/workflows/release-3-master-into-dev.yml @@ -50,11 +50,15 @@ jobs: CURRENT_CHART_VERSION=$(grep -oP 'version: (\K\S*)?' helm/defectdojo/Chart.yaml | head -1) sed -ri "0,/version/s/version: \S+/$(echo "version: $CURRENT_CHART_VERSION" | awk -F. -v OFS=. 'NF==1{print ++$NF}; NF>1{$NF=sprintf("%0*d", length($NF), ($NF+1)); print}')-dev/" helm/defectdojo/Chart.yaml + - name: Update settings SHA + run: sha256sum dojo/settings/settings.dist.py | cut -d ' ' -f1 > dojo/settings/.settings.dist.py.sha256sum + - name: Check numbers run: | grep version dojo/__init__.py grep appVersion helm/defectdojo/Chart.yaml grep version components/package.json + cat dojo/settings/.settings.dist.py.sha256sum - name: Create upgrade notes to documentation run: | @@ -132,11 +136,15 @@ jobs: CURRENT_CHART_VERSION=$(grep -oP 'version: (\K\S*)?' helm/defectdojo/Chart.yaml | head -1) sed -ri "0,/version/s/version: \S+/$(echo "version: $CURRENT_CHART_VERSION" | awk -F. -v OFS=. 
'NF==1{print ++$NF}; NF>1{$NF=sprintf("%0*d", length($NF), ($NF+1)); print}')-dev/" helm/defectdojo/Chart.yaml + - name: Update settings SHA + run: sha256sum dojo/settings/settings.dist.py | cut -d ' ' -f1 > dojo/settings/.settings.dist.py.sha256sum + - name: Check numbers run: | grep version dojo/__init__.py grep appVersion helm/defectdojo/Chart.yaml grep version components/package.json + cat dojo/settings/.settings.dist.py.sha256sum - name: Push version changes uses: stefanzweifel/git-auto-commit-action@v5.0.1 diff --git a/.github/workflows/release-x-manual-docker-containers.yml b/.github/workflows/release-x-manual-docker-containers.yml index bae585d2388..6f8862b6216 100644 --- a/.github/workflows/release-x-manual-docker-containers.yml +++ b/.github/workflows/release-x-manual-docker-containers.yml @@ -49,18 +49,6 @@ jobs: id: buildx uses: docker/setup-buildx-action@v3 - - name: Cache Docker layers - uses: actions/cache@v4 - env: - docker-image: ${{ matrix.docker-image }} - with: - path: /tmp/.buildx-cache-${{ env.docker-image }} - key: ${{ runner.os }}-buildx-${{ env.docker-image }}-${{ matrix.os }}-${{ env.workflow_name }}-${{ github.sha }}-${{ github.run_id }} - restore-keys: | - ${{ runner.os }}-buildx-${{ env.docker-image }}-${{ matrix.os }}-${{ env.workflow_name}}-${{ github.sha }} - ${{ runner.os }}-buildx-${{ env.docker-image }}-${{ matrix.os }}-${{ env.workflow_name }} - ${{ runner.os }}-buildx-${{ env.docker-image }}-${{ matrix.os }}- - - name: Build and push images with debian if: ${{ matrix.os == 'debian' }} uses: docker/build-push-action@v6 @@ -73,8 +61,6 @@ jobs: tags: ${{ env.REPO_ORG }}/defectdojo-${{ env.docker-image}}:${{ github.event.inputs.release_number }}-${{ matrix.os }}, ${{ env.REPO_ORG }}/defectdojo-${{ env.docker-image}}:${{ github.event.inputs.release_number }}, ${{ env.REPO_ORG }}/defectdojo-${{ env.docker-image}}:latest file: ./Dockerfile.${{ env.docker-image }}-${{ matrix.os }} context: . 
- cache-from: type=local,src=/tmp/.buildx-cache-${{ env.docker-image }} - cache-to: type=local,dest=/tmp/.buildx-cache-${{ env.docker-image }} - name: Build and push images with alpine if: ${{ matrix.os == 'alpine' }} @@ -88,9 +74,3 @@ jobs: tags: ${{ env.REPO_ORG }}/defectdojo-${{ env.docker-image}}:${{ github.event.inputs.release_number }}-${{ matrix.os }} file: ./Dockerfile.${{ env.docker-image }}-${{ matrix.os }} context: . - cache-from: type=local,src=/tmp/.buildx-cache-${{ env.docker-image }} - cache-to: type=local,dest=/tmp/.buildx-cache-${{ env.docker-image }} -# platforms: ${{ matrix.platform }} - - - name: Image digest - run: echo ${{ steps.docker_build.outputs.digest }} diff --git a/.github/workflows/rest-framework-tests.yml b/.github/workflows/rest-framework-tests.yml index 907ecf92968..f153a368ba9 100644 --- a/.github/workflows/rest-framework-tests.yml +++ b/.github/workflows/rest-framework-tests.yml @@ -34,8 +34,8 @@ jobs: run: docker/setEnv.sh unit_tests_cicd # phased startup so we can use the exit code from unit test container - - name: Start Postgres - run: docker compose up -d postgres + - name: Start Postgres and webhook.endpoint + run: docker compose up -d postgres webhook.endpoint # no celery or initializer needed for unit tests - name: Unit tests diff --git a/components/package.json b/components/package.json index b1a047f22bc..e5df589df37 100644 --- a/components/package.json +++ b/components/package.json @@ -35,7 +35,7 @@ "metismenu": "~3.0.7", "moment": "^2.30.1", "morris.js": "morrisjs/morris.js", - "pdfmake": "^0.2.12", + "pdfmake": "^0.2.13", "startbootstrap-sb-admin-2": "1.0.7" }, "engines": { diff --git a/components/yarn.lock b/components/yarn.lock index 8bd8311e89b..7bb19365790 100644 --- a/components/yarn.lock +++ b/components/yarn.lock @@ -824,10 +824,10 @@ path-parse@^1.0.7: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity 
sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== -pdfmake@^0.2.12: - version "0.2.12" - resolved "https://registry.yarnpkg.com/pdfmake/-/pdfmake-0.2.12.tgz#5156f91ff73797947942aa342423bedaa0c0bc93" - integrity sha512-TFsqaG6KVtk+TWermmJNNwom3wmB/xiz07prM74KBhdM+7pz3Uwq2b0uoqhhQRn6cYUTpL8lXZY6xF011o1YcQ== +pdfmake@^0.2.13: + version "0.2.13" + resolved "https://registry.yarnpkg.com/pdfmake/-/pdfmake-0.2.13.tgz#ea43fe9f0c8de1e5ec7b08486d6f4f8bbb8619e4" + integrity sha512-qeVE9Bzjm0oPCitH4/HYM/XCGTwoeOAOVAXPnV3s0kpPvTLkTF/bAF4jzorjkaIhXGQhzYk6Xclt0hMDYLY93w== dependencies: "@foliojs-fork/linebreak" "^1.1.1" "@foliojs-fork/pdfkit" "^0.14.0" diff --git a/docker-compose.override.dev.yml b/docker-compose.override.dev.yml index f3a281af061..581dd627900 100644 --- a/docker-compose.override.dev.yml +++ b/docker-compose.override.dev.yml @@ -53,3 +53,5 @@ services: published: 8025 protocol: tcp mode: host + "webhook.endpoint": + image: mccutchen/go-httpbin:v2.15.0@sha256:24528cf5229d0b70065ac27e6c9e4d96f5452a84a3ce4433e56573c18d96827a diff --git a/docker-compose.override.unit_tests.yml b/docker-compose.override.unit_tests.yml index 164d7a87084..7c32e179386 100644 --- a/docker-compose.override.unit_tests.yml +++ b/docker-compose.override.unit_tests.yml @@ -51,6 +51,8 @@ services: redis: image: busybox:1.36.1-musl entrypoint: ['echo', 'skipping', 'redis'] + "webhook.endpoint": + image: mccutchen/go-httpbin:v2.15.0@sha256:24528cf5229d0b70065ac27e6c9e4d96f5452a84a3ce4433e56573c18d96827a volumes: defectdojo_postgres_unit_tests: {} defectdojo_media_unit_tests: {} diff --git a/docker-compose.override.unit_tests_cicd.yml b/docker-compose.override.unit_tests_cicd.yml index b39f4cf034d..64af8ac79aa 100644 --- a/docker-compose.override.unit_tests_cicd.yml +++ b/docker-compose.override.unit_tests_cicd.yml @@ -50,6 +50,8 @@ services: redis: image: busybox:1.36.1-musl entrypoint: ['echo', 'skipping', 'redis'] + "webhook.endpoint": + image: 
mccutchen/go-httpbin:v2.15.0@sha256:24528cf5229d0b70065ac27e6c9e4d96f5452a84a3ce4433e56573c18d96827a volumes: defectdojo_postgres_unit_tests: {} defectdojo_media_unit_tests: {} diff --git a/docker/docker-compose-check.sh b/docker/docker-compose-check.sh index b51cf45674f..d24419de2ee 100755 --- a/docker/docker-compose-check.sh +++ b/docker/docker-compose-check.sh @@ -6,11 +6,11 @@ current=$(docker compose version --short) echo 'Checking docker compose version' if [[ $main -lt 2 ]]; then - echo "$current is not a supported docker-compose version, please upgrade to the minimum supported version: 2.0" + echo "$current is not a supported 'docker compose' version, please upgrade to the minimum supported version: 2.0" exit 1 elif [[ $main -eq 1 ]]; then if [[ $minor -lt 28 ]]; then - echo "$current is not supported docker-compose version, please upgrade to minimal supported version:1.28" + echo "$current is not supported 'docker compose' version, please upgrade to minimal supported version:1.28" exit 1 fi fi diff --git a/docker/extra_settings/README.md b/docker/extra_settings/README.md index e919e1917bc..b3a8fc0eddb 100644 --- a/docker/extra_settings/README.md +++ b/docker/extra_settings/README.md @@ -6,7 +6,7 @@ If a file if placed here, it will be copied on startup to `dojo/settings/local_s For an example, see [template-local_settings](../../dojo/settings/template-local_settings) Please note this copy action could fail if you have mounted the full `dojo/` folder, but that is owned by a different user/group. -That's why this copy action only happens in docker-compose release mode, and not in dev/debug/unit_tests/integration_tests modes. +That's why this copy action only happens in docker compose release mode, and not in dev/debug/unit_tests/integration_tests modes. For advanced usage you can also place a `settings.dist.py` or `settings.py` file. These will also be copied on startup to dojo/settings. 
diff --git a/docs/content/en/contributing/how-to-write-a-parser.md b/docs/content/en/contributing/how-to-write-a-parser.md index 7495f7ba886..c87846cb620 100644 --- a/docs/content/en/contributing/how-to-write-a-parser.md +++ b/docs/content/en/contributing/how-to-write-a-parser.md @@ -15,7 +15,7 @@ All commands assume that you're located at the root of the django-DefectDojo clo - Checkout `dev` and make sure you're up to date with the latest changes. - It's advised that you create a dedicated branch for your development, such as `git checkout -b parser-name`. -It is easiest to use the docker-compose deployment as it has hot-reload capbility for uWSGI. +It is easiest to use the docker compose deployment as it has hot-reload capability for uWSGI. Set up your environment to use the debug environment: `$ docker/setEnv.sh debug` @@ -27,7 +27,7 @@ Please have a look at [DOCKER.md](https://github.com/DefectDojo/django-DefectDoj You will want to build your docker images locally, and eventually pass in your local user's `uid` to be able to write to the image (handy for database migration files). Assuming your user's `uid` is `1000`, then: {{< highlight bash >}} -$ docker-compose build --build-arg uid=1000 +$ docker compose build --build-arg uid=1000 {{< /highlight >}} ## Which files do you need to modify? @@ -279,7 +279,7 @@ This ensures the file is closed at the end of the with statement, even if an exc ### Test database -To test your unit tests locally, you first need to grant some rights. 
Get your MySQL root password from the docker compose logs, login as root and issue the following commands: {{< highlight mysql >}} MYSQL> grant all privileges on test_defectdojo.* to defectdojo@'%'; @@ -291,17 +291,17 @@ MYSQL> flush privileges; This local command will launch the unit test for your new parser {{< highlight bash >}} -$ docker-compose exec uwsgi bash -c 'python manage.py test unittests.tools.. -v2' +$ docker compose exec uwsgi bash -c 'python manage.py test unittests.tools.. -v2' {{< /highlight >}} Example for the blackduck hub parser: {{< highlight bash >}} -$ docker-compose exec uwsgi bash -c 'python manage.py test unittests.tools.test_blackduck_csv_parser.TestBlackduckHubParser -v2' +$ docker compose exec uwsgi bash -c 'python manage.py test unittests.tools.test_blackduck_csv_parser.TestBlackduckHubParser -v2' {{< /highlight >}} {{% alert title="Information" color="info" %}} -If you want to run all unit tests, simply run `$ docker-compose exec uwsgi bash -c 'python manage.py test unittests -v2'` +If you want to run all unit tests, simply run `$ docker compose exec uwsgi bash -c 'python manage.py test unittests -v2'` {{% /alert %}} ### Endpoint validation @@ -330,7 +330,7 @@ In the event where you'd have to change the model, e.g. 
to increase a database c * Create a new migration file in dojo/db_migrations by running and including as part of your PR {{< highlight bash >}} - $ docker-compose exec uwsgi bash -c 'python manage.py makemigrations -v2' + $ docker compose exec uwsgi bash -c 'python manage.py makemigrations -v2' {{< /highlight >}} ### Accept a different type of file to upload diff --git a/docs/content/en/getting_started/running-in-production.md b/docs/content/en/getting_started/running-in-production.md index 6da16d253b7..4074acb8df0 100644 --- a/docs/content/en/getting_started/running-in-production.md +++ b/docs/content/en/getting_started/running-in-production.md @@ -5,7 +5,7 @@ draft: false weight: 4 --- -## Production use with docker-compose +## Production use with docker compose The docker-compose.yml file in this repository is fully functional to evaluate DefectDojo in your local environment. @@ -76,7 +76,7 @@ Dockerfile.django-* for in-file references. You can execute the following command to see the configuration: -`docker-compose exec celerybeat bash -c "celery -A dojo inspect stats"` +`docker compose exec celerybeat bash -c "celery -A dojo inspect stats"` and see what is in effect. 
#### Asynchronous Import diff --git a/docs/content/en/getting_started/upgrading/2.23.md b/docs/content/en/getting_started/upgrading/2.23.md index 5ebcc4edc61..5525d10ce01 100644 --- a/docs/content/en/getting_started/upgrading/2.23.md +++ b/docs/content/en/getting_started/upgrading/2.23.md @@ -16,6 +16,6 @@ There is a migration process built into the upgrade that will automatically conv - If your deployment uses the MySQL containerized database, please see the following updates to run DefectDojo: - Use of the helper script "dc-up": `./dc-up.sh mysql-rabbitmq` or `./dc-up.sh mysql-redis` - Use of the helper script "dc-up-d": `./dc-up-d.sh mysql-rabbitmq` or `./dc-up-d.sh mysql-redis` - - Use of Docker Compose directly: `docker-compose --profile mysql-rabbitmq --env-file ./docker/environments/mysql-rabbitmq.env up` or `docker-compose --profile mysql-redis --env-file ./docker/environments/mysql-redis.env up` + - Use of Docker Compose directly: `docker compose --profile mysql-rabbitmq --env-file ./docker/environments/mysql-rabbitmq.env up` or `docker compose --profile mysql-redis --env-file ./docker/environments/mysql-redis.env up` For all other changes, check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.23.0) for the contents of the release. diff --git a/docs/content/en/getting_started/upgrading/2.30.md b/docs/content/en/getting_started/upgrading/2.30.md index 6029febd302..b2a0bc66087 100644 --- a/docs/content/en/getting_started/upgrading/2.30.md +++ b/docs/content/en/getting_started/upgrading/2.30.md @@ -10,7 +10,7 @@ There are instructions for upgrading to 2.30.0 if you disabled `enable_auditlog` Parameter `enable_auditlog` is not possible to set through System settings anymore. If you set this parameter or you need to change it to `False` (to disable audit logging), set environmental variable `DD_ENABLE_AUDITLOG` to `False`. 
-If you are using docker-compose, another EnvVar should be added to the `docker-compose.yml` file in all the containers ran by the django image. This should do the trick +If you are using docker compose, another EnvVar should be added to the `docker-compose.yml` file in all the containers run by the django image. This should do the trick ```yaml DD_ENABLE_AUDITLOG: ${DD_ENABLE_AUDITLOG:-False} ``` diff --git a/docs/content/en/getting_started/upgrading/_index.md b/docs/content/en/getting_started/upgrading/_index.md index 9a57986deea..a7f5aa30906 100644 --- a/docs/content/en/getting_started/upgrading/_index.md +++ b/docs/content/en/getting_started/upgrading/_index.md @@ -5,9 +5,9 @@ draft: false weight: 5 --- -## Docker-compose +## Docker compose -When you deploy a vanilla docker-compose, it will create a persistent +When you deploy a vanilla docker compose, it will create a persistent volume for your Postgres database. As long as your volume is there, you should not lose any data. @@ -19,7 +19,7 @@ DockerHub to update. {{% /alert %}} -The generic upgrade method for docker-compose are as follows: +The generic upgrade method for docker compose is as follows: - Pull the latest version ``` {.sourceCode .bash} @@ -46,10 +46,10 @@ The generic upgrade method for docker-compose are as follows: - Re-start DefectDojo, allowing for container recreation: `./dc-up-d.sh` - Database migrations will be run automatically by the initializer. - Check the output via `docker-compose logs initializer` or relevant k8s command + Check the output via `docker compose logs initializer` or relevant k8s command - If you have the initializer disabled (or if you want to be on the safe side), run the migration command: - `docker-compose exec uwsgi /bin/bash -c "python manage.py migrate"` + `docker compose exec uwsgi /bin/bash -c "python manage.py migrate"` ### Building your local images @@ -64,7 +64,7 @@ first. 
git merge origin/master ``` -Then replace the first step of the above generic upgrade method for docker-compose with: `docker-compose build` +Then replace the first step of the above generic upgrade method for docker compose with: `docker compose build` ## godojo installations diff --git a/docs/content/en/integrations/burp-plugin.md b/docs/content/en/integrations/burp-plugin.md index 400b37c0f2a..ab3285ceda4 100644 --- a/docs/content/en/integrations/burp-plugin.md +++ b/docs/content/en/integrations/burp-plugin.md @@ -2,7 +2,7 @@ title: "Defect Dojo Burp plugin" description: "Export findings directly from Burp to DefectDojo." draft: false -weight: 8 +weight: 9 --- **Please note: The DefectDojo Burp Plugin has been sunset and is no longer a supported feature.** diff --git a/docs/content/en/integrations/exporting.md b/docs/content/en/integrations/exporting.md index da17df7d93b..7a42d27b17e 100644 --- a/docs/content/en/integrations/exporting.md +++ b/docs/content/en/integrations/exporting.md @@ -2,7 +2,7 @@ title: "Exporting" description: "DefectDojo has the ability to export findings." draft: false -weight: 11 +weight: 12 --- diff --git a/docs/content/en/integrations/google-sheets-sync.md b/docs/content/en/integrations/google-sheets-sync.md index b6e97f72f84..456a694fc6e 100644 --- a/docs/content/en/integrations/google-sheets-sync.md +++ b/docs/content/en/integrations/google-sheets-sync.md @@ -2,7 +2,7 @@ title: "Google Sheets synchronisation" description: "Export finding details to Google Sheets and upload changes from Google Sheets." 
draft: false -weight: 7 +weight: 8 --- **Please note - the Google Sheets feature has been deprecated as of DefectDojo version 2.21.0 - these documents are for reference only.** diff --git a/docs/content/en/integrations/jira.md b/docs/content/en/integrations/jira.md index e7a19329bd4..b6bc83fe206 100644 --- a/docs/content/en/integrations/jira.md +++ b/docs/content/en/integrations/jira.md @@ -167,19 +167,19 @@ optional arguments: This can be executed from the uwsgi docker container using: {{< highlight bash >}} -$ docker-compose exec uwsgi /bin/bash -c 'python manage.py jira_status_reconciliation' +$ docker compose exec uwsgi /bin/bash -c 'python manage.py jira_status_reconciliation' {{< /highlight >}} DEBUG output can be obtains via `-v 3`, but only after increasing the logging to DEBUG level in your settings.dist.py or local_settings.py file {{< highlight bash >}} -$ docker-compose exec uwsgi /bin/bash -c 'python manage.py jira_status_reconciliation -v 3' +$ docker compose exec uwsgi /bin/bash -c 'python manage.py jira_status_reconciliation -v 3' {{< /highlight >}} At the end of the command a semicolon seperated CSV summary will be printed. This can be captured by redirecting stdout to a file: {{< highlight bash >}} -$ docker-compose exec uwsgi /bin/bash -c 'python manage.py jira_status_reconciliation > jira_reconciliation.csv' +$ docker compose exec uwsgi /bin/bash -c 'python manage.py jira_status_reconciliation > jira_reconciliation.csv' {{< /highlight >}} diff --git a/docs/content/en/integrations/languages.md b/docs/content/en/integrations/languages.md index 17a322c8f90..a78ed137e69 100644 --- a/docs/content/en/integrations/languages.md +++ b/docs/content/en/integrations/languages.md @@ -2,7 +2,7 @@ title: "Languages and lines of code" description: "You can import an analysis of languages used in a project, including lines of code." 
draft: false -weight: 9 +weight: 10 --- ## Import of languages for a project diff --git a/docs/content/en/integrations/ldap-authentication.md b/docs/content/en/integrations/ldap-authentication.md index 17697043736..307f1029a0a 100644 --- a/docs/content/en/integrations/ldap-authentication.md +++ b/docs/content/en/integrations/ldap-authentication.md @@ -116,7 +116,7 @@ Read the docs for Django Authentication with LDAP here: https://django-auth-ldap #### docker-compose.yml -In order to pass the variables to the settings.dist.py file via docker, it's a good idea to add these to the docker-compose file. +In order to pass the variables to the settings.dist.py file via docker, it's a good idea to add these to the docker compose file. You can do this by adding the following variables to the environment section for the uwsgi image: ```yaml diff --git a/docs/content/en/integrations/notification_webhooks/_index.md b/docs/content/en/integrations/notification_webhooks/_index.md new file mode 100644 index 00000000000..d8fe606cffa --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/_index.md @@ -0,0 +1,79 @@ +--- +title: "Notification Webhooks (experimental)" +description: "How to set up and use webhooks" +weight: 7 +chapter: true +--- + +Webhooks are HTTP requests coming from the DefectDojo instance towards a user-defined webserver which expects this kind of incoming traffic. + +## Transition graph: + +It is not unusual that in some cases a webhook cannot be performed. It is usually connected to network issues, server misconfiguration, or running upgrades on the server. DefectDojo needs to react to these outages. It might temporarily or permanently disable related endpoints. The following graph shows how it might change the status of the webhook definition based on HTTP responses (or manual user interaction). 
+ +```mermaid +flowchart TD + + START{{Endpoint created}} + ALL{All states} + STATUS_ACTIVE([STATUS_ACTIVE]) + STATUS_INACTIVE_TMP + STATUS_INACTIVE_PERMANENT + STATUS_ACTIVE_TMP([STATUS_ACTIVE_TMP]) + END{{Endpoint removed}} + + START ==> STATUS_ACTIVE + STATUS_ACTIVE --HTTP 200 or 201 --> STATUS_ACTIVE + STATUS_ACTIVE --HTTP 5xx
or HTTP 429
or Timeout--> STATUS_INACTIVE_TMP + STATUS_ACTIVE --Any HTTP 4xx response
or any other HTTP response
or non-HTTP error--> STATUS_INACTIVE_PERMANENT + STATUS_INACTIVE_TMP -.After 60s.-> STATUS_ACTIVE_TMP + STATUS_ACTIVE_TMP --HTTP 5xx
or HTTP 429
or Timeout
within 24h
from the first error-->STATUS_INACTIVE_TMP + STATUS_ACTIVE_TMP -.After 24h.-> STATUS_ACTIVE + STATUS_ACTIVE_TMP --HTTP 200 or 201 --> STATUS_ACTIVE_TMP + STATUS_ACTIVE_TMP --HTTP 5xx
or HTTP 429
or Timeout
within 24h from the first error
or any other HTTP response or error--> STATUS_INACTIVE_PERMANENT + ALL ==Activation by user==> STATUS_ACTIVE + ALL ==Deactivation by user==> STATUS_INACTIVE_PERMANENT + ALL ==Removal of endpoint by user==> END +``` + +Notes: + +1. Transitions: + - bold: manual changes by user + - dotted: automated by celery + - others: based on responses on webhooks +1. Nodes: + - Stadium-shaped: Active - following webhook can be sent + - Rectangles: Inactive - performing of webhook will fail (and not retried) + - Hexagonal: Initial and final states + - Rhombus: All states (meta node to make the graph more readable) + +## Body and Headers + +The body of each request is JSON which contains data about related events like names and IDs of affected elements. +Examples of bodies are on pages related to each event (see below). + +Each request contains the following headers. They might be useful for better handling of events by server this process events. + +```yaml +User-Agent: DefectDojo- +X-DefectDojo-Event: +X-DefectDojo-Instance: +``` +## Disclaimer + +This functionality is new and in experimental mode. This means Functionality might generate breaking changes in following DefectDojo releases and might not be considered final. + +However, the community is open to feedback to make this functionality better and transform it stable as soon as possible. + +## Roadmap + +There are a couple of known issues that are expected to be implemented as soon as core functionality is considered ready. 
+ +- Support events - Not only adding products, product types, engagements, tests, or upload of new scans but also events around SLA +- User webhook - right now only admins can define webhooks; in the future also users will be able to define their own +- Improvement in UI - add filtering and pagination of webhook endpoints + +## Events + + \ No newline at end of file diff --git a/docs/content/en/integrations/notification_webhooks/engagement_added.md b/docs/content/en/integrations/notification_webhooks/engagement_added.md new file mode 100644 index 00000000000..64fd7746ec2 --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/engagement_added.md @@ -0,0 +1,38 @@ +--- +title: "Event: engagement_added" +weight: 3 +chapter: true +--- + +## Event HTTP header +```yaml +X-DefectDojo-Event: engagement_added +``` + +## Event HTTP body +```json +{ + "description": null, + "engagement": { + "id": 7, + "name": "notif eng", + "url_api": "http://localhost:8080/api/v2/engagements/7/", + "url_ui": "http://localhost:8080/engagement/7" + }, + "product": { + "id": 4, + "name": "notif prod", + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4" + }, + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "url_api": "http://localhost:8080/api/v2/engagements/7/", + "url_ui": "http://localhost:8080/engagement/7", + "user": null +} +``` \ No newline at end of file diff --git a/docs/content/en/integrations/notification_webhooks/product_added.md b/docs/content/en/integrations/notification_webhooks/product_added.md new file mode 100644 index 00000000000..2d90a6a681f --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/product_added.md @@ -0,0 +1,32 @@ +--- +title: "Event: product_added" +weight: 2 +chapter: true +--- + +## Event HTTP header +```yaml +X-DefectDojo-Event: product_added 
+``` + +## Event HTTP body +```json +{ + "description": null, + "product": { + "id": 4, + "name": "notif prod", + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4" + }, + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4", + "user": null +} +``` \ No newline at end of file diff --git a/docs/content/en/integrations/notification_webhooks/product_type_added.md b/docs/content/en/integrations/notification_webhooks/product_type_added.md new file mode 100644 index 00000000000..1171f513831 --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/product_type_added.md @@ -0,0 +1,26 @@ +--- +title: "Event: product_type_added" +weight: 1 +chapter: true +--- + +## Event HTTP header +```yaml +X-DefectDojo-Event: product_type_added +``` + +## Event HTTP body +```json +{ + "description": null, + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4", + "user": null +} +``` \ No newline at end of file diff --git a/docs/content/en/integrations/notification_webhooks/scan_added.md b/docs/content/en/integrations/notification_webhooks/scan_added.md new file mode 100644 index 00000000000..27a40e6cab1 --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/scan_added.md @@ -0,0 +1,90 @@ +--- +title: "Event: scan_added and scan_added_empty" +weight: 5 +chapter: true +--- + +Event `scan_added_empty` describes a situation when reimport did not affect the existing test (no finding has been created or closed). 
+ +## Event HTTP header for scan_added +```yaml +X-DefectDojo-Event: scan_added +``` + +## Event HTTP header for scan_added_empty +```yaml +X-DefectDojo-Event: scan_added_empty +``` + +## Event HTTP body +```json +{ + "description": null, + "engagement": { + "id": 7, + "name": "notif eng", + "url_api": "http://localhost:8080/api/v2/engagements/7/", + "url_ui": "http://localhost:8080/engagement/7" + }, + "finding_count": 4, + "findings": { + "mitigated": [ + { + "id": 233, + "severity": "Medium", + "title": "Mitigated Finding", + "url_api": "http://localhost:8080/api/v2/findings/233/", + "url_ui": "http://localhost:8080/finding/233" + } + ], + "new": [ + { + "id": 232, + "severity": "Critical", + "title": "New Finding", + "url_api": "http://localhost:8080/api/v2/findings/232/", + "url_ui": "http://localhost:8080/finding/232" + } + ], + "reactivated": [ + { + "id": 234, + "severity": "Low", + "title": "Reactivated Finding", + "url_api": "http://localhost:8080/api/v2/findings/234/", + "url_ui": "http://localhost:8080/finding/234" + } + ], + "untouched": [ + { + "id": 235, + "severity": "Info", + "title": "Untouched Finding", + "url_api": "http://localhost:8080/api/v2/findings/235/", + "url_ui": "http://localhost:8080/finding/235" + } + ] + }, + "product": { + "id": 4, + "name": "notif prod", + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4" + }, + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "test": { + "id": 90, + "title": "notif test", + "url_api": "http://localhost:8080/api/v2/tests/90/", + "url_ui": "http://localhost:8080/test/90" + }, + "url_api": "http://localhost:8080/api/v2/tests/90/", + "url_ui": "http://localhost:8080/test/90", + "user": null +} +``` \ No newline at end of file diff --git a/docs/content/en/integrations/notification_webhooks/test_added.md 
b/docs/content/en/integrations/notification_webhooks/test_added.md new file mode 100644 index 00000000000..8614a80e0a6 --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/test_added.md @@ -0,0 +1,44 @@ +--- +title: "Event: test_added" +weight: 4 +chapter: true +--- + +## Event HTTP header +```yaml +X-DefectDojo-Event: test_added +``` + +## Event HTTP body +```json +{ + "description": null, + "engagement": { + "id": 7, + "name": "notif eng", + "url_api": "http://localhost:8080/api/v2/engagements/7/", + "url_ui": "http://localhost:8080/engagement/7" + }, + "product": { + "id": 4, + "name": "notif prod", + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4" + }, + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "test": { + "id": 90, + "title": "notif test", + "url_api": "http://localhost:8080/api/v2/tests/90/", + "url_ui": "http://localhost:8080/test/90" + }, + "url_api": "http://localhost:8080/api/v2/tests/90/", + "url_ui": "http://localhost:8080/test/90", + "user": null +} +``` \ No newline at end of file diff --git a/docs/content/en/integrations/notifications.md b/docs/content/en/integrations/notifications.md index d5af295f0eb..803388797cd 100644 --- a/docs/content/en/integrations/notifications.md +++ b/docs/content/en/integrations/notifications.md @@ -18,6 +18,7 @@ The following notification methods currently exist: - Email - Slack - Microsoft Teams + - Webhooks - Alerts within DefectDojo (default) You can set these notifications on a global scope (if you have @@ -124,4 +125,8 @@ However, there is a specific use-case when the user decides to disable notificat The scope of this setting is customizable (see environmental variable `DD_NOTIFICATIONS_SYSTEM_LEVEL_TRUMP`). 
-For more information about this behavior see the [related pull request #9699](https://github.com/DefectDojo/django-DefectDojo/pull/9699/) \ No newline at end of file +For more information about this behavior see the [related pull request #9699](https://github.com/DefectDojo/django-DefectDojo/pull/9699/) + +## Webhooks (experimental) + +DefectDojo also supports webhooks that follow the same events as other notifications (you can be notified in the same situations). Details about setup are described in [related page](../notification_webhooks/). diff --git a/docs/content/en/integrations/parsers/file/invicti.md b/docs/content/en/integrations/parsers/file/invicti.md new file mode 100644 index 00000000000..c0ffda1a48e --- /dev/null +++ b/docs/content/en/integrations/parsers/file/invicti.md @@ -0,0 +1,9 @@ +--- +title: "Invicti" +toc_hide: true +--- +Vulnerabilities List - JSON report + +### Sample Scan Data + +Sample Invicti scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/invicti). diff --git a/docs/content/en/integrations/parsers/file/krakend_audit.md b/docs/content/en/integrations/parsers/file/krakend_audit.md new file mode 100644 index 00000000000..9598ce343b8 --- /dev/null +++ b/docs/content/en/integrations/parsers/file/krakend_audit.md @@ -0,0 +1,11 @@ +--- +title: "KrakenD Audit Scan" +toc_hide: true +--- +Import KrakenD Audit Scan results in JSON format. You can use the following command to audit the KrakenD configuration which then can be uploaded to DefectDojo: +``` +krakend audit -c krakend.json -f "{{ marshal . }}" >> recommendations.json +``` + +### Sample Scan Data +Sample KrakenD Audit scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/krakend_audit). 
\ No newline at end of file diff --git a/docs/content/en/integrations/parsers/file/netsparker.md b/docs/content/en/integrations/parsers/file/netsparker.md index 7e46af07b12..0be436e185b 100644 --- a/docs/content/en/integrations/parsers/file/netsparker.md +++ b/docs/content/en/integrations/parsers/file/netsparker.md @@ -4,5 +4,8 @@ toc_hide: true --- Vulnerabilities List - JSON report +[Netsparker has now become Invicti](https://www.invicti.com/blog/news/netsparker-is-now-invicti-signaling-a-new-era-for-modern-appsec/). Please plan to migrate automation scripts to use the [Invicti Scan](../invicti.md) + ### Sample Scan Data + Sample Netsparker scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/netsparker). \ No newline at end of file diff --git a/docs/content/en/integrations/parsers/file/qualys_hacker_guardian.md b/docs/content/en/integrations/parsers/file/qualys_hacker_guardian.md new file mode 100644 index 00000000000..e938970a385 --- /dev/null +++ b/docs/content/en/integrations/parsers/file/qualys_hacker_guardian.md @@ -0,0 +1,9 @@ +--- +title: "Qualys Hacker Guardian Scan" +toc_hide: true +--- +Qualys Hacker Guardian CSV export + +### Sample Scan Data + +Sample Qualys Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/qualys_hacker_guardian). 
\ No newline at end of file diff --git a/docs/content/en/integrations/rate_limiting.md b/docs/content/en/integrations/rate_limiting.md index 0cac784c5f5..1ea76ace5b3 100644 --- a/docs/content/en/integrations/rate_limiting.md +++ b/docs/content/en/integrations/rate_limiting.md @@ -2,7 +2,7 @@ title: "Rate Limiting" description: "Configurable rate limiting on the login page to mitigate brute force attacks" draft: false -weight: 9 +weight: 11 --- diff --git a/docs/content/en/usage/features.md b/docs/content/en/usage/features.md index f1020ffd4c0..5f99f34023f 100644 --- a/docs/content/en/usage/features.md +++ b/docs/content/en/usage/features.md @@ -357,7 +357,7 @@ to the hashcode configuration or calculation logic. We will mention this in the To regenerate the hashcodes, use the `dedupe` management command: {{< highlight bash >}} -docker-compose exec uwsgi ./manage.py dedupe --hash_code_only +docker compose exec uwsgi ./manage.py dedupe --hash_code_only {{< / highlight >}} This will only regenerated the hashcodes, but will not run any deduplication logic on existing findings. @@ -365,14 +365,14 @@ If you want to run deduplication again on existing findings to make sure any dup hashcode config are marked as such, run: {{< highlight bash >}} -docker-compose exec uwsgi ./manage.py dedupe +docker compose exec uwsgi ./manage.py dedupe {{< / highlight >}} The deduplication part of this command will run the deduplication for each finding in a celery task. If you want to run the deduplication in the foreground process, use: {{< highlight bash >}} -docker-compose exec uwsgi ./manage.py dedupe --dedupe_sync +docker compose exec uwsgi ./manage.py dedupe --dedupe_sync {{< / highlight >}} Please note the deduplication process is resource intensive and can take a long time to complete @@ -502,10 +502,10 @@ You can of course change this default by modifying that stanza. ### Launching from the CLI You can also invoke the SLA notification function from the CLI. 
For -example, if run from docker-compose: +example, if run from docker compose: {{< highlight bash >}} -$ docker-compose exec uwsgi /bin/bash -c 'python manage.py sla_notifications' +$ docker compose exec uwsgi /bin/bash -c 'python manage.py sla_notifications' {{< / highlight >}} ## Reports diff --git a/docs/package-lock.json b/docs/package-lock.json index d3d81bb0ec9..93d84625c28 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -6,7 +6,7 @@ "": { "devDependencies": { "autoprefixer": "10.4.20", - "postcss": "8.4.45", + "postcss": "8.4.47", "postcss-cli": "11.0.0" } }, @@ -585,9 +585,9 @@ } }, "node_modules/picocolors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", - "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", "dev": true }, "node_modules/picomatch": { @@ -612,9 +612,9 @@ } }, "node_modules/postcss": { - "version": "8.4.45", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.45.tgz", - "integrity": "sha512-7KTLTdzdZZYscUc65XmjFiB73vBhBfbPztCYdUNvlaso9PrzjzcmjqBPR0lNGkcVlcO4BjiO5rK/qNz+XAen1Q==", + "version": "8.4.47", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.47.tgz", + "integrity": "sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ==", "dev": true, "funding": [ { @@ -632,8 +632,8 @@ ], "dependencies": { "nanoid": "^3.3.7", - "picocolors": "^1.0.1", - "source-map-js": "^1.2.0" + "picocolors": "^1.1.0", + "source-map-js": "^1.2.1" }, "engines": { "node": "^10 || ^12 || >=14" @@ -834,9 +834,9 @@ } }, "node_modules/source-map-js": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", - 
"integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", "dev": true, "engines": { "node": ">=0.10.0" @@ -1372,9 +1372,9 @@ "dev": true }, "picocolors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", - "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", "dev": true }, "picomatch": { @@ -1390,14 +1390,14 @@ "dev": true }, "postcss": { - "version": "8.4.45", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.45.tgz", - "integrity": "sha512-7KTLTdzdZZYscUc65XmjFiB73vBhBfbPztCYdUNvlaso9PrzjzcmjqBPR0lNGkcVlcO4BjiO5rK/qNz+XAen1Q==", + "version": "8.4.47", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.47.tgz", + "integrity": "sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ==", "dev": true, "requires": { "nanoid": "^3.3.7", - "picocolors": "^1.0.1", - "source-map-js": "^1.2.0" + "picocolors": "^1.1.0", + "source-map-js": "^1.2.1" } }, "postcss-cli": { @@ -1504,9 +1504,9 @@ "dev": true }, "source-map-js": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", - "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": 
"sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", "dev": true }, "string-width": { diff --git a/docs/package.json b/docs/package.json index a892ece5668..9720854bf00 100644 --- a/docs/package.json +++ b/docs/package.json @@ -1,6 +1,6 @@ { "devDependencies": { - "postcss": "8.4.45", + "postcss": "8.4.47", "autoprefixer": "10.4.20", "postcss-cli": "11.0.0" } diff --git a/dojo/announcement/views.py b/dojo/announcement/views.py index 6b0cb16bc3c..26160c3236b 100644 --- a/dojo/announcement/views.py +++ b/dojo/announcement/views.py @@ -81,12 +81,11 @@ def dismiss_announcement(request): extra_tags="alert-success", ) return HttpResponseRedirect("dashboard") - else: - messages.add_message( - request, - messages.ERROR, - _("Failed to remove announcement."), - extra_tags="alert-danger", - ) - return render(request, "dojo/dismiss_announcement.html") + messages.add_message( + request, + messages.ERROR, + _("Failed to remove announcement."), + extra_tags="alert-danger", + ) + return render(request, "dojo/dismiss_announcement.html") return render(request, "dojo/dismiss_announcement.html") diff --git a/dojo/api_v2/exception_handler.py b/dojo/api_v2/exception_handler.py index 513c98004b7..8f395026b03 100644 --- a/dojo/api_v2/exception_handler.py +++ b/dojo/api_v2/exception_handler.py @@ -2,6 +2,7 @@ from django.core.exceptions import ValidationError from django.db.models.deletion import RestrictedError +from rest_framework.exceptions import ParseError from rest_framework.response import Response from rest_framework.status import ( HTTP_400_BAD_REQUEST, @@ -20,7 +21,11 @@ def custom_exception_handler(exc, context): # to get the standard error response. 
response = exception_handler(exc, context) - if isinstance(exc, RestrictedError): + if isinstance(exc, ParseError) and "JSON parse error" in str(exc): + response = Response() + response.status_code = HTTP_400_BAD_REQUEST + response.data = {"message": "JSON request content is malformed"} + elif isinstance(exc, RestrictedError): # An object cannot be deleted because it has dependent objects. response = Response() response.status_code = HTTP_409_CONFLICT diff --git a/dojo/api_v2/mixins.py b/dojo/api_v2/mixins.py index e32683c3742..6c6b4792757 100644 --- a/dojo/api_v2/mixins.py +++ b/dojo/api_v2/mixins.py @@ -29,8 +29,7 @@ def delete_preview(self, request, pk=None): def flatten(elem): if isinstance(elem, list): return itertools.chain.from_iterable(map(flatten, elem)) - else: - return [elem] + return [elem] rels = [ { diff --git a/dojo/api_v2/permissions.py b/dojo/api_v2/permissions.py index f7669826830..fe508c92b1b 100644 --- a/dojo/api_v2/permissions.py +++ b/dojo/api_v2/permissions.py @@ -35,8 +35,7 @@ def check_post_permission(request, post_model, post_pk, post_permission): raise ParseError(msg) object = get_object_or_404(post_model, pk=request.data.get(post_pk)) return user_has_permission(request.user, object, post_permission) - else: - return True + return True def check_object_permission( @@ -49,14 +48,13 @@ def check_object_permission( ): if request.method == "GET": return user_has_permission(request.user, object, get_permission) - elif request.method == "PUT" or request.method == "PATCH": + if request.method == "PUT" or request.method == "PATCH": return user_has_permission(request.user, object, put_permission) - elif request.method == "DELETE": + if request.method == "DELETE": return user_has_permission(request.user, object, delete_permission) - elif request.method == "POST": + if request.method == "POST": return user_has_permission(request.user, object, post_permission) - else: - return False + return False class 
UserHasAppAnalysisPermission(permissions.BasePermission): @@ -113,12 +111,11 @@ def has_permission(self, request, view): return user_has_configuration_permission( request.user, "auth.view_group", ) - elif request.method == "POST": + if request.method == "POST": return user_has_configuration_permission( request.user, "auth.add_group", ) - else: - return True + return True def has_object_permission(self, request, view, obj): if request.method == "GET": @@ -130,14 +127,13 @@ def has_object_permission(self, request, view, obj): ) and user_has_permission( request.user, obj, Permissions.Group_View, ) - else: - return check_object_permission( - request, - obj, - Permissions.Group_View, - Permissions.Group_Edit, - Permissions.Group_Delete, - ) + return check_object_permission( + request, + obj, + Permissions.Group_View, + Permissions.Group_Edit, + Permissions.Group_Delete, + ) class UserHasDojoGroupMemberPermission(permissions.BasePermission): @@ -188,8 +184,7 @@ def has_permission(self, request, view): ) ) return has_permission_result - else: - return True + return True def has_object_permission(self, request, view, obj): has_permission_result = True @@ -293,9 +288,8 @@ def has_permission(self, request, view): return check_post_permission( request, Product, "product", Permissions.Engagement_Add, ) - else: - # related object only need object permission - return True + # related object only need object permission + return True def has_object_permission(self, request, view, obj): if UserHasEngagementPermission.path_engagement_post.match( @@ -308,15 +302,14 @@ def has_object_permission(self, request, view, obj): Permissions.Engagement_Edit, Permissions.Engagement_Delete, ) - else: - return check_object_permission( - request, - obj, - Permissions.Engagement_View, - Permissions.Engagement_Edit, - Permissions.Engagement_Edit, - Permissions.Engagement_Edit, - ) + return check_object_permission( + request, + obj, + Permissions.Engagement_View, + Permissions.Engagement_Edit, + 
Permissions.Engagement_Edit, + Permissions.Engagement_Edit, + ) class UserHasRiskAcceptancePermission(permissions.BasePermission): @@ -334,9 +327,8 @@ def has_permission(self, request, view): return check_post_permission( request, Product, "product", Permissions.Risk_Acceptance, ) - else: - # related object only need object permission - return True + # related object only need object permission + return True def has_object_permission(self, request, view, obj): if UserHasRiskAcceptancePermission.path_risk_acceptance_post.match( @@ -351,15 +343,14 @@ def has_object_permission(self, request, view, obj): Permissions.Risk_Acceptance, Permissions.Risk_Acceptance, ) - else: - return check_object_permission( - request, - obj, - Permissions.Risk_Acceptance, - Permissions.Risk_Acceptance, - Permissions.Risk_Acceptance, - Permissions.Risk_Acceptance, - ) + return check_object_permission( + request, + obj, + Permissions.Risk_Acceptance, + Permissions.Risk_Acceptance, + Permissions.Risk_Acceptance, + Permissions.Risk_Acceptance, + ) class UserHasFindingPermission(permissions.BasePermission): @@ -382,9 +373,8 @@ def has_permission(self, request, view): return check_post_permission( request, Test, "test", Permissions.Finding_Add, ) - else: - # related object only need object permission - return True + # related object only need object permission + return True def has_object_permission(self, request, view, obj): if ( @@ -402,15 +392,14 @@ def has_object_permission(self, request, view, obj): Permissions.Finding_Edit, Permissions.Finding_Delete, ) - else: - return check_object_permission( - request, - obj, - Permissions.Finding_View, - Permissions.Finding_Edit, - Permissions.Finding_Edit, - Permissions.Finding_Edit, - ) + return check_object_permission( + request, + obj, + Permissions.Finding_View, + Permissions.Finding_Edit, + Permissions.Finding_Edit, + Permissions.Finding_Edit, + ) class UserHasImportPermission(permissions.BasePermission): @@ -435,7 +424,7 @@ def 
has_permission(self, request, view): return user_has_permission( request.user, engagement, Permissions.Import_Scan_Result, ) - elif engagement_id := converted_dict.get("engagement_id"): + if engagement_id := converted_dict.get("engagement_id"): # engagement_id doesn't exist msg = f'Engagement "{engagement_id}" does not exist' raise serializers.ValidationError(msg) @@ -452,19 +441,19 @@ def has_permission(self, request, view): converted_dict.get("product_type"), "Need engagement_id or product_name + engagement_name to perform import", ) - else: - # the engagement doesn't exist, so we need to check if the user has - # requested and is allowed to use auto_create - return check_auto_create_permission( - request.user, - converted_dict.get("product"), - converted_dict.get("product_name"), - converted_dict.get("engagement"), - converted_dict.get("engagement_name"), - converted_dict.get("product_type"), - converted_dict.get("product_type_name"), - "Need engagement_id or product_name + engagement_name to perform import", - ) + return None + # the engagement doesn't exist, so we need to check if the user has + # requested and is allowed to use auto_create + return check_auto_create_permission( + request.user, + converted_dict.get("product"), + converted_dict.get("product_name"), + converted_dict.get("engagement"), + converted_dict.get("engagement_name"), + converted_dict.get("product_type"), + converted_dict.get("product_type_name"), + "Need engagement_id or product_name + engagement_name to perform import", + ) class UserHasMetaImportPermission(permissions.BasePermission): @@ -490,13 +479,12 @@ def has_permission(self, request, view): return user_has_permission( request.user, product, Permissions.Import_Scan_Result, ) - elif product_id := converted_dict.get("product_id"): + if product_id := converted_dict.get("product_id"): # product_id doesn't exist msg = f'Product "{product_id}" does not exist' raise serializers.ValidationError(msg) - else: - msg = "Need product_id or 
product_name to perform import" - raise serializers.ValidationError(msg) + msg = "Need product_id or product_name to perform import" + raise serializers.ValidationError(msg) class UserHasProductPermission(permissions.BasePermission): @@ -556,8 +544,7 @@ def has_permission(self, request, view): return user_has_global_permission( request.user, Permissions.Product_Type_Add, ) - else: - return True + return True def has_object_permission(self, request, view, obj): return check_object_permission( @@ -631,7 +618,7 @@ def has_permission(self, request, view): return user_has_permission( request.user, test, Permissions.Import_Scan_Result, ) - elif test_id := converted_dict.get("test_id"): + if test_id := converted_dict.get("test_id"): # test_id doesn't exist msg = f'Test "{test_id}" does not exist' raise serializers.ValidationError(msg) @@ -648,19 +635,19 @@ def has_permission(self, request, view): converted_dict.get("product_type"), "Need test_id or product_name + engagement_name + scan_type to perform reimport", ) - else: - # the test doesn't exist, so we need to check if the user has - # requested and is allowed to use auto_create - return check_auto_create_permission( - request.user, - converted_dict.get("product"), - converted_dict.get("product_name"), - converted_dict.get("engagement"), - converted_dict.get("engagement_name"), - converted_dict.get("product_type"), - converted_dict.get("product_type_name"), - "Need test_id or product_name + engagement_name + scan_type to perform reimport", - ) + return None + # the test doesn't exist, so we need to check if the user has + # requested and is allowed to use auto_create + return check_auto_create_permission( + request.user, + converted_dict.get("product"), + converted_dict.get("product_name"), + converted_dict.get("engagement"), + converted_dict.get("engagement_name"), + converted_dict.get("product_type"), + converted_dict.get("product_type_name"), + "Need test_id or product_name + engagement_name + scan_type to perform 
reimport", + ) class UserHasTestPermission(permissions.BasePermission): @@ -676,9 +663,8 @@ def has_permission(self, request, view): return check_post_permission( request, Engagement, "engagement", Permissions.Test_Add, ) - else: - # related object only need object permission - return True + # related object only need object permission + return True def has_object_permission(self, request, view, obj): if UserHasTestPermission.path_tests_post.match( @@ -691,15 +677,14 @@ def has_object_permission(self, request, view, obj): Permissions.Test_Edit, Permissions.Test_Delete, ) - else: - return check_object_permission( - request, - obj, - Permissions.Test_View, - Permissions.Test_Edit, - Permissions.Test_Edit, - Permissions.Test_Edit, - ) + return check_object_permission( + request, + obj, + Permissions.Test_View, + Permissions.Test_Edit, + Permissions.Test_Edit, + Permissions.Test_Edit, + ) class UserHasTestImportPermission(permissions.BasePermission): @@ -776,8 +761,7 @@ def has_permission(self, request, view): ) ) return has_permission_result - else: - return True + return True def has_object_permission(self, request, view, obj): has_permission_result = True @@ -840,8 +824,7 @@ def has_permission(self, request, view): ) ) return has_permission_result - else: - return True + return True def has_object_permission(self, request, view, obj): has_permission_result = True @@ -934,9 +917,8 @@ def raise_no_auto_create_import_validation_error( if product_type_name: msg = f'Product "{product_name}" does not exist in Product_Type "{product_type_name}"' raise serializers.ValidationError(msg) - else: - msg = f'Product "{product_name}" does not exist' - raise serializers.ValidationError(msg) + msg = f'Product "{product_name}" does not exist' + raise serializers.ValidationError(msg) if engagement_name and not engagement: msg = f'Engagement "{engagement_name}" does not exist in Product "{product_name}"' @@ -1021,12 +1003,11 @@ def check_auto_create_permission( # new product type can 
be created with current user as owner, so # all objects in it can be created as well return True - else: - if not user_has_permission( - user, product_type, Permissions.Product_Type_Add_Product, - ): - msg = f'No permission to create products in product_type "{product_type}"' - raise PermissionDenied(msg) + if not user_has_permission( + user, product_type, Permissions.Product_Type_Add_Product, + ): + msg = f'No permission to create products in product_type "{product_type}"' + raise PermissionDenied(msg) # product can be created, so objects in it can be created as well return True diff --git a/dojo/api_v2/prefetch/prefetcher.py b/dojo/api_v2/prefetch/prefetcher.py index 79a4b0e7314..3596b3f9409 100644 --- a/dojo/api_v2/prefetch/prefetcher.py +++ b/dojo/api_v2/prefetch/prefetcher.py @@ -3,11 +3,17 @@ from rest_framework.serializers import ModelSerializer +from dojo.models import FileUpload + from . import utils # Reduce the scope of search for serializers. SERIALIZER_DEFS_MODULE = "dojo.api_v2.serializers" +preferred_serializers = { + FileUpload: "FileSerializer", +} + class _Prefetcher: @staticmethod @@ -31,7 +37,11 @@ def _is_model_serializer(obj): for _, serializer in available_serializers: model = serializer.Meta.model - serializers[model] = serializer + if model in preferred_serializers: + if serializer.__name__ == preferred_serializers[model]: + serializers[model] = serializer + else: + serializers[model] = serializer # We add object->None to have a more uniform processing later on serializers[object] = None diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py index c9a87a8362d..5109bd068f0 100644 --- a/dojo/api_v2/serializers.py +++ b/dojo/api_v2/serializers.py @@ -77,6 +77,7 @@ Note_Type, NoteHistory, Notes, + Notification_Webhooks, Notifications, Product, Product_API_Scan_Configuration, @@ -226,9 +227,7 @@ def to_internal_value(self, data): substrings = re.findall(r'(?:"[^"]*"|[^",]+)', s) data_safe.extend(substrings) - internal_value = 
tagulous.utils.render_tags(data_safe) - - return internal_value + return tagulous.utils.render_tags(data_safe) def to_representation(self, value): if not isinstance(value, list): @@ -304,8 +303,7 @@ def __str__(self): return json.dumps( self, sort_keys=True, indent=4, separators=(",", ": "), ) - else: - return json.dumps(self) + return json.dumps(self) class RequestResponseSerializerField(serializers.ListSerializer): @@ -429,6 +427,7 @@ class Meta: class UserSerializer(serializers.ModelSerializer): date_joined = serializers.DateTimeField(read_only=True) last_login = serializers.DateTimeField(read_only=True, allow_null=True) + email = serializers.EmailField(required=True) password = serializers.CharField( write_only=True, style={"input_type": "password"}, @@ -549,14 +548,14 @@ def validate(self, data): msg = "Only superusers are allowed to add or edit superusers." raise ValidationError(msg) - if ( - self.context["request"].method in ["PATCH", "PUT"] - and "password" in data - ): + if self.context["request"].method in ["PATCH", "PUT"] and "password" in data: msg = "Update of password though API is not allowed" raise ValidationError(msg) - else: - return super().validate(data) + + if self.context["request"].method == "POST" and "password" not in data: + msg = "Passwords must be supplied for new users" + raise ValidationError(msg) + return super().validate(data) class UserContactInfoSerializer(serializers.ModelSerializer): @@ -821,6 +820,7 @@ def validate(self, data): ) raise ValidationError(msg) return data + return None class RawFileSerializer(serializers.ModelSerializer): @@ -1073,8 +1073,7 @@ def to_representation(self, data): "title": file.title, }, ) - new_data = {"engagement_id": engagement.id, "files": new_files} - return new_data + return {"engagement_id": engagement.id, "files": new_files} class EngagementCheckListSerializer(serializers.ModelSerializer): @@ -1146,8 +1145,7 @@ def run_validators(self, initial_data): if "finding, endpoint must make a unique 
set" in str(exc): msg = "This endpoint-finding relation already exists" raise serializers.ValidationError(msg) from exc - else: - raise + raise def create(self, validated_data): endpoint = validated_data.get("endpoint") @@ -1160,8 +1158,7 @@ def create(self, validated_data): if "finding, endpoint must make a unique set" in str(ie): msg = "This endpoint-finding relation already exists" raise serializers.ValidationError(msg) - else: - raise + raise status.mitigated = validated_data.get("mitigated", False) status.false_positive = validated_data.get("false_positive", False) status.out_of_scope = validated_data.get("out_of_scope", False) @@ -1177,8 +1174,7 @@ def update(self, instance, validated_data): if "finding, endpoint must make a unique set" in str(ie): msg = "This endpoint-finding relation already exists" raise serializers.ValidationError(msg) - else: - raise + raise class EndpointSerializer(TaggitSerializer, serializers.ModelSerializer): @@ -1439,8 +1435,7 @@ def to_representation(self, data): "title": file.title, }, ) - new_data = {"test_id": test.id, "files": new_files} - return new_data + return {"test_id": test.id, "files": new_files} class TestImportFindingActionSerializer(serializers.ModelSerializer): @@ -1467,7 +1462,8 @@ class RiskAcceptanceSerializer(serializers.ModelSerializer): def create(self, validated_data): instance = super().create(validated_data) - add_findings_to_risk_acceptance(instance, instance.accepted_findings.all()) + user = getattr(self.context.get("request", None), "user", None) + add_findings_to_risk_acceptance(user, instance, instance.accepted_findings.all()) return instance def update(self, instance, validated_data): @@ -1481,11 +1477,12 @@ def update(self, instance, validated_data): findings_to_remove = Finding.objects.filter(id__in=[x.id for x in findings_to_remove]) # Make the update in the database instance = super().update(instance, validated_data) + user = getattr(self.context.get("request", None), "user", None) # Add the new 
findings - add_findings_to_risk_acceptance(instance, findings_to_add) + add_findings_to_risk_acceptance(user, instance, findings_to_add) # Remove the ones that were not present in the payload for finding in findings_to_remove: - remove_finding_from_risk_acceptance(instance, finding) + remove_finding_from_risk_acceptance(user, instance, finding) return instance @extend_schema_field(serializers.CharField()) @@ -1696,8 +1693,7 @@ def get_related_fields(self, obj): return FindingRelatedFieldsSerializer( required=False, ).to_representation(obj) - else: - return None + return None def get_display_status(self, obj) -> str: return obj.status() @@ -1741,8 +1737,7 @@ def update(self, instance, validated_data): # not sure why we are returning a tag_object, but don't want to change # too much now as we're just fixing a bug - tag_object = self._save_tags(instance, to_be_tagged) - return tag_object + return self._save_tags(instance, to_be_tagged) def validate(self, data): if self.context["request"].method == "PATCH": @@ -1878,8 +1873,7 @@ def create(self, validated_data): # not sure why we are returning a tag_object, but don't want to change # too much now as we're just fixing a bug - tag_object = self._save_tags(new_finding, to_be_tagged) - return tag_object + return self._save_tags(new_finding, to_be_tagged) def validate(self, data): if "reporter" not in data: @@ -2795,8 +2789,7 @@ def to_representation(self, data): "title": file.title, }, ) - new_data = {"finding_id": finding.id, "files": new_files} - return new_data + return {"finding_id": finding.id, "files": new_files} class FindingCloseSerializer(serializers.ModelSerializer): @@ -3053,10 +3046,9 @@ class QuestionnaireQuestionSerializer(serializers.ModelSerializer): def to_representation(self, instance): if isinstance(instance, TextQuestion): return TextQuestionSerializer(instance=instance).data - elif isinstance(instance, ChoiceQuestion): + if isinstance(instance, ChoiceQuestion): return 
ChoiceQuestionSerializer(instance=instance).data - else: - return QuestionSerializer(instance=instance).data + return QuestionSerializer(instance=instance).data class Meta: model = Question @@ -3093,10 +3085,9 @@ class QuestionnaireAnswerSerializer(serializers.ModelSerializer): def to_representation(self, instance): if isinstance(instance, TextAnswer): return TextAnswerSerializer(instance=instance).data - elif isinstance(instance, ChoiceAnswer): + if isinstance(instance, ChoiceAnswer): return ChoiceAnswerSerializer(instance=instance).data - else: - return AnswerSerializer(instance=instance).data + return AnswerSerializer(instance=instance).data class Meta: model = Answer @@ -3170,5 +3161,10 @@ def create(self, validated_data): if 'duplicate key value violates unique constraint "dojo_announcement_pkey"' in str(e): msg = "No more than one Announcement is allowed" raise serializers.ValidationError(msg) - else: - raise + raise + + +class NotificationWebhooksSerializer(serializers.ModelSerializer): + class Meta: + model = Notification_Webhooks + fields = "__all__" diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py index 05d16521069..9e8368d6277 100644 --- a/dojo/api_v2/views.py +++ b/dojo/api_v2/views.py @@ -111,6 +111,7 @@ Network_Locations, Note_Type, Notes, + Notification_Webhooks, Notifications, Product, Product_API_Scan_Configuration, @@ -172,6 +173,33 @@ logger = logging.getLogger(__name__) +def schema_with_prefetch() -> dict: + return { + "list": extend_schema( + parameters=[ + OpenApiParameter( + "prefetch", + OpenApiTypes.STR, + OpenApiParameter.QUERY, + required=False, + description="List of fields for which to prefetch model instances and add those to the response", + ), + ], + ), + "retrieve": extend_schema( + parameters=[ + OpenApiParameter( + "prefetch", + OpenApiTypes.STR, + OpenApiParameter.QUERY, + required=False, + description="List of fields for which to prefetch model instances and add those to the response", + ), + ], + ), + } + + class 
DojoOpenApiJsonRenderer(OpenApiJsonRenderer2): def get_indent(self, accepted_media_type, renderer_context): if accepted_media_type and "indent" in accepted_media_type: @@ -211,30 +239,7 @@ def get_queryset(self): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class DojoGroupViewSet( PrefetchDojoModelViewSet, ): @@ -252,30 +257,7 @@ def get_queryset(self): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class DojoGroupMemberViewSet( PrefetchDojoModelViewSet, ): @@ -301,6 +283,7 @@ def partial_update(self, request, pk=None): # Authorization: superuser +@extend_schema_view(**schema_with_prefetch()) class GlobalRoleViewSet( PrefetchDojoModelViewSet, ): @@ -315,6 +298,7 @@ def get_queryset(self): # Authorization: object-based +# @extend_schema_view(**schema_with_prefetch()) class EndPointViewSet( PrefetchDojoModelViewSet, ): @@ -370,6 +354,7 @@ def generate_report(self, 
request, pk=None): # Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class EndpointStatusViewSet( PrefetchDojoModelViewSet, ): @@ -398,6 +383,7 @@ def get_queryset(self): # Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class EngagementViewSet( PrefetchDojoModelViewSet, ra_api.AcceptedRisksMixin, @@ -651,6 +637,7 @@ def download_file(self, request, file_id, pk=None): return generate_file_response(file_object) +@extend_schema_view(**schema_with_prefetch()) class RiskAcceptanceViewSet( PrefetchDojoModelViewSet, ): @@ -668,7 +655,7 @@ def destroy(self, request, pk=None): instance = self.get_object() # Remove any findings on the risk acceptance for finding in instance.accepted_findings.all(): - remove_finding_from_risk_acceptance(instance, finding) + remove_finding_from_risk_acceptance(request.user, instance, finding) # return the response of the object being deleted return super().destroy(request, pk=pk) @@ -716,6 +703,7 @@ def download_proof(self, request, pk=None): # These are technologies in the UI and the API! 
# Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class AppAnalysisViewSet( PrefetchDojoModelViewSet, ): @@ -734,6 +722,7 @@ def get_queryset(self): # Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class CredentialsViewSet( PrefetchDojoModelViewSet, ): @@ -747,6 +736,7 @@ def get_queryset(self): # Authorization: configuration +@extend_schema_view(**schema_with_prefetch()) class CredentialsMappingViewSet( PrefetchDojoModelViewSet, ): @@ -878,8 +868,7 @@ def get_queryset(self): def get_serializer_class(self): if self.request and self.request.method == "POST": return serializers.FindingCreateSerializer - else: - return serializers.FindingSerializer + return serializers.FindingSerializer @extend_schema( methods=["POST"], @@ -1226,10 +1215,9 @@ def remove_tags(self, request, pk=None): {"success": "Tag(s) Removed"}, status=status.HTTP_204_NO_CONTENT, ) - else: - return Response( - delete_tags.errors, status=status.HTTP_400_BAD_REQUEST, - ) + return Response( + delete_tags.errors, status=status.HTTP_400_BAD_REQUEST, + ) @extend_schema( responses={ @@ -1367,10 +1355,9 @@ def _add_metadata(self, request, finding): ) return Response(data=metadata_data.data, status=status.HTTP_200_OK) - else: - return Response( - metadata_data.errors, status=status.HTTP_400_BAD_REQUEST, - ) + return Response( + metadata_data.errors, status=status.HTTP_400_BAD_REQUEST, + ) def _remove_metadata(self, request, finding): name = request.query_params.get("name", None) @@ -1457,13 +1444,13 @@ def metadata(self, request, pk=None): if request.method == "GET": return self._get_metadata(request, finding) - elif request.method == "POST": + if request.method == "POST": return self._add_metadata(request, finding) - elif request.method == "PUT": + if request.method == "PUT": return self._edit_metadata(request, finding) - elif request.method == "PATCH": + if request.method == "PATCH": return self._edit_metadata(request, finding) - elif request.method == 
"DELETE": + if request.method == "DELETE": return self._remove_metadata(request, finding) return Response( @@ -1486,6 +1473,7 @@ def get_queryset(self): # Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class JiraIssuesViewSet( PrefetchDojoModelViewSet, ): @@ -1511,6 +1499,7 @@ def get_queryset(self): # Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class JiraProjectViewSet( PrefetchDojoModelViewSet, ): @@ -1573,6 +1562,7 @@ def get_queryset(self): # Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class ProductAPIScanConfigurationViewSet( PrefetchDojoModelViewSet, ): @@ -1599,30 +1589,7 @@ def get_queryset(self): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class DojoMetaViewSet( PrefetchDojoModelViewSet, ): @@ -1646,30 +1613,7 @@ def get_queryset(self): return get_authorized_dojo_meta(Permissions.Product_View) -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) 
+@extend_schema_view(**schema_with_prefetch()) class ProductViewSet( prefetch.PrefetchListMixin, prefetch.PrefetchRetrieveMixin, @@ -1745,30 +1689,7 @@ def generate_report(self, request, pk=None): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class ProductMemberViewSet( PrefetchDojoModelViewSet, ): @@ -1796,30 +1717,7 @@ def partial_update(self, request, pk=None): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class ProductGroupViewSet( PrefetchDojoModelViewSet, ): @@ -1847,30 +1745,7 @@ def partial_update(self, request, pk=None): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ 
- OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class ProductTypeViewSet( PrefetchDojoModelViewSet, ): @@ -1955,30 +1830,7 @@ def generate_report(self, request, pk=None): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class ProductTypeMemberViewSet( PrefetchDojoModelViewSet, ): @@ -2020,30 +1872,7 @@ def partial_update(self, request, pk=None): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class ProductTypeGroupViewSet( PrefetchDojoModelViewSet, ): @@ -2071,6 +1900,7 @@ def partial_update(self, request, pk=None): # Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class StubFindingsViewSet( PrefetchDojoModelViewSet, ): @@ -2091,8 
+1921,7 @@ def get_queryset(self): def get_serializer_class(self): if self.request and self.request.method == "POST": return serializers.StubFindingCreateSerializer - else: - return serializers.StubFindingSerializer + return serializers.StubFindingSerializer # Authorization: authenticated, configuration @@ -2109,6 +1938,7 @@ def get_queryset(self): # Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class TestsViewSet( PrefetchDojoModelViewSet, ra_api.AcceptedRisksMixin, @@ -2144,8 +1974,7 @@ def get_serializer_class(self): if self.action == "accept_risks": return ra_api.AcceptedRiskSerializer return serializers.TestCreateSerializer - else: - return serializers.TestSerializer + return serializers.TestSerializer @extend_schema( request=serializers.ReportGenerateOptionSerializer, @@ -2316,30 +2145,7 @@ def get_queryset(self): return Test_Type.objects.all().order_by("id") -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class TestImportViewSet( PrefetchDojoModelViewSet, ): @@ -2398,6 +2204,7 @@ def get_queryset(self): # Authorization: configurations +@extend_schema_view(**schema_with_prefetch()) class ToolConfigurationsViewSet( PrefetchDojoModelViewSet, ): @@ -2418,6 +2225,7 @@ def get_queryset(self): # Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) class ToolProductSettingsViewSet( PrefetchDojoModelViewSet, ): @@ -2502,30 +2310,7 @@ def destroy(self, request, *args, **kwargs): # Authorization: 
superuser -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class UserContactInfoViewSet( PrefetchDojoModelViewSet, ): @@ -2680,30 +2465,7 @@ def get_queryset(self): # Authorization: object-based -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) class LanguageViewSet( PrefetchDojoModelViewSet, ): @@ -3147,30 +2909,7 @@ def get_queryset(self): # Authorization: superuser -@extend_schema_view( - list=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), - retrieve=extend_schema( - parameters=[ - OpenApiParameter( - "prefetch", - OpenApiTypes.STR, - OpenApiParameter.QUERY, - required=False, - description="List of fields for which to prefetch model instances and add those to the response", - ), - ], - ), -) +@extend_schema_view(**schema_with_prefetch()) 
class NotificationsViewSet( PrefetchDojoModelViewSet, ): @@ -3184,6 +2923,7 @@ def get_queryset(self): return Notifications.objects.all().order_by("id") +@extend_schema_view(**schema_with_prefetch()) class EngagementPresetsViewset( PrefetchDojoModelViewSet, ): @@ -3303,6 +3043,7 @@ def get_queryset(self): return Engagement_Survey.objects.all().order_by("id") +@extend_schema_view(**schema_with_prefetch()) class QuestionnaireAnsweredSurveyViewSet( prefetch.PrefetchListMixin, prefetch.PrefetchRetrieveMixin, @@ -3332,3 +3073,13 @@ class AnnouncementViewSet( def get_queryset(self): return Announcement.objects.all().order_by("id") + + +class NotificationWebhooksViewSet( + PrefetchDojoModelViewSet, +): + serializer_class = serializers.NotificationWebhooksSerializer + queryset = Notification_Webhooks.objects.all() + filter_backends = (DjangoFilterBackend,) + filterset_fields = "__all__" + permission_classes = (permissions.IsSuperUser, DjangoModelPermissions) # TODO: add permission also for other users diff --git a/dojo/apps.py b/dojo/apps.py index e7a39ab5433..fd3a06575fd 100644 --- a/dojo/apps.py +++ b/dojo/apps.py @@ -92,8 +92,7 @@ def get_model_fields_with_extra(model, extra_fields=()): def get_model_fields(default_fields, extra_fields=()): - combined = default_fields + extra_fields - return combined + return default_fields + extra_fields def get_model_default_fields(model): diff --git a/dojo/authorization/authorization.py b/dojo/authorization/authorization.py index a542d7c6e01..8f013b60061 100644 --- a/dojo/authorization/authorization.py +++ b/dojo/authorization/authorization.py @@ -66,7 +66,7 @@ def user_has_permission(user, obj, permission): if role_has_permission(product_type_group.role.id, permission): return True return False - elif ( + if ( isinstance(obj, Product) and permission.value >= Permissions.Product_View.value ): @@ -87,51 +87,51 @@ def user_has_permission(user, obj, permission): if role_has_permission(product_group.role.id, permission): return True 
return False - elif ( + if ( isinstance(obj, Engagement) and permission in Permissions.get_engagement_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Test) and permission in Permissions.get_test_permissions() ): return user_has_permission(user, obj.engagement.product, permission) - elif ( + if ( isinstance(obj, Finding) or isinstance(obj, Stub_Finding) ) and permission in Permissions.get_finding_permissions(): return user_has_permission( user, obj.test.engagement.product, permission, ) - elif ( + if ( isinstance(obj, Finding_Group) and permission in Permissions.get_finding_group_permissions() ): return user_has_permission( user, obj.test.engagement.product, permission, ) - elif ( + if ( isinstance(obj, Endpoint) and permission in Permissions.get_endpoint_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Languages) and permission in Permissions.get_language_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, App_Analysis) and permission in Permissions.get_technology_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Product_API_Scan_Configuration) and permission in Permissions.get_product_api_scan_configuration_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Product_Type_Member) and permission in Permissions.get_product_type_member_permissions() ): @@ -140,9 +140,8 @@ def user_has_permission(user, obj, permission): return obj.user == user or user_has_permission( user, obj.product_type, permission, ) - else: - return user_has_permission(user, obj.product_type, permission) - elif ( + return user_has_permission(user, obj.product_type, permission) + if ( isinstance(obj, Product_Member) and permission in Permissions.get_product_member_permissions() ): @@ -151,19 +150,18 @@ def 
user_has_permission(user, obj, permission): return obj.user == user or user_has_permission( user, obj.product, permission, ) - else: - return user_has_permission(user, obj.product, permission) - elif ( + return user_has_permission(user, obj.product, permission) + if ( isinstance(obj, Product_Type_Group) and permission in Permissions.get_product_type_group_permissions() ): return user_has_permission(user, obj.product_type, permission) - elif ( + if ( isinstance(obj, Product_Group) and permission in Permissions.get_product_group_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Dojo_Group) and permission in Permissions.get_group_permissions() ): @@ -173,7 +171,7 @@ def user_has_permission(user, obj, permission): return group_member is not None and role_has_permission( group_member.role.id, permission, ) - elif ( + if ( isinstance(obj, Dojo_Group_Member) and permission in Permissions.get_group_member_permissions() ): @@ -182,9 +180,8 @@ def user_has_permission(user, obj, permission): return obj.user == user or user_has_permission( user, obj.group, permission, ) - else: - return user_has_permission(user, obj.group, permission) - elif ( + return user_has_permission(user, obj.group, permission) + if ( isinstance(obj, Cred_Mapping) and permission in Permissions.get_credential_permissions() ): @@ -202,9 +199,9 @@ def user_has_permission(user, obj, permission): return user_has_permission( user, obj.finding.test.engagement.product, permission, ) - else: - msg = f"No authorization implemented for class {type(obj).__name__} and permission {permission}" - raise NoAuthorizationImplementedError(msg) + return None + msg = f"No authorization implemented for class {type(obj).__name__} and permission {permission}" + raise NoAuthorizationImplementedError(msg) def user_has_global_permission(user, permission): diff --git a/dojo/cred/queries.py b/dojo/cred/queries.py index 4dd14385a06..28419772328 100644 --- a/dojo/cred/queries.py 
+++ b/dojo/cred/queries.py @@ -44,8 +44,6 @@ def get_authorized_cred_mappings(permission, queryset=None): product__member=Exists(authorized_product_roles), product__prod_type__authorized_group=Exists(authorized_product_type_groups), product__authorized_group=Exists(authorized_product_groups)) - cred_mappings = cred_mappings.filter( + return cred_mappings.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - - return cred_mappings diff --git a/dojo/cred/views.py b/dojo/cred/views.py index 31f923748b3..2fc373c3ac9 100644 --- a/dojo/cred/views.py +++ b/dojo/cred/views.py @@ -641,10 +641,8 @@ def delete_cred_controller(request, destination_url, id, ttid): if destination_url == "cred": return HttpResponseRedirect(reverse(destination_url)) - else: - return HttpResponseRedirect(reverse(destination_url, args=(id, ))) - else: - tform = CredMappingForm(instance=cred) + return HttpResponseRedirect(reverse(destination_url, args=(id, ))) + tform = CredMappingForm(instance=cred) add_breadcrumb(title="Delete Credential", top_level=False, request=request) product_tab = None diff --git a/dojo/db_migrations/0215_webhooks_notifications.py b/dojo/db_migrations/0215_webhooks_notifications.py new file mode 100644 index 00000000000..cc65ce43f1b --- /dev/null +++ b/dojo/db_migrations/0215_webhooks_notifications.py @@ -0,0 +1,130 @@ +# Generated by Django 5.0.8 on 2024-08-16 17:07 + +import django.db.models.deletion +import multiselectfield.db.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0214_test_type_dynamically_generated'), + ] + + operations = [ + migrations.AddField( + model_name='system_settings', + name='enable_webhooks_notifications', + field=models.BooleanField(default=False, verbose_name='Enable Webhook notifications'), + ), + migrations.AddField( + model_name='system_settings', + 
name='webhooks_notifications_timeout', + field=models.IntegerField(default=10, help_text='How many seconds will DefectDojo waits for response from webhook endpoint'), + ), + migrations.AlterField( + model_name='notifications', + name='auto_close_engagement', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='close_engagement', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='code_review', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='engagement_added', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='jira_update', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='JIRA sync happens in the background, errors will be shown as notifications/alerts so make sure to subscribe', max_length=33, verbose_name='JIRA problems'), + ), + migrations.AlterField( + model_name='notifications', + name='other', + 
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='product_added', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='product_type_added', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='review_requested', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='risk_acceptance_expiration', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Get notified of (upcoming) Risk Acceptance expiries', max_length=33, verbose_name='Risk Acceptance Expiration'), + ), + migrations.AlterField( + model_name='notifications', + name='scan_added', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Triggered whenever an (re-)import has been done that created/updated/closed findings.', max_length=33), 
+ ), + migrations.AlterField( + model_name='notifications', + name='scan_added_empty', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=[], help_text='Triggered whenever an (re-)import has been done (even if that created/updated/closed no findings).', max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='sla_breach', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Get notified of (upcoming) SLA breaches', max_length=33, verbose_name='SLA breach'), + ), + migrations.AlterField( + model_name='notifications', + name='sla_breach_combined', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Get notified of (upcoming) SLA breaches (a message per project)', max_length=33, verbose_name='SLA breach (combined)'), + ), + migrations.AlterField( + model_name='notifications', + name='stale_engagement', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='test_added', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='upcoming_engagement', + 
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='user_mentioned', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.CreateModel( + name='Notification_Webhooks', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(default='', help_text='Name of the incoming webhook', max_length=100, unique=True)), + ('url', models.URLField(default='', help_text='The full URL of the incoming webhook')), + ('header_name', models.CharField(blank=True, default='', help_text='Name of the header required for interacting with Webhook endpoint', max_length=100, null=True)), + ('header_value', models.CharField(blank=True, default='', help_text='Content of the header required for interacting with Webhook endpoint', max_length=100, null=True)), + ('status', models.CharField(choices=[('active', 'Active'), ('active_tmp', 'Active but 5xx (or similar) error detected'), ('inactive_tmp', 'Temporary inactive because of 5xx (or similar) error'), ('inactive_permanent', 'Permanently inactive')], default='active', editable=False, help_text='Status of the incoming webhook', max_length=20)), + ('first_error', models.DateTimeField(blank=True, editable=False, help_text='If endpoint is active, when error happened first time', null=True)), + ('last_error', models.DateTimeField(blank=True, editable=False, help_text='If endpoint is active, when error happened last time', null=True)), + ('note', models.CharField(blank=True, default='', editable=False, help_text='Description of the latest error', 
max_length=1000, null=True)), + ('owner', models.ForeignKey(blank=True, help_text='Owner/receiver of notification, if empty processed as system notification', null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.dojo_user')), + ], + ), + ] diff --git a/dojo/decorators.py b/dojo/decorators.py index 129106c74de..b6902b8dc10 100644 --- a/dojo/decorators.py +++ b/dojo/decorators.py @@ -43,8 +43,7 @@ def __wrapper__(*args, **kwargs): countdown = kwargs.pop("countdown", 0) if we_want_async(*args, func=func, **kwargs): return func.apply_async(args=args, kwargs=kwargs, countdown=countdown) - else: - return func(*args, **kwargs) + return func(*args, **kwargs) return __wrapper__ @@ -78,8 +77,7 @@ def __wrapper__(*args, **kwargs): if _func is None: # decorator called without parameters return dojo_model_to_id_internal - else: - return dojo_model_to_id_internal(_func) + return dojo_model_to_id_internal(_func) # decorator with parameters needs another wrapper layer @@ -123,8 +121,7 @@ def __wrapper__(*args, **kwargs): if _func is None: # decorator called without parameters return dojo_model_from_id_internal - else: - return dojo_model_from_id_internal(_func) + return dojo_model_from_id_internal(_func) def get_parameter_froms_args_kwargs(args, kwargs, parameter): diff --git a/dojo/endpoint/queries.py b/dojo/endpoint/queries.py index 581feefc13b..684eeab7b1a 100644 --- a/dojo/endpoint/queries.py +++ b/dojo/endpoint/queries.py @@ -53,12 +53,10 @@ def get_authorized_endpoints(permission, queryset=None, user=None): product__member=Exists(authorized_product_roles), product__prod_type__authorized_group=Exists(authorized_product_type_groups), product__authorized_group=Exists(authorized_product_groups)) - endpoints = endpoints.filter( + return endpoints.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - return endpoints - def get_authorized_endpoint_status(permission, 
queryset=None, user=None): @@ -101,8 +99,6 @@ def get_authorized_endpoint_status(permission, queryset=None, user=None): endpoint__product__member=Exists(authorized_product_roles), endpoint__product__prod_type__authorized_group=Exists(authorized_product_type_groups), endpoint__product__authorized_group=Exists(authorized_product_groups)) - endpoint_status = endpoint_status.filter( + return endpoint_status.filter( Q(endpoint__product__prod_type__member=True) | Q(endpoint__product__member=True) | Q(endpoint__product__prod_type__authorized_group=True) | Q(endpoint__product__authorized_group=True)) - - return endpoint_status diff --git a/dojo/endpoint/utils.py b/dojo/endpoint/utils.py index be1c63fb0c0..d5c378e5e97 100644 --- a/dojo/endpoint/utils.py +++ b/dojo/endpoint/utils.py @@ -79,17 +79,16 @@ def endpoint_get_or_create(**kwargs): count = qs.count() if count == 0: return Endpoint.objects.get_or_create(**kwargs) - elif count == 1: - return qs.order_by("id").first(), False - else: - logger.warning( - f"Endpoints in your database are broken. " - f"Please access {reverse('endpoint_migrate')} and migrate them to new format or remove them.", - ) - # Get the oldest endpoint first, and return that instead - # a datetime is not captured on the endpoint model, so ID - # will have to work here instead + if count == 1: return qs.order_by("id").first(), False + logger.warning( + f"Endpoints in your database are broken. 
" + f"Please access {reverse('endpoint_migrate')} and migrate them to new format or remove them.", + ) + # Get the oldest endpoint first, and return that instead + # a datetime is not captured on the endpoint model, so ID + # will have to work here instead + return qs.order_by("id").first(), False def clean_hosts_run(apps, change): @@ -325,7 +324,7 @@ def endpoint_meta_import(file, product, create_endpoints, create_tags, create_me 'The column "hostname" must be present to map host to Endpoint.', extra_tags="alert-danger") return HttpResponseRedirect(reverse("import_endpoint_meta", args=(product.id, ))) - elif origin == "API": + if origin == "API": msg = 'The column "hostname" must be present to map host to Endpoint.' raise ValidationError(msg) @@ -361,14 +360,14 @@ def endpoint_meta_import(file, product, create_endpoints, create_tags, create_me for tag in existing_tags: if item[0] not in tag: continue - else: - # found existing. Update it - existing_tags.remove(tag) - break + # found existing. 
Update it + existing_tags.remove(tag) + break existing_tags += [item[0] + ":" + item[1]] # if tags are not supposed to be added, this value remain unchanged endpoint.tags = existing_tags endpoint.save() + return None def remove_broken_endpoint_statuses(apps): diff --git a/dojo/endpoint/views.py b/dojo/endpoint/views.py index 571f4989ec2..06ee7ac24a1 100644 --- a/dojo/endpoint/views.py +++ b/dojo/endpoint/views.py @@ -98,9 +98,8 @@ def get_endpoint_ids(endpoints): key = f"{e.host}-{e.product.id}" if key in hosts: continue - else: - hosts.append(key) - ids.append(e.id) + hosts.append(key) + ids.append(e.id) return ids @@ -307,8 +306,7 @@ def add_meta_data(request, eid): extra_tags="alert-success") if "add_another" in request.POST: return HttpResponseRedirect(reverse("add_endpoint_meta_data", args=(eid,))) - else: - return HttpResponseRedirect(reverse("view_endpoint", args=(eid,))) + return HttpResponseRedirect(reverse("view_endpoint", args=(eid,))) else: form = DojoMetaDataForm() diff --git a/dojo/engagement/queries.py b/dojo/engagement/queries.py index 9d8e9b6ae41..97eeb31bdfa 100644 --- a/dojo/engagement/queries.py +++ b/dojo/engagement/queries.py @@ -39,8 +39,6 @@ def get_authorized_engagements(permission): product__member=Exists(authorized_product_roles), product__prod_type__authorized_group=Exists(authorized_product_type_groups), product__authorized_group=Exists(authorized_product_groups)).order_by("id") - engagements = engagements.filter( + return engagements.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - - return engagements diff --git a/dojo/engagement/signals.py b/dojo/engagement/signals.py index c2f09c9abbd..7b95d6fe87b 100644 --- a/dojo/engagement/signals.py +++ b/dojo/engagement/signals.py @@ -16,7 +16,7 @@ def engagement_post_save(sender, instance, created, **kwargs): if created: title = _('Engagement created for "%(product)s": %(name)s') % 
{"product": instance.product, "name": instance.name} create_notification(event="engagement_added", title=title, engagement=instance, product=instance.product, - url=reverse("view_engagement", args=(instance.id,))) + url=reverse("view_engagement", args=(instance.id,)), url_api=reverse("engagement-detail", args=(instance.id,))) @receiver(pre_save, sender=Engagement) diff --git a/dojo/engagement/views.py b/dojo/engagement/views.py index 777a5f7a118..94e7cec960d 100644 --- a/dojo/engagement/views.py +++ b/dojo/engagement/views.py @@ -166,15 +166,13 @@ def get_filtered_engagements(request, view): filter_string_matching = get_system_setting("filter_string_matching", False) filter_class = EngagementDirectFilterWithoutObjectLookups if filter_string_matching else EngagementDirectFilter - engagements = filter_class(request.GET, queryset=engagements) - - return engagements + return filter_class(request.GET, queryset=engagements) def get_test_counts(engagements): # Get the test counts per engagement. As a separate query, this is much # faster than annotating the above `engagements` query. 
- engagement_test_counts = { + return { test["engagement"]: test["test_count"] for test in Test.objects.filter( engagement__in=engagements, @@ -184,7 +182,6 @@ def get_test_counts(engagements): test_count=Count("engagement"), ) } - return engagement_test_counts def engagements(request, view): @@ -304,9 +301,8 @@ def edit_engagement(request, eid): if "_Add Tests" in request.POST: return HttpResponseRedirect( reverse("add_tests", args=(engagement.id, ))) - else: - return HttpResponseRedirect( - reverse("view_engagement", args=(engagement.id, ))) + return HttpResponseRedirect( + reverse("view_engagement", args=(engagement.id, ))) else: logger.debug(form.errors) @@ -404,12 +400,11 @@ def copy_engagement(request, eid): recipients=[engagement.lead], icon="exclamation-triangle") return redirect_to_return_url_or_else(request, reverse("view_engagements", args=(product.id, ))) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to copy engagement, please try again.", - extra_tags="alert-danger") + messages.add_message( + request, + messages.ERROR, + "Unable to copy engagement, please try again.", + extra_tags="alert-danger") product_tab = Product_Tab(product, title="Copy Engagement", tab="engagements") return render(request, "dojo/copy_object.html", { @@ -427,8 +422,7 @@ def get_template(self): return "dojo/view_eng.html" def get_risks_accepted(self, eng): - risks_accepted = eng.risk_acceptance.all().select_related("owner").annotate(accepted_findings_count=Count("accepted_findings__id")) - return risks_accepted + return eng.risk_acceptance.all().select_related("owner").annotate(accepted_findings_count=Count("accepted_findings__id")) def get_filtered_tests( self, @@ -673,10 +667,10 @@ def add_tests(request, eid): if "_Add Another Test" in request.POST: return HttpResponseRedirect( reverse("add_tests", args=(eng.id, ))) - elif "_Add Findings" in request.POST: + if "_Add Findings" in request.POST: return HttpResponseRedirect( reverse("add_findings", 
args=(new_test.id, ))) - elif "_Finished" in request.POST: + if "_Finished" in request.POST: return HttpResponseRedirect( reverse("view_engagement", args=(eng.id, ))) else: @@ -751,8 +745,7 @@ def get_form( """ if request.method == "POST": return ImportScanForm(request.POST, request.FILES, **kwargs) - else: - return ImportScanForm(**kwargs) + return ImportScanForm(**kwargs) def get_credential_form( self, @@ -766,18 +759,17 @@ def get_credential_form( """ if request.method == "POST": return CredMappingForm(request.POST) - else: - # If the engagement is not present, return an empty form - if engagement is None: - return CredMappingForm() - # Otherwise get all creds in the associated engagement - return CredMappingForm( - initial={ - "cred_user_queryset": Cred_Mapping.objects.filter( - engagement=engagement, - ).order_by("cred_id"), - }, - ) + # If the engagement is not present, return an empty form + if engagement is None: + return CredMappingForm() + # Otherwise get all creds in the associated engagement + return CredMappingForm( + initial={ + "cred_user_queryset": Cred_Mapping.objects.filter( + engagement=engagement, + ).order_by("cred_id"), + }, + ) def get_jira_form( self, @@ -1250,7 +1242,7 @@ def add_risk_acceptance(request, eid, fid=None): findings = form.cleaned_data["accepted_findings"] - risk_acceptance = ra_helper.add_findings_to_risk_acceptance(risk_acceptance, findings) + risk_acceptance = ra_helper.add_findings_to_risk_acceptance(request.user, risk_acceptance, findings) messages.add_message( request, @@ -1360,7 +1352,7 @@ def view_edit_risk_acceptance(request, eid, raid, edit_mode=False): finding = get_object_or_404( Finding, pk=request.POST["remove_finding_id"]) - ra_helper.remove_finding_from_risk_acceptance(risk_acceptance, finding) + ra_helper.remove_finding_from_risk_acceptance(request.user, risk_acceptance, finding) messages.add_message( request, @@ -1391,7 +1383,7 @@ def view_edit_risk_acceptance(request, eid, raid, edit_mode=False): if not 
errors: findings = add_findings_form.cleaned_data["accepted_findings"] - ra_helper.add_findings_to_risk_acceptance(risk_acceptance, findings) + ra_helper.add_findings_to_risk_acceptance(request.user, risk_acceptance, findings) messages.add_message( request, @@ -1401,8 +1393,7 @@ def view_edit_risk_acceptance(request, eid, raid, edit_mode=False): if not errors: logger.debug("redirecting to return_url") return redirect_to_return_url_or_else(request, reverse("view_risk_acceptance", args=(eid, raid))) - else: - logger.error("errors found") + logger.error("errors found") else: if edit_mode: @@ -1549,8 +1540,7 @@ def upload_threatmodel(request, eid): @user_is_authorized(Engagement, Permissions.Engagement_View, "eid") def view_threatmodel(request, eid): eng = get_object_or_404(Engagement, pk=eid) - response = FileResponse(open(eng.tmodel_path, "rb")) - return response + return FileResponse(open(eng.tmodel_path, "rb")) @user_is_authorized(Engagement, Permissions.Engagement_View, "eid") @@ -1589,9 +1579,8 @@ def get_engagements(request): if not url: msg = "Please use the export button when exporting engagements" raise ValidationError(msg) - else: - if url.startswith("url="): - url = url[4:] + if url.startswith("url="): + url = url[4:] path_items = list(filter(None, re.split(r"/|\?", url))) diff --git a/dojo/filters.py b/dojo/filters.py index 1461966c19e..35ceb205938 100644 --- a/dojo/filters.py +++ b/dojo/filters.py @@ -331,8 +331,7 @@ def get_tags_model_from_field_name(field): def get_tags_label_from_model(model): if model: return f"Tags ({model.__name__.title()})" - else: - return "Tags (Unknown)" + return "Tags (Unknown)" def get_finding_filterset_fields(metrics=False, similar=False, filter_string_matching=False): @@ -780,6 +779,7 @@ def any(self, qs, name): self.start_date = _truncate(start_date - timedelta(days=1)) self.end_date = _truncate(now() + timedelta(days=1)) return qs.all() + return None def current_month(self, qs, name): self.start_date = local_tz.localize( 
@@ -1927,8 +1927,7 @@ def set_hash_codes(self, *args: list, **kwargs: dict): def filter_queryset(self, *args: list, **kwargs: dict): queryset = super().filter_queryset(*args, **kwargs) queryset = get_authorized_findings(Permissions.Finding_View, queryset, self.user) - queryset = queryset.exclude(pk=self.finding.pk) - return queryset + return queryset.exclude(pk=self.finding.pk) class SimilarFindingFilter(FindingFilter, SimilarFindingHelper): diff --git a/dojo/finding/queries.py b/dojo/finding/queries.py index 7f213805a49..47386e43f86 100644 --- a/dojo/finding/queries.py +++ b/dojo/finding/queries.py @@ -68,14 +68,12 @@ def get_authorized_findings(permission, queryset=None, user=None): test__engagement__product__member=Exists(authorized_product_roles), test__engagement__product__prod_type__authorized_group=Exists(authorized_product_type_groups), test__engagement__product__authorized_group=Exists(authorized_product_groups)) - findings = findings.filter( + return findings.filter( Q(test__engagement__product__prod_type__member=True) | Q(test__engagement__product__member=True) | Q(test__engagement__product__prod_type__authorized_group=True) | Q(test__engagement__product__authorized_group=True)) - return findings - def get_authorized_stub_findings(permission): user = get_current_user() @@ -101,14 +99,12 @@ def get_authorized_stub_findings(permission): test__engagement__product__member=Exists(authorized_product_roles), test__engagement__product__prod_type__authorized_group=Exists(authorized_product_type_groups), test__engagement__product__authorized_group=Exists(authorized_product_groups)).order_by("id") - findings = findings.filter( + return findings.filter( Q(test__engagement__product__prod_type__member=True) | Q(test__engagement__product__member=True) | Q(test__engagement__product__prod_type__authorized_group=True) | Q(test__engagement__product__authorized_group=True)) - return findings - def get_authorized_vulnerability_ids(permission, queryset=None, user=None): @@ 
-151,10 +147,8 @@ def get_authorized_vulnerability_ids(permission, queryset=None, user=None): finding__test__engagement__product__member=Exists(authorized_product_roles), finding__test__engagement__product__prod_type__authorized_group=Exists(authorized_product_type_groups), finding__test__engagement__product__authorized_group=Exists(authorized_product_groups)) - vulnerability_ids = vulnerability_ids.filter( + return vulnerability_ids.filter( Q(finding__test__engagement__product__prod_type__member=True) | Q(finding__test__engagement__product__member=True) | Q(finding__test__engagement__product__prod_type__authorized_group=True) | Q(finding__test__engagement__product__authorized_group=True)) - - return vulnerability_ids diff --git a/dojo/finding/views.py b/dojo/finding/views.py index 4b37ebc8a9a..ea5578ee460 100644 --- a/dojo/finding/views.py +++ b/dojo/finding/views.py @@ -311,31 +311,29 @@ def get_test_id(self): def filter_findings_by_object(self, findings: QuerySet[Finding]): if product_id := self.get_product_id(): return findings.filter(test__engagement__product__id=product_id) - elif engagement_id := self.get_engagement_id(): + if engagement_id := self.get_engagement_id(): return findings.filter(test__engagement=engagement_id) - elif test_id := self.get_test_id(): + if test_id := self.get_test_id(): return findings.filter(test=test_id) - else: - return findings + return findings def filter_findings_by_filter_name(self, findings: QuerySet[Finding]): filter_name = self.get_filter_name() if filter_name == "Open": return findings.filter(finding_helper.OPEN_FINDINGS_QUERY) - elif filter_name == "Verified": + if filter_name == "Verified": return findings.filter(finding_helper.VERIFIED_FINDINGS_QUERY) - elif filter_name == "Out of Scope": + if filter_name == "Out of Scope": return findings.filter(finding_helper.OUT_OF_SCOPE_FINDINGS_QUERY) - elif filter_name == "False Positive": + if filter_name == "False Positive": return 
findings.filter(finding_helper.FALSE_POSITIVE_FINDINGS_QUERY) - elif filter_name == "Inactive": + if filter_name == "Inactive": return findings.filter(finding_helper.INACTIVE_FINDINGS_QUERY) - elif filter_name == "Accepted": + if filter_name == "Accepted": return findings.filter(finding_helper.ACCEPTED_FINDINGS_QUERY) - elif filter_name == "Closed": + if filter_name == "Closed": return findings.filter(finding_helper.CLOSED_FINDINGS_QUERY) - else: - return findings + return findings def filter_findings_by_form(self, request: HttpRequest, findings: QuerySet[Finding]): # Set up the args for the form @@ -358,9 +356,7 @@ def filter_findings_by_form(self, request: HttpRequest, findings: QuerySet[Findi def get_filtered_findings(self): findings = get_authorized_findings(Permissions.Finding_View).order_by(self.get_order_by()) findings = self.filter_findings_by_object(findings) - findings = self.filter_findings_by_filter_name(findings) - - return findings + return self.filter_findings_by_filter_name(findings) def get_fully_filtered_findings(self, request: HttpRequest): findings = self.get_filtered_findings() @@ -991,10 +987,10 @@ def process_finding_form(self, request: HttpRequest, finding: Finding, context: # Handle risk exception related things if "risk_accepted" in context["form"].cleaned_data and context["form"]["risk_accepted"].value(): if new_finding.test.engagement.product.enable_simple_risk_acceptance: - ra_helper.simple_risk_accept(new_finding, perform_save=False) + ra_helper.simple_risk_accept(request.user, new_finding, perform_save=False) else: if new_finding.risk_accepted: - ra_helper.risk_unaccept(new_finding, perform_save=False) + ra_helper.risk_unaccept(request.user, new_finding, perform_save=False) # Save and add new endpoints finding_helper.add_endpoints(new_finding, context["form"]) # Remove unrelated endpoints @@ -1017,9 +1013,8 @@ def process_finding_form(self, request: HttpRequest, finding: Finding, context: ) return finding, request, True - else: - 
add_error_message_to_response("The form has errors, please correct them below.") - add_field_errors_to_response(context["form"]) + add_error_message_to_response("The form has errors, please correct them below.") + add_field_errors_to_response(context["form"]) return finding, request, False @@ -1074,8 +1069,7 @@ def process_jira_form(self, request: HttpRequest, finding: Finding, context: dic ) return request, True, push_to_jira - else: - add_field_errors_to_response(context["jform"]) + add_field_errors_to_response(context["jform"]) return request, False, False @@ -1090,8 +1084,7 @@ def process_github_form(self, request: HttpRequest, finding: Finding, context: d add_external_issue(finding, "github") return request, True - else: - add_field_errors_to_response(context["gform"]) + add_field_errors_to_response(context["gform"]) return request, False @@ -1270,7 +1263,7 @@ def close_finding(request, fid): status.last_modified = timezone.now() status.save() # Clear the risk acceptance, if present - ra_helper.risk_unaccept(finding) + ra_helper.risk_unaccept(request.user, finding) # Manage the jira status changes push_to_jira = False @@ -1316,10 +1309,9 @@ def close_finding(request, fid): return HttpResponseRedirect( reverse("view_test", args=(finding.test.id,)), ) - else: - return HttpResponseRedirect( - reverse("close_finding", args=(finding.id,)), - ) + return HttpResponseRedirect( + reverse("close_finding", args=(finding.id,)), + ) product_tab = Product_Tab( finding.test.engagement.product, title="Close", tab="findings", @@ -1446,7 +1438,7 @@ def reopen_finding(request, fid): status.last_modified = timezone.now() status.save() # Clear the risk acceptance, if present - ra_helper.risk_unaccept(finding) + ra_helper.risk_unaccept(request.user, finding) # Manage the jira status changes push_to_jira = False @@ -1502,15 +1494,14 @@ def apply_template_cwe(request, fid): extra_tags="alert-success", ) return HttpResponseRedirect(reverse("view_finding", args=(fid,))) - else: - 
messages.add_message( - request, - messages.ERROR, - "Unable to apply CWE template finding, please try again.", - extra_tags="alert-danger", - ) - else: - raise PermissionDenied + messages.add_message( + request, + messages.ERROR, + "Unable to apply CWE template finding, please try again.", + extra_tags="alert-danger", + ) + return None + raise PermissionDenied @user_is_authorized(Finding, Permissions.Finding_Edit, "fid") @@ -1549,13 +1540,12 @@ def copy_finding(request, fid): return redirect_to_return_url_or_else( request, reverse("view_test", args=(test.id,)), ) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to copy finding, please try again.", - extra_tags="alert-danger", - ) + messages.add_message( + request, + messages.ERROR, + "Unable to copy finding, please try again.", + extra_tags="alert-danger", + ) product_tab = Product_Tab(product, title="Copy Finding", tab="findings") return render( @@ -1626,7 +1616,7 @@ def simple_risk_accept(request, fid): if not finding.test.engagement.product.enable_simple_risk_acceptance: raise PermissionDenied - ra_helper.simple_risk_accept(finding) + ra_helper.simple_risk_accept(request.user, finding) messages.add_message( request, messages.WARNING, "Finding risk accepted.", extra_tags="alert-success", @@ -1640,7 +1630,7 @@ def simple_risk_accept(request, fid): @user_is_authorized(Finding, Permissions.Risk_Acceptance, "fid") def risk_unaccept(request, fid): finding = get_object_or_404(Finding, id=fid) - ra_helper.risk_unaccept(finding) + ra_helper.risk_unaccept(request.user, finding) messages.add_message( request, @@ -2002,8 +1992,7 @@ def apply_template_to_finding(request, fid, tid): ) return HttpResponseRedirect(reverse("view_finding", args=(finding.id,))) - else: - return HttpResponseRedirect(reverse("view_finding", args=(finding.id,))) + return HttpResponseRedirect(reverse("view_finding", args=(finding.id,))) @user_is_authorized(Test, Permissions.Finding_Add, "tid") @@ -2063,15 +2052,14 @@ def 
delete_stub_finding(request, fid): extra_tags="alert-success", ) return HttpResponseRedirect(reverse("view_test", args=(tid,))) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to delete potential finding, please try again.", - extra_tags="alert-danger", - ) - else: - raise PermissionDenied + messages.add_message( + request, + messages.ERROR, + "Unable to delete potential finding, please try again.", + extra_tags="alert-danger", + ) + return None + raise PermissionDenied @user_is_authorized(Stub_Finding, Permissions.Finding_Edit, "fid") @@ -2188,13 +2176,12 @@ def promote_to_finding(request, fid): ) return HttpResponseRedirect(reverse("view_test", args=(test.id,))) - else: - form_error = True - add_error_message_to_response( - "The form has errors, please correct them below.", - ) - add_field_errors_to_response(jform) - add_field_errors_to_response(form) + form_error = True + add_error_message_to_response( + "The form has errors, please correct them below.", + ) + add_field_errors_to_response(jform) + add_field_errors_to_response(form) else: form = PromoteFindingForm( initial={ @@ -2356,13 +2343,12 @@ def add_template(request): extra_tags="alert-success", ) return HttpResponseRedirect(reverse("templates")) - else: - messages.add_message( - request, - messages.ERROR, - "Template form has error, please revise and try again.", - extra_tags="alert-danger", - ) + messages.add_message( + request, + messages.ERROR, + "Template form has error, please revise and try again.", + extra_tags="alert-danger", + ) add_breadcrumb(title="Add Template", top_level=False, request=request) return render( request, "dojo/add_template.html", {"form": form, "name": "Add Template"}, @@ -2411,13 +2397,12 @@ def edit_template(request, tid): extra_tags="alert-success", ) return HttpResponseRedirect(reverse("templates")) - else: - messages.add_message( - request, - messages.ERROR, - "Template form has error, please revise and try again.", - extra_tags="alert-danger", - ) + 
messages.add_message( + request, + messages.ERROR, + "Template form has error, please revise and try again.", + extra_tags="alert-danger", + ) count = apply_cwe_mitigation(apply_to_findings=True, template=template, update=False) add_breadcrumb(title="Edit Template", top_level=False, request=request) @@ -2447,15 +2432,14 @@ def delete_template(request, tid): extra_tags="alert-success", ) return HttpResponseRedirect(reverse("templates")) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to delete Template, please revise and try again.", - extra_tags="alert-danger", - ) - else: - raise PermissionDenied + messages.add_message( + request, + messages.ERROR, + "Unable to delete Template, please revise and try again.", + extra_tags="alert-danger", + ) + return None + raise PermissionDenied def download_finding_pic(request, token): @@ -2661,13 +2645,12 @@ def merge_finding_product(request, pid): return HttpResponseRedirect( reverse("edit_finding", args=(finding_to_merge_into.id,)), ) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to merge findings. Findings to merge contained in finding to merge into.", - extra_tags="alert-danger", - ) + messages.add_message( + request, + messages.ERROR, + "Unable to merge findings. 
Findings to merge contained in finding to merge into.", + extra_tags="alert-danger", + ) else: messages.add_message( request, @@ -2851,9 +2834,9 @@ def finding_bulk_update_all(request, pid=None): ): skipped_risk_accept_count += 1 else: - ra_helper.simple_risk_accept(finding) + ra_helper.simple_risk_accept(request.user, finding) elif form.cleaned_data["risk_unaccept"]: - ra_helper.risk_unaccept(finding) + ra_helper.risk_unaccept(request.user, finding) for prod in prods: calculate_grade(prod) @@ -3137,8 +3120,7 @@ def find_available_notetypes(notes): break else: available_note_types.append(note_type_id) - queryset = Note_Type.objects.filter(id__in=available_note_types).order_by("-id") - return queryset + return Note_Type.objects.filter(id__in=available_note_types).order_by("-id") def get_missing_mandatory_notetypes(finding): @@ -3153,8 +3135,7 @@ def get_missing_mandatory_notetypes(finding): break else: notes_to_be_added.append(note_type_id) - queryset = Note_Type.objects.filter(id__in=notes_to_be_added) - return queryset + return Note_Type.objects.filter(id__in=notes_to_be_added) @user_is_authorized(Finding, Permissions.Finding_Edit, "original_id") diff --git a/dojo/finding_group/queries.py b/dojo/finding_group/queries.py index aae57f53c83..39b91c02665 100644 --- a/dojo/finding_group/queries.py +++ b/dojo/finding_group/queries.py @@ -46,10 +46,8 @@ def get_authorized_finding_groups(permission, queryset=None, user=None): test__engagement__product__member=Exists(authorized_product_roles), test__engagement__product__prod_type__authorized_group=Exists(authorized_product_type_groups), test__engagement__product__authorized_group=Exists(authorized_product_groups)) - finding_groups = finding_groups.filter( + return finding_groups.filter( Q(test__engagement__product__prod_type__member=True) | Q(test__engagement__product__member=True) | Q(test__engagement__product__prod_type__authorized_group=True) | Q(test__engagement__product__authorized_group=True)) - - return 
finding_groups diff --git a/dojo/fixtures/dojo_testdata.json b/dojo/fixtures/dojo_testdata.json index 62486cb90cf..ae550f8bf81 100644 --- a/dojo/fixtures/dojo_testdata.json +++ b/dojo/fixtures/dojo_testdata.json @@ -227,6 +227,7 @@ "url_prefix": "", "enable_slack_notifications": false, "enable_mail_notifications": false, + "enable_webhooks_notifications": true, "email_from": "no-reply@example.com", "false_positive_history": false, "msteams_url": "", @@ -2926,11 +2927,27 @@ "pk": 1, "model": "dojo.notifications", "fields": { - "product": 1, - "user": 2, - "product_type_added": [ - "slack" - ] + "product": null, + "user": null, + "template": false, + "product_type_added": "webhooks,alert", + "product_added": "webhooks,alert", + "engagement_added": "webhooks,alert", + "test_added": "webhooks,alert", + "scan_added": "webhooks,alert", + "scan_added_empty": "webhooks", + "jira_update": "alert", + "upcoming_engagement": "alert", + "stale_engagement": "alert", + "auto_close_engagement": "alert", + "close_engagement": "alert", + "user_mentioned": "alert", + "code_review": "alert", + "review_requested": "alert", + "other": "alert", + "sla_breach": "alert", + "risk_acceptance_expiration": "alert", + "sla_breach_combined": "alert" } }, { @@ -3045,5 +3062,35 @@ "dismissable": true, "style": "danger" } + }, + { + "model": "dojo.notification_webhooks", + "pk": 1, + "fields": { + "name": "My webhook endpoint", + "url": "http://webhook.endpoint:8080/post", + "header_name": "Auth", + "header_value": "Token xxx", + "status": "active", + "first_error": null, + "last_error": null, + "note": null, + "owner": null + } + }, + { + "model": "dojo.notification_webhooks", + "pk": 2, + "fields": { + "name": "My personal webhook endpoint", + "url": "http://webhook.endpoint:8080/post", + "header_name": "Auth", + "header_value": "Token secret", + "status": "active", + "first_error": null, + "last_error": null, + "note": null, + "owner": 2 + } } ] \ No newline at end of file diff --git 
a/dojo/fixtures/unit_metrics_additional_data.json b/dojo/fixtures/unit_metrics_additional_data.json new file mode 100644 index 00000000000..721e47eaac6 --- /dev/null +++ b/dojo/fixtures/unit_metrics_additional_data.json @@ -0,0 +1,482 @@ +[ + { + "pk": 240, + "model": "dojo.finding", + "fields": { + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2018-01-01", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "High", + "false_p": false, + "verified": false, + "severity": "High", + "title": "High Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": true, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 3, + "out_of_scope": false, + "cwe": null, + "file_path": "", + "duplicate_finding": 2, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": true, + "line": null, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "5d368a051fdec959e08315a32ef633ba5711bed6e8e75319ddee2cab4d4608c7", + "last_reviewed": null + } + }, + { + "pk": 241, + "model": "dojo.finding", + "fields": { + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2018-01-01", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "High", + "false_p": false, + "verified": false, + "severity": "High", + "title": "High Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": true, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 3, + "out_of_scope": false, + "cwe": null, + "file_path": "", + "duplicate_finding": 2, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, 
+ "active": false, + "line": null, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "5d368a051fdec959e08315a32ef633ba5711bed6e8e75319ddee2cab4d4608c7", + "last_reviewed": null, + "risk_accepted": true + } + }, + { + "pk": 242, + "model": "dojo.finding", + "fields": { + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2018-01-01", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "High", + "false_p": false, + "verified": false, + "severity": "High", + "title": "High Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": true, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 3, + "out_of_scope": false, + "cwe": null, + "file_path": "", + "duplicate_finding": 2, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": false, + "line": null, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "5d368a051fdec959e08315a32ef633ba5711bed6e8e75319ddee2cab4d4608c7", + "last_reviewed": null, + "risk_accepted": true + } + }, + { + "pk": 243, + "model": "dojo.finding", + "fields": { + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2017-12-31", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "High", + "false_p": false, + "verified": false, + "severity": "High", + "title": "DUMMY FINDING", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": false, + "mitigation": "MITIGATION", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 3, + "out_of_scope": false, + "cwe": 1, + "file_path": "", + 
"duplicate_finding": null, + "description": "TEST finding", + "mitigated_by": null, + "reporter": 2, + "mitigated": null, + "active": false, + "line": 100, + "under_review": false, + "defect_review_requested_by": 2, + "review_requested_by": 2, + "thread_id": 1, + "url": "http://www.example.com", + "notes": [ + 1 + ], + "dynamic_finding": false, + "hash_code": "c89d25e445b088ba339908f68e15e3177b78d22f3039d1bfea51c4be251bf4e0", + "last_reviewed": null, + "risk_accepted": true, + "is_mitigated": true + } + }, + { + "pk": 244, + "model": "dojo.finding", + "fields": { + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2017-12-29", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "Low", + "false_p": false, + "verified": true, + "severity": "Low", + "title": "Low Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": false, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 33, + "out_of_scope": false, + "cwe": null, + "file_path": "/dev/urandom", + "duplicate_finding": null, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": true, + "line": 123, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "9aca00affd340c4da02c934e7e3106a45c6ad0911da479daae421b3b28a2c1aa", + "last_reviewed": null + } + }, + { + "pk": 245, + "model": "dojo.finding", + "fields": { + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2017-12-27", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "Low", + "false_p": false, + "verified": false, + "severity": "Low", + "title": "Low Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": true, + 
"mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 33, + "out_of_scope": false, + "cwe": null, + "file_path": "/dev/urandom", + "duplicate_finding": 22, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": false, + "line": 123, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "9aca00affd340c4da02c934e7e3106a45c6ad0911da479daae421b3b28a2c1aa", + "last_reviewed": null + } + }, + { + "pk": 246, + "model": "dojo.finding", + "fields": { + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2018-01-02", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "Low", + "false_p": false, + "verified": false, + "severity": "Low", + "title": "Low Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": true, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 33, + "out_of_scope": false, + "cwe": null, + "file_path": "/dev/urandom", + "duplicate_finding": 22, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": false, + "line": 123, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "9aca00affd340c4da02c934e7e3106a45c6ad0911da479daae421b3b28a2c1aa", + "last_reviewed": null + } + }, + { + "pk": 247, + "model": "dojo.finding", + "fields": { + "unique_id_from_tool": 12345, + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2018-01-03", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "Low", + "false_p": false, + "verified": false, + 
"severity": "Low", + "title": "Low Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": true, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 55, + "out_of_scope": false, + "cwe": null, + "file_path": "/dev/urandom", + "duplicate_finding": null, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": false, + "line": 123, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "9aca00affd340c4da02c934e7e3106a45c6ad0911da479daae421b3b28a2c1aa", + "last_reviewed": null + } + }, + { + "pk": 248, + "model": "dojo.finding", + "fields": { + "unique_id_from_tool": 6789, + "last_reviewed_by": null, + "reviewers": [], + "static_finding": false, + "date": "2017-12-27", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "Low", + "false_p": false, + "verified": true, + "severity": "Low", + "title": "UID Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": false, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 77, + "out_of_scope": false, + "cwe": null, + "file_path": "/dev/urandom", + "duplicate_finding": null, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": true, + "line": 123, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "6f8d0bf970c14175e597843f4679769a4775742549d90f902ff803de9244c7e1", + "last_reviewed": null, + "is_mitigated": true + } + }, + { + "pk": 249, + "model": "dojo.finding", + "fields": { + "unique_id_from_tool": 6789, + "last_reviewed_by": null, + "reviewers": [], + 
"static_finding": false, + "date": "2018-01-04", + "references": "", + "files": [], + "payload": null, + "under_defect_review": false, + "impact": "Low", + "false_p": false, + "verified": false, + "severity": "Low", + "title": "UID Impact Test Finding", + "param": null, + "created": "2017-12-01T00:00:00Z", + "duplicate": true, + "mitigation": "test mitigation", + "found_by": [ + 1 + ], + "numerical_severity": "S0", + "test": 77, + "out_of_scope": false, + "cwe": null, + "file_path": "/dev/urandom", + "duplicate_finding": 224, + "description": "test finding", + "mitigated_by": null, + "reporter": 1, + "mitigated": null, + "active": false, + "line": 123, + "under_review": false, + "defect_review_requested_by": 1, + "review_requested_by": 1, + "thread_id": 11, + "url": null, + "notes": [], + "dynamic_finding": false, + "hash_code": "6f8d0bf970c14175e597843f4679769a4775742549d90f902ff803de9244c7e1", + "last_reviewed": null + } + } +] \ No newline at end of file diff --git a/dojo/forms.py b/dojo/forms.py index dde58a38b61..abd5a40d9ce 100644 --- a/dojo/forms.py +++ b/dojo/forms.py @@ -72,6 +72,7 @@ JIRA_Project, Note_Type, Notes, + Notification_Webhooks, Notifications, Objects_Product, Product, @@ -596,8 +597,7 @@ def clean(self): endpoints_to_add_list, errors = validate_endpoints_to_add(cleaned_data["endpoints_to_add"]) if errors: raise forms.ValidationError(errors) - else: - self.endpoints_to_add_list = endpoints_to_add_list + self.endpoints_to_add_list = endpoints_to_add_list return cleaned_data @@ -610,8 +610,7 @@ def clean_scan_date(self): return date def get_scan_type(self): - TGT_scan = self.cleaned_data["scan_type"] - return TGT_scan + return self.cleaned_data["scan_type"] class ReImportScanForm(forms.Form): @@ -1145,8 +1144,7 @@ def clean(self): endpoints_to_add_list, errors = validate_endpoints_to_add(cleaned_data["endpoints_to_add"]) if errors: raise forms.ValidationError(errors) - else: - self.endpoints_to_add_list = endpoints_to_add_list + 
self.endpoints_to_add_list = endpoints_to_add_list return cleaned_data @@ -1223,8 +1221,7 @@ def clean(self): endpoints_to_add_list, errors = validate_endpoints_to_add(cleaned_data["endpoints_to_add"]) if errors: raise forms.ValidationError(errors) - else: - self.endpoints_to_add_list = endpoints_to_add_list + self.endpoints_to_add_list = endpoints_to_add_list return cleaned_data @@ -1281,8 +1278,7 @@ def clean(self): endpoints_to_add_list, errors = validate_endpoints_to_add(cleaned_data["endpoints_to_add"]) if errors: raise forms.ValidationError(errors) - else: - self.endpoints_to_add_list = endpoints_to_add_list + self.endpoints_to_add_list = endpoints_to_add_list return cleaned_data @@ -1405,8 +1401,7 @@ def clean(self): endpoints_to_add_list, errors = validate_endpoints_to_add(cleaned_data["endpoints_to_add"]) if errors: raise forms.ValidationError(errors) - else: - self.endpoints_to_add_list = endpoints_to_add_list + self.endpoints_to_add_list = endpoints_to_add_list return cleaned_data @@ -1676,8 +1671,7 @@ def clean(self): endpoints_to_add_list, errors = validate_endpoints_to_add(endpoint) if errors: raise forms.ValidationError(errors) - else: - self.endpoints_to_process = endpoints_to_add_list + self.endpoints_to_process = endpoints_to_add_list return cleaned_data @@ -2168,8 +2162,9 @@ def clean(self): class AddDojoUserForm(forms.ModelForm): + email = forms.EmailField(required=True) password = forms.CharField(widget=forms.PasswordInput, - required=False, + required=True, validators=[validate_password], help_text="") @@ -2186,6 +2181,7 @@ def __init__(self, *args, **kwargs): class EditDojoUserForm(forms.ModelForm): + email = forms.EmailField(required=True) class Meta: model = Dojo_User @@ -2682,9 +2678,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def clean(self): - form_data = self.cleaned_data - - return form_data + return self.cleaned_data class CredMappingForm(forms.ModelForm): @@ -2778,6 +2772,32 @@ class Meta: exclude = 
["template"] +class NotificationsWebhookForm(forms.ModelForm): + class Meta: + model = Notification_Webhooks + exclude = [] + + def __init__(self, *args, **kwargs): + is_superuser = kwargs.pop("is_superuser", False) + super().__init__(*args, **kwargs) + if not is_superuser: # Only superadmins can edit owner + self.fields["owner"].disabled = True # TODO: needs to be tested + + +class DeleteNotificationsWebhookForm(forms.ModelForm): + id = forms.IntegerField(required=True, + widget=forms.widgets.HiddenInput()) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.fields["name"].disabled = True + self.fields["url"].disabled = True + + class Meta: + model = Notification_Webhooks + fields = ["id", "name", "url"] + + class ProductNotificationsForm(forms.ModelForm): def __init__(self, *args, **kwargs): @@ -2945,9 +2965,9 @@ def clean(self): if self.target == "engagement": msg = "JIRA Project needs a JIRA Instance, JIRA Project Key, and Epic issue type name, or choose to inherit settings from product" raise ValidationError(msg) - else: - msg = "JIRA Project needs a JIRA Instance, JIRA Project Key, and Epic issue type name, leave empty to have no JIRA integration setup" - raise ValidationError(msg) + msg = "JIRA Project needs a JIRA Instance, JIRA Project Key, and Epic issue type name, leave empty to have no JIRA integration setup" + raise ValidationError(msg) + return None class GITHUBFindingForm(forms.Form): @@ -3131,8 +3151,7 @@ class LoginBanner(forms.Form): ) def clean(self): - cleaned_data = super().clean() - return cleaned_data + return super().clean() class AnnouncementCreateForm(forms.ModelForm): @@ -3366,7 +3385,7 @@ def clean_expiration(self): if expiration < today: msg = "The expiration cannot be in the past" raise forms.ValidationError(msg) - elif expiration.day == today.day: + if expiration.day == today.day: msg = "The expiration cannot be today" raise forms.ValidationError(msg) else: @@ -3456,8 +3475,7 @@ def __init__(self, 
attrs=None): def decompress(self, value): if value: return pickle.loads(value) - else: - return [None, None, None, None, None, None] + return [None, None, None, None, None, None] def format_output(self, rendered_widgets): return "
".join(rendered_widgets) diff --git a/dojo/github_issue_link/views.py b/dojo/github_issue_link/views.py index f7bb90a37f2..e0ddabd1deb 100644 --- a/dojo/github_issue_link/views.py +++ b/dojo/github_issue_link/views.py @@ -52,11 +52,11 @@ def new_github(request): "Unable to authenticate on GitHub.", extra_tags="alert-danger") return HttpResponseRedirect(reverse("github")) - else: - gform = GITHUBForm() - add_breadcrumb(title="New GitHub Configuration", top_level=False, request=request) - return render(request, "dojo/new_github.html", - {"gform": gform}) + return None + gform = GITHUBForm() + add_breadcrumb(title="New GitHub Configuration", top_level=False, request=request) + return render(request, "dojo/new_github.html", + {"gform": gform}) @user_is_configuration_authorized("dojo.view_github_conf") diff --git a/dojo/group/queries.py b/dojo/group/queries.py index a8b70e6b761..dedb0d35e14 100644 --- a/dojo/group/queries.py +++ b/dojo/group/queries.py @@ -38,8 +38,7 @@ def get_authorized_group_members(permission): def get_authorized_group_members_for_user(user): groups = get_authorized_groups(Permissions.Group_View) - group_members = Dojo_Group_Member.objects.filter(user=user, group__in=groups).order_by("group__name").select_related("role", "group") - return group_members + return Dojo_Group_Member.objects.filter(user=user, group__in=groups).order_by("group__name").select_related("role", "group") def get_group_members_for_group(group): diff --git a/dojo/group/views.py b/dojo/group/views.py index 4f7dea473b5..fa2fd1e65b1 100644 --- a/dojo/group/views.py +++ b/dojo/group/views.py @@ -185,12 +185,11 @@ def process_forms(self, request: HttpRequest, group: Dojo_Group, context: dict): extra_tags="alert-success") return request, True - else: - messages.add_message( - request, - messages.ERROR, - "Group was not saved successfully.", - extra_tags="alert_danger") + messages.add_message( + request, + messages.ERROR, + "Group was not saved successfully.", + 
extra_tags="alert_danger") return request, False @@ -450,8 +449,7 @@ def edit_group_member(request, mid): extra_tags="alert-warning") if is_title_in_breadcrumbs("View User"): return HttpResponseRedirect(reverse("view_user", args=(member.user.id, ))) - else: - return HttpResponseRedirect(reverse("view_group", args=(member.group.id, ))) + return HttpResponseRedirect(reverse("view_group", args=(member.group.id, ))) if member.role.is_owner and not user_has_permission(request.user, member.group, Permissions.Group_Add_Owner): messages.add_message(request, messages.WARNING, @@ -465,8 +463,7 @@ def edit_group_member(request, mid): extra_tags="alert-success") if is_title_in_breadcrumbs("View User"): return HttpResponseRedirect(reverse("view_user", args=(member.user.id, ))) - else: - return HttpResponseRedirect(reverse("view_group", args=(member.group.id, ))) + return HttpResponseRedirect(reverse("view_group", args=(member.group.id, ))) add_breadcrumb(title="Edit a Group Member", top_level=False, request=request) return render(request, "dojo/edit_group_member.html", { @@ -492,8 +489,7 @@ def delete_group_member(request, mid): extra_tags="alert-warning") if is_title_in_breadcrumbs("View User"): return HttpResponseRedirect(reverse("view_user", args=(member.user.id, ))) - else: - return HttpResponseRedirect(reverse("view_group", args=(member.group.id, ))) + return HttpResponseRedirect(reverse("view_group", args=(member.group.id, ))) user = member.user member.delete() @@ -503,11 +499,9 @@ def delete_group_member(request, mid): extra_tags="alert-success") if is_title_in_breadcrumbs("View User"): return HttpResponseRedirect(reverse("view_user", args=(member.user.id, ))) - else: - if user == request.user: - return HttpResponseRedirect(reverse("groups")) - else: - return HttpResponseRedirect(reverse("view_group", args=(member.group.id, ))) + if user == request.user: + return HttpResponseRedirect(reverse("groups")) + return HttpResponseRedirect(reverse("view_group", 
args=(member.group.id, ))) add_breadcrumb("Delete a group member", top_level=False, request=request) return render(request, "dojo/delete_group_member.html", { diff --git a/dojo/importers/auto_create_context.py b/dojo/importers/auto_create_context.py index 6325ece9699..9f2a1cb7e76 100644 --- a/dojo/importers/auto_create_context.py +++ b/dojo/importers/auto_create_context.py @@ -229,16 +229,15 @@ def get_or_create_product_type( # Look for an existing object if product_type := self.get_target_product_type_if_exists(product_type_name=product_type_name): return product_type - else: - with transaction.atomic(): - product_type, created = Product_Type.objects.select_for_update().get_or_create(name=product_type_name) - if created: - Product_Type_Member.objects.create( - user=get_current_user(), - product_type=product_type, - role=Role.objects.get(is_owner=True), - ) - return product_type + with transaction.atomic(): + product_type, created = Product_Type.objects.select_for_update().get_or_create(name=product_type_name) + if created: + Product_Type_Member.objects.create( + user=get_current_user(), + product_type=product_type, + role=Role.objects.get(is_owner=True), + ) + return product_type def get_or_create_product( self, diff --git a/dojo/importers/base_importer.py b/dojo/importers/base_importer.py index 22e9ee5cbfe..6a05b3934ce 100644 --- a/dojo/importers/base_importer.py +++ b/dojo/importers/base_importer.py @@ -255,11 +255,10 @@ def determine_process_method( parsed_findings, **kwargs, ) - else: - return self.sync_process_findings( - parsed_findings, - **kwargs, - ) + return self.sync_process_findings( + parsed_findings, + **kwargs, + ) def update_test_meta(self): """ @@ -276,7 +275,7 @@ def update_test_meta(self): if not self.commit_hash.isspace(): self.test.commit_hash = self.commit_hash - return None + return def update_timestamps(self): """ @@ -510,7 +509,7 @@ def verify_tool_configuration_from_test(self): # Return early as there is no value in validating further 
return # Validate that the test has a value - elif self.test is not None: + if self.test is not None: # Make sure the Tool_Configuration is connected to the product that the test is if self.api_scan_configuration.product != self.test.engagement.product: msg = "API Scan Configuration has to be from same product as the Test" @@ -536,7 +535,7 @@ def verify_tool_configuration_from_engagement(self): # Return early as there is no value in validating further return # Validate that the engagement has a value - elif self.engagement is not None: + if self.engagement is not None: # Make sure the Tool_Configuration is connected to the engagement that the test is if self.api_scan_configuration.product != self.engagement.product: msg = "API Scan Configuration has to be from same product as the Engagement" diff --git a/dojo/importers/default_reimporter.py b/dojo/importers/default_reimporter.py index 290e13f6ac5..9063838c73d 100644 --- a/dojo/importers/default_reimporter.py +++ b/dojo/importers/default_reimporter.py @@ -399,12 +399,12 @@ def match_new_finding_to_existing_finding( test=self.test, hash_code=unsaved_finding.hash_code, ).exclude(hash_code=None).order_by("id") - elif self.deduplication_algorithm == "unique_id_from_tool": + if self.deduplication_algorithm == "unique_id_from_tool": return Finding.objects.filter( test=self.test, unique_id_from_tool=unsaved_finding.unique_id_from_tool, ).exclude(unique_id_from_tool=None).order_by("id") - elif self.deduplication_algorithm == "unique_id_from_tool_or_hash_code": + if self.deduplication_algorithm == "unique_id_from_tool_or_hash_code": query = Finding.objects.filter( Q(test=self.test), (Q(hash_code__isnull=False) & Q(hash_code=unsaved_finding.hash_code)) @@ -412,7 +412,7 @@ def match_new_finding_to_existing_finding( ).order_by("id") deduplicationLogger.debug(query.query) return query - elif self.deduplication_algorithm == "legacy": + if self.deduplication_algorithm == "legacy": # This is the legacy reimport behavior. 
Although it's pretty flawed and doesn't match the legacy algorithm for deduplication, # this is left as is for simplicity. # Re-writing the legacy deduplication here would be complicated and counter-productive. @@ -423,9 +423,8 @@ def match_new_finding_to_existing_finding( test=self.test, severity=unsaved_finding.severity, numerical_severity=Finding.get_numerical_severity(unsaved_finding.severity)).order_by("id") - else: - logger.error(f'Internal error: unexpected deduplication_algorithm: "{self.deduplication_algorithm}"') - return None + logger.error(f'Internal error: unexpected deduplication_algorithm: "{self.deduplication_algorithm}"') + return None def process_matched_finding( self, @@ -441,16 +440,15 @@ def process_matched_finding( unsaved_finding, existing_finding, ) - elif existing_finding.is_mitigated: + if existing_finding.is_mitigated: return self.process_matched_mitigated_finding( unsaved_finding, existing_finding, ) - else: - return self.process_matched_active_finding( - unsaved_finding, - existing_finding, - ) + return self.process_matched_active_finding( + unsaved_finding, + existing_finding, + ) def process_matched_special_status_finding( self, @@ -480,7 +478,7 @@ def process_matched_special_status_finding( # We also need to add the finding to 'unchanged_items' as otherwise it will get mitigated by the reimporter # (Risk accepted findings are not set to mitigated by Defectdojo) # We however do not exit the loop as we do want to update the endpoints (in case some endpoints were fixed) - elif existing_finding.risk_accepted and not existing_finding.active: + if existing_finding.risk_accepted and not existing_finding.active: self.unchanged_items.append(existing_finding) return existing_finding, False # The finding was not an exact match, so we need to add more details about from the @@ -521,47 +519,44 @@ def process_matched_mitigated_finding( logger.debug(msg) # Return True here to force the loop to continue return existing_finding, True - else: - # even 
if there is no mitigation time, skip it, because both the current finding and - # the reimported finding are is_mitigated - # Return True here to force the loop to continue - return existing_finding, True - else: - if self.do_not_reactivate: - logger.debug( - "Skipping reactivating by user's choice do_not_reactivate: " - f" - {existing_finding.id}: {existing_finding.title} " - f"({existing_finding.component_name} - {existing_finding.component_version})", - ) - # Search for an existing note that this finding has been skipped for reactivation - # before this current time - reactivated_note_text = f"Finding has skipped reactivation from {self.scan_type} re-upload with user decision do_not_reactivate." - existing_note = existing_finding.notes.filter( + # even if there is no mitigation time, skip it, because both the current finding and + # the reimported finding are is_mitigated + # Return True here to force the loop to continue + return existing_finding, True + if self.do_not_reactivate: + logger.debug( + "Skipping reactivating by user's choice do_not_reactivate: " + f" - {existing_finding.id}: {existing_finding.title} " + f"({existing_finding.component_name} - {existing_finding.component_version})", + ) + # Search for an existing note that this finding has been skipped for reactivation + # before this current time + reactivated_note_text = f"Finding has skipped reactivation from {self.scan_type} re-upload with user decision do_not_reactivate." 
+ existing_note = existing_finding.notes.filter( + entry=reactivated_note_text, + author=self.user, + ) + # If a note has not been left before, we can skip this finding + if len(existing_note) == 0: + note = Notes( entry=reactivated_note_text, author=self.user, ) - # If a note has not been left before, we can skip this finding - if len(existing_note) == 0: - note = Notes( - entry=reactivated_note_text, - author=self.user, - ) - note.save() - existing_finding.notes.add(note) - existing_finding.save(dedupe_option=False) - # Return True here to force the loop to continue - return existing_finding, True - else: - logger.debug( - f"Reactivating: - {existing_finding.id}: {existing_finding.title} " - f"({existing_finding.component_name} - {existing_finding.component_version})", - ) - existing_finding.mitigated = None - existing_finding.is_mitigated = False - existing_finding.mitigated_by = None - existing_finding.active = True - if self.verified is not None: - existing_finding.verified = self.verified + note.save() + existing_finding.notes.add(note) + existing_finding.save(dedupe_option=False) + # Return True here to force the loop to continue + return existing_finding, True + logger.debug( + f"Reactivating: - {existing_finding.id}: {existing_finding.title} " + f"({existing_finding.component_name} - {existing_finding.component_version})", + ) + existing_finding.mitigated = None + existing_finding.is_mitigated = False + existing_finding.mitigated_by = None + existing_finding.active = True + if self.verified is not None: + existing_finding.verified = self.verified component_name = getattr(unsaved_finding, "component_name", None) component_version = getattr(unsaved_finding, "component_version", None) @@ -706,9 +701,8 @@ def finding_post_processing( # Process vulnerability IDs if finding_from_report.unsaved_vulnerability_ids: finding.unsaved_vulnerability_ids = finding_from_report.unsaved_vulnerability_ids - finding = self.process_vulnerability_ids(finding) - return finding + 
return self.process_vulnerability_ids(finding) def process_groups_for_all_findings( self, @@ -767,8 +761,7 @@ def process_results( serialized_to_mitigate, serialized_untouched, ) - else: - return self.new_items, self.reactivated_items, self.to_mitigate, self.untouched + return self.new_items, self.reactivated_items, self.to_mitigate, self.untouched def calculate_unsaved_finding_hash_code( self, diff --git a/dojo/importers/endpoint_manager.py b/dojo/importers/endpoint_manager.py index 2ee3e7d3009..ba7172efaa3 100644 --- a/dojo/importers/endpoint_manager.py +++ b/dojo/importers/endpoint_manager.py @@ -57,7 +57,7 @@ def add_endpoints_to_unsaved_finding( endpoint=ep, defaults={"date": finding.date}) logger.debug(f"IMPORT_SCAN: {len(endpoints)} imported") - return None + return @dojo_async_task @app.task() @@ -79,7 +79,7 @@ def mitigate_endpoint_status( endpoint_status.mitigated_by = user endpoint_status.mitigated = True endpoint_status.save() - return None + return @dojo_async_task @app.task() @@ -100,7 +100,7 @@ def reactivate_endpoint_status( endpoint_status.mitigated = False endpoint_status.last_modified = timezone.now() endpoint_status.save() - return None + return def chunk_endpoints( self, @@ -158,7 +158,7 @@ def clean_unsaved_endpoints( endpoint.clean() except ValidationError as e: logger.warning(f"DefectDojo is storing broken endpoint because cleaning wasn't successful: {e}") - return None + return def chunk_endpoints_and_reactivate( self, @@ -182,7 +182,7 @@ def chunk_endpoints_and_reactivate( self.reactivate_endpoint_status(endpoint_status_list, sync=False) else: self.reactivate_endpoint_status(endpoint_status_list, sync=True) - return None + return def chunk_endpoints_and_mitigate( self, @@ -207,7 +207,7 @@ def chunk_endpoints_and_mitigate( self.mitigate_endpoint_status(endpoint_status_list, user, sync=False) else: self.mitigate_endpoint_status(endpoint_status_list, user, sync=True) - return None + return def update_endpoint_status( self, @@ -242,4 +242,4 @@ 
def update_endpoint_status( ) self.chunk_endpoints_and_reactivate(endpoint_status_to_reactivate) self.chunk_endpoints_and_mitigate(endpoint_status_to_mitigate, user) - return None + return diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py index b5e3ba8b219..85d5e1bbbbe 100644 --- a/dojo/jira_link/helper.py +++ b/dojo/jira_link/helper.py @@ -99,6 +99,7 @@ def is_push_all_issues(instance): jira_project = get_jira_project(instance) if jira_project: return jira_project.push_all_issues + return None # checks if a finding can be pushed to JIRA @@ -173,12 +174,11 @@ def get_jira_project(obj, use_inheritance=True): if obj.jira_project: return obj.jira_project # some old jira_issue records don't have a jira_project, so try to go via the finding instead - elif hasattr(obj, "finding") and obj.finding: + if hasattr(obj, "finding") and obj.finding: return get_jira_project(obj.finding, use_inheritance=use_inheritance) - elif hasattr(obj, "engagement") and obj.engagement: + if hasattr(obj, "engagement") and obj.engagement: return get_jira_project(obj.finding, use_inheritance=use_inheritance) - else: - return None + return None if isinstance(obj, Finding) or isinstance(obj, Stub_Finding): finding = obj @@ -205,9 +205,8 @@ def get_jira_project(obj, use_inheritance=True): if use_inheritance: logger.debug("delegating to product %s for %s", engagement.product, engagement) return get_jira_project(engagement.product) - else: - logger.debug("not delegating to product %s for %s", engagement.product, engagement) - return None + logger.debug("not delegating to product %s for %s", engagement.product, engagement) + return None if isinstance(obj, Product): # TODO: refactor relationships, but now this would brake APIv1 (and v2?) 
@@ -241,7 +240,7 @@ def get_jira_url(obj): issue = get_jira_issue(obj) if issue is not None: return get_jira_issue_url(issue) - elif isinstance(obj, Finding): + if isinstance(obj, Finding): # finding must only have url if there is a jira_issue # engagement can continue to show url of jiraproject instead of jira issue return None @@ -320,8 +319,7 @@ def get_jira_issue_template(obj): if isinstance(obj, Finding_Group): return os.path.join(template_dir, "jira-finding-group-description.tpl") - else: - return os.path.join(template_dir, "jira-description.tpl") + return os.path.join(template_dir, "jira-description.tpl") def get_jira_creation(obj): @@ -357,6 +355,7 @@ def get_jira_issue(obj): return obj.jira_issue except JIRA_Issue.DoesNotExist: return None + return None def has_jira_configured(obj): @@ -424,6 +423,7 @@ def get_jira_connection(obj): if jira_instance is not None: return get_jira_connection_raw(jira_instance.url, jira_instance.username, jira_instance.password) + return None def jira_get_resolution_id(jira, issue, status): @@ -468,6 +468,7 @@ def get_jira_updated(finding): project = get_jira_project(finding) issue = jira_get_issue(project, j_issue) return issue.fields.updated + return None # Used for unit testing so geting all the connections is manadatory @@ -481,6 +482,7 @@ def get_jira_status(finding): project = get_jira_project(finding) issue = jira_get_issue(project, j_issue) return issue.fields.status + return None # Used for unit testing so geting all the connections is manadatory @@ -494,6 +496,7 @@ def get_jira_comments(finding): project = get_jira_project(finding) issue = jira_get_issue(project, j_issue) return issue.fields.comment.comments + return None # Logs the error to the alerts table, which appears in the notification toolbar @@ -617,7 +620,7 @@ def jira_priority(obj): def jira_environment(obj): if isinstance(obj, Finding): return "\n".join([str(endpoint) for endpoint in obj.endpoints.all()]) - elif isinstance(obj, Finding_Group): + if 
isinstance(obj, Finding_Group): envs = [ jira_environment(finding) for finding in obj.findings.all() @@ -625,8 +628,7 @@ def jira_environment(obj): jira_environments = [env for env in envs if env] return "\n".join(jira_environments) - else: - return "" + return "" def push_to_jira(obj, *args, **kwargs): @@ -638,25 +640,22 @@ def push_to_jira(obj, *args, **kwargs): finding = obj if finding.has_jira_issue: return update_jira_issue_for_finding(finding, *args, **kwargs) - else: - return add_jira_issue_for_finding(finding, *args, **kwargs) + return add_jira_issue_for_finding(finding, *args, **kwargs) - elif isinstance(obj, Engagement): + if isinstance(obj, Engagement): engagement = obj if engagement.has_jira_issue: return update_epic(engagement, *args, **kwargs) - else: - return add_epic(engagement, *args, **kwargs) + return add_epic(engagement, *args, **kwargs) - elif isinstance(obj, Finding_Group): + if isinstance(obj, Finding_Group): group = obj if group.has_jira_issue: return update_jira_issue_for_finding_group(group, *args, **kwargs) - else: - return add_jira_issue_for_finding_group(group, *args, **kwargs) + return add_jira_issue_for_finding_group(group, *args, **kwargs) - else: - logger.error("unsupported object passed to push_to_jira: %s %i %s", obj.__name__, obj.id, obj) + logger.error("unsupported object passed to push_to_jira: %s %i %s", obj.__name__, obj.id, obj) + return None def add_issues_to_epic(jira, obj, epic_id, issue_keys, ignore_epics=True): @@ -1022,9 +1021,7 @@ def get_jira_issue_from_jira(find): jira = get_jira_connection(jira_instance) logger.debug("getting issue from JIRA") - issue_from_jira = jira.issue(j_issue.jira_id) - - return issue_from_jira + return jira.issue(j_issue.jira_id) except JIRAError as e: logger.exception(e) @@ -1191,6 +1188,7 @@ def jira_attachment(finding, jira, issue, file, jira_filename=None): logger.exception(e) log_jira_alert("Attachment: " + e.text, finding) return False + return None def jira_check_attachment(issue, 
source_file_name): @@ -1242,9 +1240,9 @@ def close_epic(eng, push_to_jira, **kwargs): logger.exception(e) log_jira_generic_alert("Jira Engagement/Epic Close Error", str(e)) return False - else: - add_error_message_to_response("Push to JIRA for Epic skipped because enable_engagement_epic_mapping is not checked for this engagement") - return False + return None + add_error_message_to_response("Push to JIRA for Epic skipped because enable_engagement_epic_mapping is not checked for this engagement") + return False @dojo_model_to_id @@ -1350,9 +1348,8 @@ def jira_get_issue(jira_project, issue_key): try: jira_instance = jira_project.jira_instance jira = get_jira_connection(jira_instance) - issue = jira.issue(issue_key) + return jira.issue(issue_key) - return issue except JIRAError as jira_error: logger.debug("error retrieving jira issue " + issue_key + " " + str(jira_error)) logger.exception(jira_error) @@ -1386,6 +1383,8 @@ def add_comment(obj, note, force_push=False, **kwargs): except JIRAError as e: log_jira_generic_alert("Jira Add Comment Error", str(e)) return False + return None + return None def add_simple_jira_comment(jira_instance, jira_issue, comment): @@ -1623,7 +1622,7 @@ def process_resolution_from_jira(finding, resolution_id, resolution_name, assign owner=finding.reporter, ) finding.test.engagement.risk_acceptance.add(ra) - ra_helper.add_findings_to_risk_acceptance(ra, [finding]) + ra_helper.add_findings_to_risk_acceptance(User.objects.get_or_create(username="JIRA")[0], ra, [finding]) status_changed = True elif jira_instance and resolution_name in jira_instance.false_positive_resolutions: if not finding.false_p: @@ -1633,7 +1632,7 @@ def process_resolution_from_jira(finding, resolution_id, resolution_name, assign finding.mitigated = None finding.is_mitigated = False finding.false_p = True - ra_helper.risk_unaccept(finding) + ra_helper.risk_unaccept(User.objects.get_or_create(username="JIRA")[0], finding) status_changed = True else: # Mitigated by default as 
before @@ -1645,7 +1644,7 @@ def process_resolution_from_jira(finding, resolution_id, resolution_name, assign finding.mitigated_by, _created = User.objects.get_or_create(username="JIRA") finding.endpoints.clear() finding.false_p = False - ra_helper.risk_unaccept(finding) + ra_helper.risk_unaccept(User.objects.get_or_create(username="JIRA")[0], finding) status_changed = True else: if not finding.active: @@ -1655,7 +1654,7 @@ def process_resolution_from_jira(finding, resolution_id, resolution_name, assign finding.mitigated = None finding.is_mitigated = False finding.false_p = False - ra_helper.risk_unaccept(finding) + ra_helper.risk_unaccept(User.objects.get_or_create(username="JIRA")[0], finding) status_changed = True # for findings in a group, there is no jira_issue attached to the finding diff --git a/dojo/jira_link/queries.py b/dojo/jira_link/queries.py index 6d41b3b6e21..b077c076097 100644 --- a/dojo/jira_link/queries.py +++ b/dojo/jira_link/queries.py @@ -63,7 +63,7 @@ def get_authorized_jira_projects(permission, user=None): product__member=Exists(product_authorized_product_roles), product__prod_type__authorized_group=Exists(product_authorized_product_type_groups), product__authorized_group=Exists(product_authorized_product_groups)) - jira_projects = jira_projects.filter( + return jira_projects.filter( Q(engagement__product__prod_type__member=True) | Q(engagement__product__member=True) | Q(engagement__product__prod_type__authorized_group=True) @@ -73,8 +73,6 @@ def get_authorized_jira_projects(permission, user=None): | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - return jira_projects - def get_authorized_jira_issues(permission): user = get_current_user() @@ -152,7 +150,7 @@ def get_authorized_jira_issues(permission): finding__test__engagement__product__member=Exists(finding_authorized_product_roles), finding__test__engagement__product__prod_type__authorized_group=Exists(finding_authorized_product_type_groups), 
finding__test__engagement__product__authorized_group=Exists(finding_authorized_product_groups)) - jira_issues = jira_issues.filter( + return jira_issues.filter( Q(engagement__product__prod_type__member=True) | Q(engagement__product__member=True) | Q(engagement__product__prod_type__authorized_group=True) @@ -165,5 +163,3 @@ def get_authorized_jira_issues(permission): | Q(finding__test__engagement__product__member=True) | Q(finding__test__engagement__product__prod_type__authorized_group=True) | Q(finding__test__engagement__product__authorized_group=True)) - - return jira_issues diff --git a/dojo/jira_link/views.py b/dojo/jira_link/views.py index e618c84f01c..fc2f67a373a 100644 --- a/dojo/jira_link/views.py +++ b/dojo/jira_link/views.py @@ -67,10 +67,10 @@ def webhook(request, secret=None): if not system_settings.enable_jira: return webhook_responser_handler("info", "Ignoring incoming webhook as JIRA is disabled.") # If the webhook is not enabled, then return a 404 - elif not system_settings.enable_jira_web_hook: + if not system_settings.enable_jira_web_hook: return webhook_responser_handler("info", "Ignoring incoming webhook as JIRA Webhook is disabled.") # Determine if the request should be "authenticated" - elif not system_settings.disable_jira_webhook_secret: + if not system_settings.disable_jira_webhook_secret: # Make sure there is a value for the webhook secret before making a comparison if not system_settings.jira_webhook_secret: return webhook_responser_handler("info", "Ignoring incoming webhook as JIRA Webhook secret is empty in Defect Dojo system settings.") @@ -211,7 +211,7 @@ def check_for_and_create_comment(parsed_json): """ comment = parsed_json.get("comment", None) if comment is None: - return + return None comment_text = comment.get("body") commenter = "" @@ -271,6 +271,7 @@ def check_for_and_create_comment(parsed_json): finding.jira_issue.jira_change = timezone.now() finding.jira_issue.save() finding.save() + return None def get_custom_field(jira, 
label): @@ -432,8 +433,7 @@ def post(self, request): url=request.build_absolute_uri(reverse("jira"))) return HttpResponseRedirect(reverse("jira")) - else: - logger.error("jform.errors: %s", jform.errors) + logger.error("jform.errors: %s", jform.errors) return render(request, self.get_template(), {"jform": jform}) diff --git a/dojo/management/commands/jira_status_reconciliation.py b/dojo/management/commands/jira_status_reconciliation.py index 6ca72dbe1f1..e26aefc0516 100644 --- a/dojo/management/commands/jira_status_reconciliation.py +++ b/dojo/management/commands/jira_status_reconciliation.py @@ -86,7 +86,7 @@ def jira_status_reconciliation(*args, **kwargs): messages.append(message) logger.info(message) continue - elif find.risk_accepted: + if find.risk_accepted: message = "{}; {}/finding/{};{};{};{};{};{};{};{};{};{};{};{}skipping risk accepted findings;{}".format( find.jira_issue.jira_key, settings.SITE_URL, find.id, find.status(), resolution_name, None, None, None, find.jira_issue.jira_change, issue_from_jira.fields.updated, find.last_status_update, issue_from_jira.fields.updated, find.last_reviewed, issue_from_jira.fields.updated, "skipped") @@ -186,6 +186,7 @@ def jira_status_reconciliation(*args, **kwargs): logger.info("results (semicolon seperated)") for message in messages: logger.info(message) + return None class Command(BaseCommand): diff --git a/dojo/metrics/utils.py b/dojo/metrics/utils.py index 263b7065f2c..d22b13beb4d 100644 --- a/dojo/metrics/utils.py +++ b/dojo/metrics/utils.py @@ -3,8 +3,7 @@ from datetime import date, datetime, timedelta from enum import Enum from functools import partial -from math import ceil -from typing import Any, Callable, NamedTuple, TypeVar, Union +from typing import Any, Callable, NamedTuple, Type, TypeVar, Union from dateutil.relativedelta import relativedelta from django.contrib import messages @@ -35,12 +34,18 @@ ) +def get_metrics_finding_filter_class() -> Type[Union[MetricsFindingFilter, 
MetricsFindingFilterWithoutObjectLookups]]: + if get_system_setting("filter_string_matching", False): + return MetricsFindingFilterWithoutObjectLookups + return MetricsFindingFilter + + def finding_queries( prod_type: QuerySet[Product_Type], request: HttpRequest, ) -> dict[str, Any]: # Get the initial list of findings the user is authorized to see - findings_query = get_authorized_findings( + all_authorized_findings: QuerySet[Finding] = get_authorized_findings( Permissions.Finding_View, user=request.user, ).select_related( @@ -54,46 +59,47 @@ def finding_queries( "test__test_type", ) - filter_string_matching = get_system_setting("filter_string_matching", False) - finding_filter_class = MetricsFindingFilterWithoutObjectLookups if filter_string_matching else MetricsFindingFilter - findings = finding_filter_class(request.GET, queryset=findings_query) - form = findings.form - findings_qs = queryset_check(findings) - # Quick check to determine if the filters were too tight and filtered everything away - if not findings_qs.exists() and not findings_query.exists(): - findings = findings_query - findings_qs = findings if isinstance(findings, QuerySet) else findings.qs + finding_filter_class = get_metrics_finding_filter_class() + findings_filter = finding_filter_class(request.GET, queryset=all_authorized_findings) + form = findings_filter.form + filtered_findings: QuerySet[Finding] = queryset_check(findings_filter) + # Quick check to determine if the filters were too tight and filtered everything away. If so, fall back to using all + # authorized Findings instead. + if not filtered_findings.exists() and all_authorized_findings.exists(): + filtered_findings = all_authorized_findings messages.add_message( request, messages.ERROR, _("All objects have been filtered away. 
Displaying all objects"), extra_tags="alert-danger") - start_date, end_date = get_date_range(findings_qs) + start_date, end_date = get_date_range(filtered_findings) # Filter by the date ranges supplied - findings_query = findings_query.filter(date__range=[start_date, end_date]) + all_findings_within_date_range = all_authorized_findings.filter(date__range=[start_date, end_date]) # Get the list of closed and risk accepted findings - findings_closed = findings_query.filter(CLOSED_FINDINGS_QUERY) - accepted_findings = findings_query.filter(ACCEPTED_FINDINGS_QUERY) - active_findings = findings_query.filter(OPEN_FINDINGS_QUERY) + closed_filtered_findings = all_findings_within_date_range.filter(CLOSED_FINDINGS_QUERY) + accepted_filtered_findings = all_findings_within_date_range.filter(ACCEPTED_FINDINGS_QUERY) + active_filtered_findings = all_findings_within_date_range.filter(OPEN_FINDINGS_QUERY) # filter by product type if applicable if len(prod_type) > 0: - findings_query = findings_query.filter(test__engagement__product__prod_type__in=prod_type) - findings_closed = findings_closed.filter(test__engagement__product__prod_type__in=prod_type) - accepted_findings = accepted_findings.filter(test__engagement__product__prod_type__in=prod_type) - active_findings = active_findings.filter(test__engagement__product__prod_type__in=prod_type) + all_findings_within_date_range = all_findings_within_date_range.filter( + test__engagement__product__prod_type__in=prod_type) + closed_filtered_findings = closed_filtered_findings.filter(test__engagement__product__prod_type__in=prod_type) + accepted_filtered_findings = accepted_filtered_findings.filter( + test__engagement__product__prod_type__in=prod_type) + active_filtered_findings = active_filtered_findings.filter(test__engagement__product__prod_type__in=prod_type) # Get the severity counts of risk accepted findings - accepted_findings_counts = severity_count(accepted_findings, "aggregate", "severity") + accepted_findings_counts = 
severity_count(accepted_filtered_findings, "aggregate", "severity") weeks_between, months_between = period_deltas(start_date, end_date) query_counts_for_period = query_counts( - findings_query, - active_findings, - accepted_findings, + all_findings_within_date_range, + active_filtered_findings, + accepted_filtered_findings, start_date, MetricsType.FINDING, ) @@ -117,9 +123,9 @@ def finding_queries( )[:10] return { - "all": findings_query, - "closed": findings_closed, - "accepted": accepted_findings, + "all": filtered_findings, + "closed": closed_filtered_findings, + "accepted": accepted_filtered_findings, "accepted_count": accepted_findings_counts, "top_ten": top_ten, "monthly_counts": monthly_counts, @@ -454,13 +460,8 @@ def period_deltas(start_date, end_date): :return: A tuple of integers representing (number of weeks between the dates, number of months between the dates) """ r = relativedelta(end_date, start_date) - months_between = (r.years * 12) + r.months - # include current month - months_between += 1 - - weeks_between = int(ceil((((r.years * 12) + r.months) * 4.33) + (r.days / 7))) - if weeks_between <= 0: - weeks_between += 2 + months_between = max((r.years * 12) + r.months, 2) + weeks_between = max((end_date - start_date).days // 7, 2) return weeks_between, months_between @@ -500,7 +501,7 @@ def aggregate_counts_by_period( ) desired_values += ("closed",) - return severities_by_period.values(*desired_values) + return severities_by_period.order_by("grouped_date").values(*desired_values) def findings_by_product( @@ -597,5 +598,4 @@ def findings_queryset( """ if qs.model is Endpoint_Status: return Finding.objects.filter(status_finding__in=qs) - else: - return qs + return qs diff --git a/dojo/models.py b/dojo/models.py index 5048f30427f..2346c1e916c 100644 --- a/dojo/models.py +++ b/dojo/models.py @@ -353,6 +353,13 @@ class System_Settings(models.Model): mail_notifications_to = models.CharField(max_length=200, default="", blank=True) + 
enable_webhooks_notifications = \ + models.BooleanField(default=False, + verbose_name=_("Enable Webhook notifications"), + blank=False) + webhooks_notifications_timeout = models.IntegerField(default=10, + help_text=_("How many seconds will DefectDojo waits for response from webhook endpoint")) + false_positive_history = models.BooleanField( default=False, help_text=_( "(EXPERIMENTAL) DefectDojo will automatically mark the finding as a " @@ -757,9 +764,8 @@ def get_absolute_url(self): return reverse("product_type", args=[str(self.id)]) def get_breadcrumbs(self): - bc = [{"title": str(self), + return [{"title": str(self), "url": reverse("edit_product_type", args=(self.id,))}] - return bc @cached_property def critical_present(self): @@ -767,6 +773,7 @@ def critical_present(self): test__engagement__product__prod_type=self, severity="Critical") if c_findings.count() > 0: return True + return None @cached_property def high_present(self): @@ -774,6 +781,7 @@ def high_present(self): test__engagement__product__prod_type=self, severity="High") if c_findings.count() > 0: return True + return None @cached_property def calc_health(self): @@ -791,8 +799,7 @@ def calc_health(self): health = health - ((h_findings.count() - 1) * 2) if health < 5: return 5 - else: - return health + return health # only used by bulk risk acceptance api @property @@ -828,9 +835,8 @@ def __str__(self): return self.name def get_breadcrumbs(self): - bc = [{"title": str(self), + return [{"title": str(self), "url": None}] - return bc class DojoMeta(models.Model): @@ -1173,8 +1179,7 @@ def endpoint_host_count(self): for e in endpoints: if e.host in hosts: continue - else: - hosts.append(e.host) + hosts.append(e.host) return len(hosts) @@ -1189,53 +1194,51 @@ def endpoint_count(self): def open_findings(self, start_date=None, end_date=None): if start_date is None or end_date is None: return {} - else: - critical = Finding.objects.filter(test__engagement__product=self, - mitigated__isnull=True, - 
verified=True, - false_p=False, - duplicate=False, - out_of_scope=False, - severity="Critical", - date__range=[start_date, - end_date]).count() - high = Finding.objects.filter(test__engagement__product=self, + critical = Finding.objects.filter(test__engagement__product=self, mitigated__isnull=True, verified=True, false_p=False, duplicate=False, out_of_scope=False, - severity="High", + severity="Critical", date__range=[start_date, end_date]).count() - medium = Finding.objects.filter(test__engagement__product=self, - mitigated__isnull=True, - verified=True, - false_p=False, - duplicate=False, - out_of_scope=False, - severity="Medium", - date__range=[start_date, - end_date]).count() - low = Finding.objects.filter(test__engagement__product=self, - mitigated__isnull=True, - verified=True, - false_p=False, - duplicate=False, - out_of_scope=False, - severity="Low", - date__range=[start_date, - end_date]).count() - return {"Critical": critical, - "High": high, - "Medium": medium, - "Low": low, - "Total": (critical + high + medium + low)} + high = Finding.objects.filter(test__engagement__product=self, + mitigated__isnull=True, + verified=True, + false_p=False, + duplicate=False, + out_of_scope=False, + severity="High", + date__range=[start_date, + end_date]).count() + medium = Finding.objects.filter(test__engagement__product=self, + mitigated__isnull=True, + verified=True, + false_p=False, + duplicate=False, + out_of_scope=False, + severity="Medium", + date__range=[start_date, + end_date]).count() + low = Finding.objects.filter(test__engagement__product=self, + mitigated__isnull=True, + verified=True, + false_p=False, + duplicate=False, + out_of_scope=False, + severity="Low", + date__range=[start_date, + end_date]).count() + return {"Critical": critical, + "High": high, + "Medium": medium, + "Low": low, + "Total": (critical + high + medium + low)} def get_breadcrumbs(self): - bc = [{"title": str(self), + return [{"title": str(self), "url": reverse("view_product", 
args=(self.id,))}] - return bc @property def get_product_type(self): @@ -1693,9 +1696,8 @@ def __str__(self): msg = "hyperlink lib did not create URL as was expected" raise ValueError(msg) return clean_url - else: - msg = "Missing host" - raise ValueError(msg) + msg = "Missing host" + raise ValueError(msg) except: url = "" if self.protocol: @@ -1807,11 +1809,9 @@ def __eq__(self, other): products_match = (self.product) == other.product # Check if the contents match return products_match and contents_match - else: - return contents_match + return contents_match - else: - return NotImplemented + return NotImplemented @property def is_broken(self): @@ -1822,8 +1822,7 @@ def is_broken(self): else: if self.product: return False - else: - return True + return True @property def mitigated(self): @@ -1844,7 +1843,7 @@ def findings_count(self): return self.findings.all().count() def active_findings(self): - findings = self.findings.filter( + return self.findings.filter( active=True, out_of_scope=False, mitigated__isnull=True, @@ -1854,10 +1853,9 @@ def active_findings(self): status_finding__out_of_scope=False, status_finding__risk_accepted=False, ).order_by("numerical_severity") - return findings def active_verified_findings(self): - findings = self.findings.filter( + return self.findings.filter( active=True, verified=True, out_of_scope=False, @@ -1868,7 +1866,6 @@ def active_verified_findings(self): status_finding__out_of_scope=False, status_finding__risk_accepted=False, ).order_by("numerical_severity") - return findings @property def active_findings_count(self): @@ -1912,7 +1909,7 @@ def host_findings_count(self): return self.host_findings().count() def host_active_findings(self): - findings = Finding.objects.filter( + return Finding.objects.filter( active=True, out_of_scope=False, mitigated__isnull=True, @@ -1923,10 +1920,9 @@ def host_active_findings(self): status_finding__risk_accepted=False, endpoints__in=self.host_endpoints(), ).order_by("numerical_severity") - 
return findings def host_active_verified_findings(self): - findings = Finding.objects.filter( + return Finding.objects.filter( active=True, verified=True, out_of_scope=False, @@ -1938,7 +1934,6 @@ def host_active_verified_findings(self): status_finding__risk_accepted=False, endpoints__in=self.host_endpoints(), ).order_by("numerical_severity") - return findings @property def host_active_findings_count(self): @@ -2213,8 +2208,7 @@ def get_queryset(self): super_query = super_query.annotate(created_findings_count=Count("findings", filter=Q(test_import_finding_action__action=IMPORT_CREATED_FINDING))) super_query = super_query.annotate(closed_findings_count=Count("findings", filter=Q(test_import_finding_action__action=IMPORT_CLOSED_FINDING))) super_query = super_query.annotate(reactivated_findings_count=Count("findings", filter=Q(test_import_finding_action__action=IMPORT_REACTIVATED_FINDING))) - super_query = super_query.annotate(untouched_findings_count=Count("findings", filter=Q(test_import_finding_action__action=IMPORT_UNTOUCHED_FINDING))) - return super_query + return super_query.annotate(untouched_findings_count=Count("findings", filter=Q(test_import_finding_action__action=IMPORT_UNTOUCHED_FINDING))) class Meta: ordering = ("-id",) @@ -2871,53 +2865,47 @@ def hash_fields(self, fields_to_hash): def duplicate_finding_set(self): if self.duplicate: if self.duplicate_finding is not None: - originals = Finding.objects.get( + return Finding.objects.get( id=self.duplicate_finding.id).original_finding.all().order_by("title") - return originals # we need to add the duplicate_finding here as well - else: - return [] - else: - return self.original_finding.all().order_by("title") + return [] + return self.original_finding.all().order_by("title") def get_scanner_confidence_text(self): if self.scanner_confidence and isinstance(self.scanner_confidence, int): if self.scanner_confidence <= 2: return "Certain" - elif self.scanner_confidence >= 3 and self.scanner_confidence <= 5: + if 
self.scanner_confidence >= 3 and self.scanner_confidence <= 5: return "Firm" - else: - return "Tentative" + return "Tentative" return "" @staticmethod def get_numerical_severity(severity): if severity == "Critical": return "S0" - elif severity == "High": + if severity == "High": return "S1" - elif severity == "Medium": + if severity == "Medium": return "S2" - elif severity == "Low": + if severity == "Low": return "S3" - elif severity == "Info": + if severity == "Info": return "S4" - else: - return "S5" + return "S5" @staticmethod def get_number_severity(severity): if severity == "Critical": return 4 - elif severity == "High": + if severity == "High": return 3 - elif severity == "Medium": + if severity == "Medium": return 2 - elif severity == "Low": + if severity == "Low": return 1 - elif severity == "Info": + if severity == "Info": return 0 - else: - return 5 + return 5 @staticmethod def get_severity(num_severity): @@ -2991,8 +2979,7 @@ def sla_age(self): def get_sla_start_date(self): if self.sla_start_date: return self.sla_start_date - else: - return self.date + return self.date def get_sla_period(self): sla_configuration = SLA_Configuration.objects.filter(id=self.test.engagement.product.sla_configuration_id).first() @@ -3003,7 +2990,7 @@ def get_sla_period(self): def set_sla_expiration_date(self): system_settings = System_Settings.objects.get() if not system_settings.enable_finding_sla: - return None + return days_remaining = None sla_period, enforce_period = self.get_sla_period() @@ -3011,7 +2998,7 @@ def set_sla_expiration_date(self): days_remaining = sla_period - self.sla_age else: self.sla_expiration_date = Finding().sla_expiration_date - return None + return if days_remaining: if self.mitigated: @@ -3029,8 +3016,7 @@ def sla_days_remaining(self): if isinstance(mitigated_date, datetime): mitigated_date = self.mitigated.date() return (self.sla_expiration_date - mitigated_date).days - else: - return (self.sla_expiration_date - get_current_date()).days + return 
(self.sla_expiration_date - get_current_date()).days return None def sla_deadline(self): @@ -3072,9 +3058,8 @@ def has_jira_issue(self): @cached_property def finding_group(self): - group = self.finding_group_set.all().first() + return self.finding_group_set.all().first() # logger.debug('finding.finding_group: %s', group) - return group @cached_property def has_jira_group_issue(self): @@ -3124,21 +3109,20 @@ def get_valid_request_response_pairs(self): # Get a list of all req/resp pairs all_req_resps = self.burprawrequestresponse_set.all() # Filter away those that do not have any contents - valid_req_resps = all_req_resps.exclude( + return all_req_resps.exclude( burpRequestBase64__exact=empty_value, burpResponseBase64__exact=empty_value, ) - return valid_req_resps - def get_report_requests(self): # Get the list of request response pairs that are non empty request_response_pairs = self.get_valid_request_response_pairs() # Determine how many to return if request_response_pairs.count() >= 3: return request_response_pairs[0:3] - elif request_response_pairs.count() > 0: + if request_response_pairs.count() > 0: return request_response_pairs + return None def get_request(self): # Get the list of request response pairs that are non empty @@ -3156,8 +3140,7 @@ def get_response(self): reqres = request_response_pairs.first() res = base64.b64decode(reqres.burpResponseBase64) # Removes all blank lines - res = re.sub(r"\n\s*\n", "\n", res) - return res + return re.sub(r"\n\s*\n", "\n", res) def latest_note(self): if self.notes.all(): @@ -3243,8 +3226,7 @@ def bitbucket_standalone_prepare_scm_base_link(self, uri): project = parts_project[0] if project.startswith("~"): return parts_scm[0] + "/users/" + parts_project[0][1:] + "/repos/" + parts_project[1] + "/browse" - else: - return parts_scm[0] + "/projects/" + parts_project[0] + "/repos/" + parts_project[1] + "/browse" + return parts_scm[0] + "/projects/" + parts_project[0] + "/repos/" + parts_project[1] + "/browse" def 
bitbucket_standalone_prepare_scm_link(self, uri): # if commit hash or branch/tag is set for engagement/test - @@ -3329,9 +3311,7 @@ def vulnerability_ids(self): vulnerability_ids = [self.cve] # Remove duplicates - vulnerability_ids = list(dict.fromkeys(vulnerability_ids)) - - return vulnerability_ids + return list(dict.fromkeys(vulnerability_ids)) def inherit_tags(self, potentially_existing_tags): # get a copy of the tags to be inherited @@ -3519,9 +3499,8 @@ def get_absolute_url(self): return reverse("edit_template", args=[str(self.id)]) def get_breadcrumbs(self): - bc = [{"title": str(self), + return [{"title": str(self), "url": reverse("view_template", args=(self.id,))}] - return bc @cached_property def vulnerability_ids(self): @@ -3542,9 +3521,7 @@ def vulnerability_ids(self): vulnerability_ids = [self.cve] # Remove duplicates - vulnerability_ids = list(dict.fromkeys(vulnerability_ids)) - - return vulnerability_ids + return list(dict.fromkeys(vulnerability_ids)) class Vulnerability_Id_Template(models.Model): @@ -3592,10 +3569,9 @@ class Check_List(models.Model): def get_status(pass_fail): if pass_fail == "Pass": return "success" - elif pass_fail == "Fail": + if pass_fail == "Fail": return "danger" - else: - return "warning" + return "warning" def get_breadcrumb(self): bc = self.engagement.get_breadcrumb() @@ -3616,8 +3592,7 @@ def get_request(self): def get_response(self): res = str(base64.b64decode(self.burpResponseBase64), errors="ignore") # Removes all blank lines - res = re.sub(r"\n\s*\n", "\n", res) - return res + return re.sub(r"\n\s*\n", "\n", res) class Risk_Acceptance(models.Model): @@ -3873,16 +3848,15 @@ def false_positive_resolutions(self): def get_priority(self, status): if status == "Info": return self.info_mapping_severity - elif status == "Low": + if status == "Low": return self.low_mapping_severity - elif status == "Medium": + if status == "Medium": return self.medium_mapping_severity - elif status == "High": + if status == "High": return 
self.high_mapping_severity - elif status == "Critical": + if status == "Critical": return self.critical_mapping_severity - else: - return "N/A" + return "N/A" # declare form here as we can't import forms.py due to circular imports not even locally @@ -4015,12 +3989,14 @@ def set_obj(self, obj): NOTIFICATION_CHOICE_SLACK = ("slack", "slack") NOTIFICATION_CHOICE_MSTEAMS = ("msteams", "msteams") NOTIFICATION_CHOICE_MAIL = ("mail", "mail") +NOTIFICATION_CHOICE_WEBHOOKS = ("webhooks", "webhooks") NOTIFICATION_CHOICE_ALERT = ("alert", "alert") NOTIFICATION_CHOICES = ( NOTIFICATION_CHOICE_SLACK, NOTIFICATION_CHOICE_MSTEAMS, NOTIFICATION_CHOICE_MAIL, + NOTIFICATION_CHOICE_WEBHOOKS, NOTIFICATION_CHOICE_ALERT, ) @@ -4109,6 +4085,33 @@ def get_list_display(self, request): return list_fields +class Notification_Webhooks(models.Model): + class Status(models.TextChoices): + __STATUS_ACTIVE = "active" + __STATUS_INACTIVE = "inactive" + STATUS_ACTIVE = f"{__STATUS_ACTIVE}", _("Active") + STATUS_ACTIVE_TMP = f"{__STATUS_ACTIVE}_tmp", _("Active but 5xx (or similar) error detected") + STATUS_INACTIVE_TMP = f"{__STATUS_INACTIVE}_tmp", _("Temporary inactive because of 5xx (or similar) error") + STATUS_INACTIVE_PERMANENT = f"{__STATUS_INACTIVE}_permanent", _("Permanently inactive") + + name = models.CharField(max_length=100, default="", blank=False, unique=True, + help_text=_("Name of the incoming webhook")) + url = models.URLField(max_length=200, default="", blank=False, + help_text=_("The full URL of the incoming webhook")) + header_name = models.CharField(max_length=100, default="", blank=True, null=True, + help_text=_("Name of the header required for interacting with Webhook endpoint")) + header_value = models.CharField(max_length=100, default="", blank=True, null=True, + help_text=_("Content of the header required for interacting with Webhook endpoint")) + status = models.CharField(max_length=20, choices=Status, default="active", blank=False, + help_text=_("Status of the incoming 
webhook"), editable=False) + first_error = models.DateTimeField(help_text=_("If endpoint is active, when error happened first time"), blank=True, null=True, editable=False) + last_error = models.DateTimeField(help_text=_("If endpoint is active, when error happened last time"), blank=True, null=True, editable=False) + note = models.CharField(max_length=1000, default="", blank=True, null=True, help_text=_("Description of the latest error"), editable=False) + owner = models.ForeignKey(Dojo_User, editable=True, null=True, blank=True, on_delete=models.CASCADE, + help_text=_("Owner/receiver of notification, if empty processed as system notification")) + # TODO: Test that `editable` will block editing via API + + class Tool_Product_Settings(models.Model): name = models.CharField(max_length=200, null=False) description = models.CharField(max_length=2000, null=True, blank=True) @@ -4563,8 +4566,7 @@ class ChoiceAnswer(Answer): def __str__(self): if len(self.answer.all()): return str(self.answer.all()[0]) - else: - return "No Response" + return "No Response" if settings.ENABLE_AUDITLOG: @@ -4581,6 +4583,7 @@ def __str__(self): auditlog.register(Risk_Acceptance) auditlog.register(Finding_Template) auditlog.register(Cred_User, exclude_fields=["password"]) + auditlog.register(Notification_Webhooks, exclude_fields=["header_name", "header_value"]) from dojo.utils import calculate_grade, to_str_typed # noqa: E402 # there is issue due to a circular import @@ -4642,6 +4645,7 @@ def __str__(self): admin.site.register(GITHUB_Details_Cache) admin.site.register(GITHUB_PKey) admin.site.register(Tool_Configuration, Tool_Configuration_Admin) +admin.site.register(Notification_Webhooks) admin.site.register(Tool_Product_Settings) admin.site.register(Tool_Type) admin.site.register(Cred_User) diff --git a/dojo/notes/views.py b/dojo/notes/views.py index a5947971b8a..6dfca7895d1 100644 --- a/dojo/notes/views.py +++ b/dojo/notes/views.py @@ -123,11 +123,10 @@ def edit_note(request, id, page, 
objid): _("Note edited."), extra_tags="alert-success") return HttpResponseRedirect(reverse(reverse_url, args=(object_id, ))) - else: - messages.add_message(request, - messages.SUCCESS, - _("Note was not succesfully edited."), - extra_tags="alert-danger") + messages.add_message(request, + messages.SUCCESS, + _("Note was not succesfully edited."), + extra_tags="alert-danger") else: if note_type_activation: form = TypedNoteForm(available_note_types=available_note_types, instance=note) @@ -195,5 +194,4 @@ def find_available_notetypes(finding, editing_note): available_note_types.append(note_type_id) available_note_types.append(editing_note.note_type_id) available_note_types = list(set(available_note_types)) - queryset = Note_Type.objects.filter(id__in=available_note_types).order_by("-id") - return queryset + return Note_Type.objects.filter(id__in=available_note_types).order_by("-id") diff --git a/dojo/notifications/helper.py b/dojo/notifications/helper.py index 5a7ccf0dc60..ce3f52bf1a5 100644 --- a/dojo/notifications/helper.py +++ b/dojo/notifications/helper.py @@ -1,6 +1,9 @@ +import json import logging +from datetime import timedelta import requests +import yaml from django.conf import settings from django.core.exceptions import FieldDoesNotExist from django.core.mail import EmailMessage @@ -10,10 +13,19 @@ from django.urls import reverse from django.utils.translation import gettext as _ +from dojo import __version__ as dd_version from dojo.authorization.roles_permissions import Permissions from dojo.celery import app from dojo.decorators import dojo_async_task, we_want_async -from dojo.models import Alerts, Dojo_User, Notifications, System_Settings, UserContactInfo +from dojo.models import ( + Alerts, + Dojo_User, + Notification_Webhooks, + Notifications, + System_Settings, + UserContactInfo, + get_current_datetime, +) from dojo.user.queries import get_authorized_users_for_product_and_product_type, get_authorized_users_for_product_type logger = 
logging.getLogger(__name__) @@ -144,8 +156,9 @@ def create_notification_message(event, user, notification_type, *args, **kwargs) try: notification_message = render_to_string(template, kwargs) logger.debug("Rendering from the template %s", template) - except TemplateDoesNotExist: - logger.debug("template not found or not implemented yet: %s", template) + except TemplateDoesNotExist as e: + # In some cases, template includes another templates, if the interior one is missing, we will see it in "specifically" section + logger.debug(f"template not found or not implemented yet: {template} (specifically: {e.args})") except Exception as e: logger.error("error during rendering of template %s exception is %s", template, e) finally: @@ -170,6 +183,7 @@ def process_notifications(event, notifications=None, **kwargs): slack_enabled = get_system_setting("enable_slack_notifications") msteams_enabled = get_system_setting("enable_msteams_notifications") mail_enabled = get_system_setting("enable_mail_notifications") + webhooks_enabled = get_system_setting("enable_webhooks_notifications") if slack_enabled and "slack" in getattr(notifications, event, getattr(notifications, "other")): logger.debug("Sending Slack Notification") @@ -183,6 +197,10 @@ def process_notifications(event, notifications=None, **kwargs): logger.debug("Sending Mail Notification") send_mail_notification(event, notifications.user, **kwargs) + if webhooks_enabled and "webhooks" in getattr(notifications, event, getattr(notifications, "other")): + logger.debug("Sending Webhooks Notification") + send_webhooks_notification(event, notifications.user, **kwargs) + if "alert" in getattr(notifications, event, getattr(notifications, "other")): logger.debug(f"Sending Alert to {notifications.user}") send_alert_notification(event, notifications.user, **kwargs) @@ -309,6 +327,156 @@ def send_mail_notification(event, user=None, *args, **kwargs): log_alert(e, "Email Notification", title=kwargs["title"], description=str(e), 
url=kwargs["url"]) +def webhooks_notification_request(endpoint, event, *args, **kwargs): + from dojo.utils import get_system_setting + + headers = { + "User-Agent": f"DefectDojo-{dd_version}", + "X-DefectDojo-Event": event, + "X-DefectDojo-Instance": settings.SITE_URL, + "Accept": "application/json", + } + if endpoint.header_name is not None: + headers[endpoint.header_name] = endpoint.header_value + yaml_data = create_notification_message(event, endpoint.owner, "webhooks", *args, **kwargs) + data = yaml.safe_load(yaml_data) + + timeout = get_system_setting("webhooks_notifications_timeout") + + return requests.request( + method="POST", + url=endpoint.url, + headers=headers, + json=data, + timeout=timeout, + ) + + +def test_webhooks_notification(endpoint): + res = webhooks_notification_request(endpoint, "ping", description="Test webhook notification") + res.raise_for_status() + # in "send_webhooks_notification", we are doing deeper analysis, why it failed + # for now, "raise_for_status" should be enough + + +@app.task(ignore_result=True) +def webhook_reactivation(endpoint_id: int, *args, **kwargs): + endpoint = Notification_Webhooks.objects.get(pk=endpoint_id) + + # User already changed status of endpoint + if endpoint.status != Notification_Webhooks.Status.STATUS_INACTIVE_TMP: + return + + endpoint.status = Notification_Webhooks.Status.STATUS_ACTIVE_TMP + endpoint.save() + logger.debug(f"Webhook endpoint '{endpoint.name}' reactivated to '{Notification_Webhooks.Status.STATUS_ACTIVE_TMP}'") + + +@app.task(ignore_result=True) +def webhook_status_cleanup(*args, **kwargs): + # If some endpoint was affected by some outage (5xx, 429, Timeout) but it was clean during last 24 hours, + # we consider this endpoint as healthy so need to reset it + endpoints = Notification_Webhooks.objects.filter( + status=Notification_Webhooks.Status.STATUS_ACTIVE_TMP, + last_error__lt=get_current_datetime() - timedelta(hours=24), + ) + for endpoint in endpoints: + endpoint.status = 
Notification_Webhooks.Status.STATUS_ACTIVE + endpoint.first_error = None + endpoint.last_error = None + endpoint.note = f"Reactivation from {Notification_Webhooks.Status.STATUS_ACTIVE_TMP}" + endpoint.save() + logger.debug(f"Webhook endpoint '{endpoint.name}' reactivated from '{Notification_Webhooks.Status.STATUS_ACTIVE_TMP}' to '{Notification_Webhooks.Status.STATUS_ACTIVE}'") + + # Reactivation of STATUS_INACTIVE_TMP endpoints. + # They should reactive automatically in 60s, however in case of some unexpected event (e.g. start of whole stack), + # endpoints should not be left in STATUS_INACTIVE_TMP state + broken_endpoints = Notification_Webhooks.objects.filter( + status=Notification_Webhooks.Status.STATUS_INACTIVE_TMP, + last_error__lt=get_current_datetime() - timedelta(minutes=5), + ) + for endpoint in broken_endpoints: + webhook_reactivation(endpoint_id=endpoint.pk) + + +@dojo_async_task +@app.task +def send_webhooks_notification(event, user=None, *args, **kwargs): + + ERROR_PERMANENT = "permanent" + ERROR_TEMPORARY = "temporary" + + endpoints = Notification_Webhooks.objects.filter(owner=user) + + if not endpoints: + if user: + logger.info(f"URLs for Webhooks not configured for user '{user}': skipping user notification") + else: + logger.info("URLs for Webhooks not configured: skipping system notification") + return + + for endpoint in endpoints: + + error = None + if endpoint.status not in [Notification_Webhooks.Status.STATUS_ACTIVE, Notification_Webhooks.Status.STATUS_ACTIVE_TMP]: + logger.info(f"URL for Webhook '{endpoint.name}' is not active: {endpoint.get_status_display()} ({endpoint.status})") + continue + + try: + logger.debug(f"Sending webhook message to endpoint '{endpoint.name}'") + res = webhooks_notification_request(endpoint, event, *args, **kwargs) + + if 200 <= res.status_code < 300: + logger.debug(f"Message sent to endpoint '{endpoint.name}' successfully.") + continue + + # HTTP request passed successfully but we still need to check status code + 
if 500 <= res.status_code < 600 or res.status_code == 429: + error = ERROR_TEMPORARY + else: + error = ERROR_PERMANENT + + endpoint.note = f"Response status code: {res.status_code}" + logger.error(f"Error when sending message to Webhooks '{endpoint.name}' (status: {res.status_code}): {res.text}") + + except requests.exceptions.Timeout as e: + error = ERROR_TEMPORARY + endpoint.note = f"Requests exception: {e}" + logger.error(f"Timeout when sending message to Webhook '{endpoint.name}'") + + except Exception as e: + error = ERROR_PERMANENT + endpoint.note = f"Exception: {e}"[:1000] + logger.exception(e) + log_alert(e, "Webhooks Notification") + + now = get_current_datetime() + + if error == ERROR_TEMPORARY: + + # If endpoint is unstable for more then one day, it needs to be deactivated + if endpoint.first_error is not None and (now - endpoint.first_error).total_seconds() > 60 * 60 * 24: + endpoint.status = Notification_Webhooks.Status.STATUS_INACTIVE_PERMANENT + + else: + # We need to monitor when outage started + if endpoint.status == Notification_Webhooks.Status.STATUS_ACTIVE: + endpoint.first_error = now + + endpoint.status = Notification_Webhooks.Status.STATUS_INACTIVE_TMP + + # In case of failure within one day, endpoint can be deactivated temporally only for one minute + webhook_reactivation.apply_async(kwargs={"endpoint_id": endpoint.pk}, countdown=60) + + # There is no reason to keep endpoint active if it is returning 4xx errors + else: + endpoint.status = Notification_Webhooks.Status.STATUS_INACTIVE_PERMANENT + endpoint.first_error = now + + endpoint.last_error = now + endpoint.save() + + def send_alert_notification(event, user=None, *args, **kwargs): logger.debug("sending alert notification to %s", user) try: @@ -335,7 +503,6 @@ def send_alert_notification(event, user=None, *args, **kwargs): def get_slack_user_id(user_email): - import json from dojo.utils import get_system_setting @@ -354,18 +521,17 @@ def get_slack_user_id(user_email): logger.error("Slack 
is complaining. See error message below.") logger.error(user) raise RuntimeError("Error getting user list from Slack: " + res.text) - else: - if "email" in user["user"]["profile"]: - if user_email == user["user"]["profile"]["email"]: - if "id" in user["user"]: - user_id = user["user"]["id"] - logger.debug(f"Slack user ID is {user_id}") - slack_user_is_found = True - else: - logger.warning(f"A user with email {user_email} could not be found in this Slack workspace.") - - if not slack_user_is_found: - logger.warning("The Slack user was not found.") + if "email" in user["user"]["profile"]: + if user_email == user["user"]["profile"]["email"]: + if "id" in user["user"]: + user_id = user["user"]["id"] + logger.debug(f"Slack user ID is {user_id}") + slack_user_is_found = True + else: + logger.warning(f"A user with email {user_email} could not be found in this Slack workspace.") + + if not slack_user_is_found: + logger.warning("The Slack user was not found.") return user_id @@ -390,7 +556,7 @@ def log_alert(e, notification_type=None, *args, **kwargs): def notify_test_created(test): title = "Test created for " + str(test.engagement.product) + ": " + str(test.engagement.name) + ": " + str(test) create_notification(event="test_added", title=title, test=test, engagement=test.engagement, product=test.engagement.product, - url=reverse("view_test", args=(test.id,))) + url=reverse("view_test", args=(test.id,)), url_api=reverse("test-detail", args=(test.id,))) def notify_scan_added(test, updated_count, new_findings=[], findings_mitigated=[], findings_reactivated=[], findings_untouched=[]): @@ -410,4 +576,4 @@ def notify_scan_added(test, updated_count, new_findings=[], findings_mitigated=[ create_notification(event=event, title=title, findings_new=new_findings, findings_mitigated=findings_mitigated, findings_reactivated=findings_reactivated, finding_count=updated_count, test=test, engagement=test.engagement, product=test.engagement.product, findings_untouched=findings_untouched, - 
url=reverse("view_test", args=(test.id,))) + url=reverse("view_test", args=(test.id,)), url_api=reverse("test-detail", args=(test.id,))) diff --git a/dojo/notifications/urls.py b/dojo/notifications/urls.py index dc91f7a04e2..6f4cba7bb64 100644 --- a/dojo/notifications/urls.py +++ b/dojo/notifications/urls.py @@ -7,4 +7,8 @@ re_path(r"^notifications/system$", views.SystemNotificationsView.as_view(), name="system_notifications"), re_path(r"^notifications/personal$", views.PersonalNotificationsView.as_view(), name="personal_notifications"), re_path(r"^notifications/template$", views.TemplateNotificationsView.as_view(), name="template_notifications"), + re_path(r"^notifications/webhooks$", views.ListNotificationWebhooksView.as_view(), name="notification_webhooks"), + re_path(r"^notifications/webhooks/add$", views.AddNotificationWebhooksView.as_view(), name="add_notification_webhook"), + re_path(r"^notifications/webhooks/(?P\d+)/edit$", views.EditNotificationWebhooksView.as_view(), name="edit_notification_webhook"), + re_path(r"^notifications/webhooks/(?P\d+)/delete$", views.DeleteNotificationWebhooksView.as_view(), name="delete_notification_webhook"), ] diff --git a/dojo/notifications/views.py b/dojo/notifications/views.py index 8a94d2ad7c5..7fe5562ee7e 100644 --- a/dojo/notifications/views.py +++ b/dojo/notifications/views.py @@ -1,15 +1,18 @@ import logging +import requests from django.contrib import messages from django.core.exceptions import PermissionDenied -from django.http import HttpRequest -from django.shortcuts import render +from django.http import Http404, HttpRequest, HttpResponseRedirect +from django.shortcuts import get_object_or_404, render +from django.urls import reverse from django.utils.translation import gettext as _ from django.views import View -from dojo.forms import NotificationsForm -from dojo.models import Notifications -from dojo.utils import add_breadcrumb, get_enabled_notifications_list +from dojo.forms import 
DeleteNotificationsWebhookForm, NotificationsForm, NotificationsWebhookForm +from dojo.models import Notification_Webhooks, Notifications +from dojo.notifications.helper import test_webhooks_notification +from dojo.utils import add_breadcrumb, get_enabled_notifications_list, get_system_setting logger = logging.getLogger(__name__) @@ -129,3 +132,292 @@ def get_scope(self): def set_breadcrumbs(self, request: HttpRequest): add_breadcrumb(title=_("Template notification settings"), top_level=False, request=request) return request + + +class NotificationWebhooksView(View): + + def check_webhooks_enabled(self): + if not get_system_setting("enable_webhooks_notifications"): + raise Http404 + + def check_user_permissions(self, request: HttpRequest): + if not request.user.is_superuser: + raise PermissionDenied + # TODO: finished access for other users + # if not user_has_configuration_permission(request.user, self.permission): + # raise PermissionDenied() + + def set_breadcrumbs(self, request: HttpRequest): + add_breadcrumb(title=self.breadcrumb, top_level=False, request=request) + return request + + def get_form( + self, + request: HttpRequest, + **kwargs: dict, + ) -> NotificationsWebhookForm: + if request.method == "POST": + return NotificationsWebhookForm(request.POST, is_superuser=request.user.is_superuser, **kwargs) + return NotificationsWebhookForm(is_superuser=request.user.is_superuser, **kwargs) + + def preprocess_request(self, request: HttpRequest): + # Check Webhook notifications are enabled + self.check_webhooks_enabled() + # Check permissions + self.check_user_permissions(request) + + +class ListNotificationWebhooksView(NotificationWebhooksView): + template = "dojo/view_notification_webhooks.html" + permission = "dojo.view_notification_webhooks" + breadcrumb = "Notification Webhook List" + + def get_initial_context(self, request: HttpRequest, nwhs: Notification_Webhooks): + return { + "name": "Notification Webhook List", + "metric": False, + "user": request.user, 
+ "nwhs": nwhs, + } + + def get_notification_webhooks(self, request: HttpRequest): + return Notification_Webhooks.objects.all().order_by("name") + # TODO: finished pagination + # TODO: restrict based on user - not only superadmins have access and they see everything + + def get(self, request: HttpRequest): + # Run common checks + super().preprocess_request(request) + # Get Notification Webhooks + nwhs = self.get_notification_webhooks(request) + # Set up the initial context + context = self.get_initial_context(request, nwhs) + # Add any breadcrumbs + request = self.set_breadcrumbs(request) + # Render the page + return render(request, self.template, context) + + +class AddNotificationWebhooksView(NotificationWebhooksView): + template = "dojo/add_notification_webhook.html" + permission = "dojo.add_notification_webhooks" + breadcrumb = "Add Notification Webhook" + + # TODO: Disable Owner if not superadmin + + def get_initial_context(self, request: HttpRequest): + return { + "name": "Add Notification Webhook", + "user": request.user, + "form": self.get_form(request), + } + + def process_form(self, request: HttpRequest, context: dict): + form = context["form"] + if form.is_valid(): + try: + test_webhooks_notification(form.instance) + except requests.exceptions.RequestException as e: + messages.add_message( + request, + messages.ERROR, + _("Test of endpoint was not successful: %(error)s") % {"error": str(e)}, + extra_tags="alert-danger", + ) + return request, False + else: + # User can put here what ever he want + # we override it with our only valid defaults + nwh = form.save(commit=False) + nwh.status = Notification_Webhooks.Status.STATUS_ACTIVE + nwh.first_error = None + nwh.last_error = None + nwh.note = None + nwh.save() + messages.add_message( + request, + messages.SUCCESS, + _("Notification Webhook added successfully."), + extra_tags="alert-success", + ) + return request, True + return request, False + + def get(self, request: HttpRequest): + # Run common checks + 
super().preprocess_request(request) + # Set up the initial context + context = self.get_initial_context(request) + # Add any breadcrumbs + request = self.set_breadcrumbs(request) + # Render the page + return render(request, self.template, context) + + def post(self, request: HttpRequest): + # Run common checks + super().preprocess_request(request) + # Set up the initial context + context = self.get_initial_context(request) + # Determine the validity of the form + request, success = self.process_form(request, context) + if success: + return HttpResponseRedirect(reverse("notification_webhooks")) + # Add any breadcrumbs + request = self.set_breadcrumbs(request) + # Render the page + return render(request, self.template, context) + + +class EditNotificationWebhooksView(NotificationWebhooksView): + template = "dojo/edit_notification_webhook.html" + permission = "dojo.change_notification_webhooks" + # TODO: this could be better: @user_is_authorized(Finding, Permissions.Finding_Delete, 'fid') + breadcrumb = "Edit Notification Webhook" + + def get_notification_webhook(self, nwhid: int): + return get_object_or_404(Notification_Webhooks, id=nwhid) + + # TODO: Disable Owner if not superadmin + + def get_initial_context(self, request: HttpRequest, nwh: Notification_Webhooks): + return { + "name": "Edit Notification Webhook", + "user": request.user, + "form": self.get_form(request, instance=nwh), + "nwh": nwh, + } + + def process_form(self, request: HttpRequest, nwh: Notification_Webhooks, context: dict): + form = context["form"] + if "deactivate_webhook" in request.POST: # TODO: add this to API as well + nwh.status = Notification_Webhooks.Status.STATUS_INACTIVE_PERMANENT + nwh.first_error = None + nwh.last_error = None + nwh.note = "Deactivate from UI" + nwh.save() + messages.add_message( + request, + messages.SUCCESS, + _("Notification Webhook deactivated successfully."), + extra_tags="alert-success", + ) + return request, True + + if form.is_valid(): + try: + 
test_webhooks_notification(form.instance) + except requests.exceptions.RequestException as e: + messages.add_message( + request, + messages.ERROR, + _("Test of endpoint was not successful: %(error)s") % {"error": str(e)}, + extra_tags="alert-danger") + return request, False + else: + # correct definition reset defaults + nwh = form.save(commit=False) + nwh.status = Notification_Webhooks.Status.STATUS_ACTIVE + nwh.first_error = None + nwh.last_error = None + nwh.note = None + nwh.save() + messages.add_message( + request, + messages.SUCCESS, + _("Notification Webhook updated successfully."), + extra_tags="alert-success", + ) + return request, True + return request, False + + def get(self, request: HttpRequest, nwhid: int): + # Run common checks + super().preprocess_request(request) + nwh = self.get_notification_webhook(nwhid) + # Set up the initial context + context = self.get_initial_context(request, nwh) + # Add any breadcrumbs + request = self.set_breadcrumbs(request) + # Render the page + return render(request, self.template, context) + + def post(self, request: HttpRequest, nwhid: int): + # Run common checks + super().preprocess_request(request) + nwh = self.get_notification_webhook(nwhid) + # Set up the initial context + context = self.get_initial_context(request, nwh) + # Determine the validity of the form + request, success = self.process_form(request, nwh, context) + if success: + return HttpResponseRedirect(reverse("notification_webhooks")) + # Add any breadcrumbs + request = self.set_breadcrumbs(request) + # Render the page + return render(request, self.template, context) + + +class DeleteNotificationWebhooksView(NotificationWebhooksView): + template = "dojo/delete_notification_webhook.html" + permission = "dojo.delete_notification_webhooks" + # TODO: this could be better: @user_is_authorized(Finding, Permissions.Finding_Delete, 'fid') + breadcrumb = "Edit Notification Webhook" + + def get_notification_webhook(self, nwhid: int): + return 
get_object_or_404(Notification_Webhooks, id=nwhid) + + # TODO: Disable Owner if not superadmin + + def get_form( + self, + request: HttpRequest, + **kwargs: dict, + ) -> NotificationsWebhookForm: + if request.method == "POST": + return DeleteNotificationsWebhookForm(request.POST, **kwargs) + return DeleteNotificationsWebhookForm(**kwargs) + + def get_initial_context(self, request: HttpRequest, nwh: Notification_Webhooks): + return { + "form": self.get_form(request, instance=nwh), + "nwh": nwh, + } + + def process_form(self, request: HttpRequest, nwh: Notification_Webhooks, context: dict): + form = context["form"] + if form.is_valid(): + nwh.delete() + messages.add_message( + request, + messages.SUCCESS, + _("Notification Webhook deleted successfully."), + extra_tags="alert-success", + ) + return request, True + return request, False + + def get(self, request: HttpRequest, nwhid: int): + # Run common checks + super().preprocess_request(request) + nwh = self.get_notification_webhook(nwhid) + # Set up the initial context + context = self.get_initial_context(request, nwh) + # Add any breadcrumbs + request = self.set_breadcrumbs(request) + # Render the page + return render(request, self.template, context) + + def post(self, request: HttpRequest, nwhid: int): + # Run common checks + super().preprocess_request(request) + nwh = self.get_notification_webhook(nwhid) + # Set up the initial context + context = self.get_initial_context(request, nwh) + # Determine the validity of the form + request, success = self.process_form(request, nwh, context) + if success: + return HttpResponseRedirect(reverse("notification_webhooks")) + # Add any breadcrumbs + request = self.set_breadcrumbs(request) + # Render the page + return render(request, self.template, context) diff --git a/dojo/object/views.py b/dojo/object/views.py index dfb4f590556..0cca584b0be 100644 --- a/dojo/object/views.py +++ b/dojo/object/views.py @@ -30,14 +30,14 @@ def new_object(request, pid): "Added Tracked File to a 
Product", extra_tags="alert-success") return HttpResponseRedirect(reverse("view_objects", args=(pid,))) - else: - tform = ObjectSettingsForm() - product_tab = Product_Tab(prod, title="Add Tracked Files to a Product", tab="settings") + return None + tform = ObjectSettingsForm() + product_tab = Product_Tab(prod, title="Add Tracked Files to a Product", tab="settings") - return render(request, "dojo/new_object.html", - {"tform": tform, - "product_tab": product_tab, - "pid": prod.id}) + return render(request, "dojo/new_object.html", + {"tform": tform, + "product_tab": product_tab, + "pid": prod.id}) @user_is_authorized(Product, Permissions.Product_Tracking_Files_View, "pid") @@ -101,8 +101,7 @@ def delete_object(request, pid, ttid): "Tracked Product Files Deleted.", extra_tags="alert-success") return HttpResponseRedirect(reverse("view_objects", args=(pid,))) - else: - tform = DeleteObjectsSettingsForm(instance=object) + tform = DeleteObjectsSettingsForm(instance=object) product_tab = Product_Tab(product, title="Delete Product Tool Configuration", tab="settings") return render(request, diff --git a/dojo/pipeline.py b/dojo/pipeline.py index ea020d2d926..ee2dc0ae186 100644 --- a/dojo/pipeline.py +++ b/dojo/pipeline.py @@ -31,7 +31,7 @@ def social_uid(backend, details, response, *args, **kwargs): "first_name": first_name, "last_name": last_name, "uid": uid} - elif settings.GOOGLE_OAUTH_ENABLED and isinstance(backend, GoogleOAuth2): + if settings.GOOGLE_OAUTH_ENABLED and isinstance(backend, GoogleOAuth2): """Return user details from Google account""" if "sub" in response: google_uid = response["sub"] @@ -51,15 +51,13 @@ def social_uid(backend, details, response, *args, **kwargs): "first_name": first_name, "last_name": last_name, "uid": google_uid} - else: - uid = backend.get_user_id(details, response) - # Used for most backends - if uid: - return {"uid": uid} - # Until OKTA PR in social-core is merged - # This modified way needs to work - else: - return {"uid": 
response.get("preferred_username")} + uid = backend.get_user_id(details, response) + # Used for most backends + if uid: + return {"uid": uid} + # Until OKTA PR in social-core is merged + # This modified way needs to work + return {"uid": response.get("preferred_username")} def modify_permissions(backend, uid, user=None, social=None, *args, **kwargs): @@ -107,8 +105,7 @@ def update_azure_groups(backend, uid, user=None, social=None, *args, **kwargs): def is_group_id(group): if re.search(r"^[a-zA-Z0-9]{8,}-[a-zA-Z0-9]{4,}-[a-zA-Z0-9]{4,}-[a-zA-Z0-9]{4,}-[a-zA-Z0-9]{12,}$", group): return True - else: - return False + return False def assign_user_to_groups(user, group_names, social_provider): @@ -183,7 +180,6 @@ def sanitize_username(username): def create_user(strategy, details, backend, user=None, *args, **kwargs): if not settings.SOCIAL_AUTH_CREATE_USER: - return - else: - details["username"] = sanitize_username(details.get("username")) - return social_core.pipeline.user.create_user(strategy, details, backend, user, args, kwargs) + return None + details["username"] = sanitize_username(details.get("username")) + return social_core.pipeline.user.create_user(strategy, details, backend, user, args, kwargs) diff --git a/dojo/product/queries.py b/dojo/product/queries.py index 8d562c0f9a4..69532212a59 100644 --- a/dojo/product/queries.py +++ b/dojo/product/queries.py @@ -13,6 +13,7 @@ App_Analysis, DojoMeta, Engagement_Presets, + Global_Role, Languages, Product, Product_API_Scan_Configuration, @@ -59,20 +60,25 @@ def get_authorized_products(permission, user=None): member=Exists(authorized_product_roles), prod_type__authorized_group=Exists(authorized_product_type_groups), authorized_group=Exists(authorized_product_groups)).order_by("name") - products = products.filter( + return products.filter( Q(prod_type__member=True) | Q(member=True) | Q(prod_type__authorized_group=True) | Q(authorized_group=True)) - return products - def get_authorized_members_for_product(product, 
permission): user = get_current_user() if user.is_superuser or user_has_permission(user, product, permission): return Product_Member.objects.filter(product=product).order_by("user__first_name", "user__last_name").select_related("role", "user") - else: - return None + return Product_Member.objects.none() + + +def get_authorized_global_members_for_product(product, permission): + user = get_current_user() + + if user.is_superuser or user_has_permission(user, product, permission): + return Global_Role.objects.filter(group=None, role__isnull=False).order_by("user__first_name", "user__last_name").select_related("role", "user") + return Global_Role.objects.none() def get_authorized_groups_for_product(product, permission): @@ -81,8 +87,15 @@ def get_authorized_groups_for_product(product, permission): if user.is_superuser or user_has_permission(user, product, permission): authorized_groups = get_authorized_groups(Permissions.Group_View) return Product_Group.objects.filter(product=product, group__in=authorized_groups).order_by("group__name").select_related("role") - else: - return None + return Product_Group.objects.none() + + +def get_authorized_global_groups_for_product(product, permission): + user = get_current_user() + + if user.is_superuser or user_has_permission(user, product, permission): + return Global_Role.objects.filter(user=None, role__isnull=False).order_by("group__name").select_related("role") + return Global_Role.objects.none() def get_authorized_product_members(permission): @@ -164,12 +177,10 @@ def get_authorized_app_analysis(permission): product__member=Exists(authorized_product_roles), product__prod_type__authorized_group=Exists(authorized_product_type_groups), product__authorized_group=Exists(authorized_product_groups)).order_by("id") - app_analysis = app_analysis.filter( + return app_analysis.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - return 
app_analysis - def get_authorized_dojo_meta(permission): user = get_current_user() @@ -246,7 +257,7 @@ def get_authorized_dojo_meta(permission): finding__test__engagement__product__prod_type__authorized_group=Exists(finding_authorized_product_type_groups), finding__test__engagement__product__authorized_group=Exists(finding_authorized_product_groups), ).order_by("id") - dojo_meta = dojo_meta.filter( + return dojo_meta.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) @@ -260,8 +271,6 @@ def get_authorized_dojo_meta(permission): | Q(finding__test__engagement__product__prod_type__authorized_group=True) | Q(finding__test__engagement__product__authorized_group=True)) - return dojo_meta - def get_authorized_languages(permission): user = get_current_user() @@ -297,12 +306,10 @@ def get_authorized_languages(permission): product__member=Exists(authorized_product_roles), product__prod_type__authorized_group=Exists(authorized_product_type_groups), product__authorized_group=Exists(authorized_product_groups)).order_by("id") - languages = languages.filter( + return languages.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - return languages - def get_authorized_engagement_presets(permission): user = get_current_user() @@ -338,12 +345,10 @@ def get_authorized_engagement_presets(permission): product__member=Exists(authorized_product_roles), product__prod_type__authorized_group=Exists(authorized_product_type_groups), product__authorized_group=Exists(authorized_product_groups)).order_by("id") - engagement_presets = engagement_presets.filter( + return engagement_presets.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - return engagement_presets - def get_authorized_product_api_scan_configurations(permission): 
user = get_current_user() @@ -379,8 +384,6 @@ def get_authorized_product_api_scan_configurations(permission): product__member=Exists(authorized_product_roles), product__prod_type__authorized_group=Exists(authorized_product_type_groups), product__authorized_group=Exists(authorized_product_groups)).order_by("id") - product_api_scan_configurations = product_api_scan_configurations.filter( + return product_api_scan_configurations.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - - return product_api_scan_configurations diff --git a/dojo/product/signals.py b/dojo/product/signals.py index 6871f5490d2..72e9771e82c 100644 --- a/dojo/product/signals.py +++ b/dojo/product/signals.py @@ -16,7 +16,9 @@ def product_post_save(sender, instance, created, **kwargs): create_notification(event="product_added", title=instance.name, product=instance, - url=reverse("view_product", args=(instance.id,))) + url=reverse("view_product", args=(instance.id,)), + url_api=reverse("product-detail", args=(instance.id,)), + ) @receiver(post_delete, sender=Product) diff --git a/dojo/product/views.py b/dojo/product/views.py index e887938d450..81386940a6d 100644 --- a/dojo/product/views.py +++ b/dojo/product/views.py @@ -92,6 +92,8 @@ Test_Type, ) from dojo.product.queries import ( + get_authorized_global_groups_for_product, + get_authorized_global_members_for_product, get_authorized_groups_for_product, get_authorized_members_for_product, get_authorized_products, @@ -213,8 +215,10 @@ def view_product(request, pid): .prefetch_related("prod_type__members") prod = get_object_or_404(prod_query, id=pid) product_members = get_authorized_members_for_product(prod, Permissions.Product_View) + global_product_members = get_authorized_global_members_for_product(prod, Permissions.Product_View) product_type_members = get_authorized_members_for_product_type(prod.prod_type, Permissions.Product_Type_View) 
product_groups = get_authorized_groups_for_product(prod, Permissions.Product_View) + global_product_groups = get_authorized_global_groups_for_product(prod, Permissions.Product_View) product_type_groups = get_authorized_groups_for_product_type(prod.prod_type, Permissions.Product_Type_View) personal_notifications_form = ProductNotificationsForm( instance=Notifications.objects.filter(user=request.user).filter(product=prod).first()) @@ -291,8 +295,10 @@ def view_product(request, pid): "benchmarks_percents": benchAndPercent, "benchmarks": benchmarks, "product_members": product_members, + "global_product_members": global_product_members, "product_type_members": product_type_members, "product_groups": product_groups, + "global_product_groups": global_product_groups, "product_type_groups": product_type_groups, "personal_notifications_form": personal_notifications_form, "enabled_notifications": get_enabled_notifications_list(), @@ -349,11 +355,10 @@ def identify_view(request): return view msg = 'invalid view, view must be "Endpoint" or "Finding"' raise ValueError(msg) - else: - if get_data.get("finding__severity", None): - return "Endpoint" - elif get_data.get("false_positive", None): - return "Endpoint" + if get_data.get("finding__severity", None): + return "Endpoint" + if get_data.get("false_positive", None): + return "Endpoint" referer = request.META.get("HTTP_REFERER", None) if referer: if referer.find("type=Endpoint") > -1: @@ -904,9 +909,8 @@ def new_product(request, ptid=None): if not error: return HttpResponseRedirect(reverse("view_product", args=(product.id,))) - else: - # engagement was saved, but JIRA errors, so goto edit_product - return HttpResponseRedirect(reverse("edit_product", args=(product.id,))) + # engagement was saved, but JIRA errors, so goto edit_product + return HttpResponseRedirect(reverse("edit_product", args=(product.id,))) else: if get_system_setting("enable_jira"): jira_project_form = JIRAProjectForm() @@ -1029,9 +1033,8 @@ def 
delete_product(request, pid): extra_tags="alert-success") logger.debug("delete_product: POST RETURN") return HttpResponseRedirect(reverse("product")) - else: - logger.debug("delete_product: POST INVALID FORM") - logger.error(form.errors) + logger.debug("delete_product: POST INVALID FORM") + logger.error(form.errors) logger.debug("delete_product: GET") @@ -1104,16 +1107,13 @@ def new_eng_for_app(request, pid, cicd=False): if not error: if "_Add Tests" in request.POST: return HttpResponseRedirect(reverse("add_tests", args=(engagement.id,))) - elif "_Import Scan Results" in request.POST: + if "_Import Scan Results" in request.POST: return HttpResponseRedirect(reverse("import_scan_results", args=(engagement.id,))) - else: - return HttpResponseRedirect(reverse("view_engagement", args=(engagement.id,))) - else: - # engagement was saved, but JIRA errors, so goto edit_engagement - logger.debug("new_eng_for_app: jira errors") - return HttpResponseRedirect(reverse("edit_engagement", args=(engagement.id,))) - else: - logger.debug(form.errors) + return HttpResponseRedirect(reverse("view_engagement", args=(engagement.id,))) + # engagement was saved, but JIRA errors, so goto edit_engagement + logger.debug("new_eng_for_app: jira errors") + return HttpResponseRedirect(reverse("edit_engagement", args=(engagement.id,))) + logger.debug(form.errors) else: form = EngForm(initial={"lead": request.user, "target_start": timezone.now().date(), "target_end": timezone.now().date() + timedelta(days=7), "product": product}, cicd=cicd, @@ -1223,8 +1223,7 @@ def add_meta_data(request, pid): extra_tags="alert-success") if "add_another" in request.POST: return HttpResponseRedirect(reverse("add_meta_data", args=(pid,))) - else: - return HttpResponseRedirect(reverse("view_product", args=(pid,))) + return HttpResponseRedirect(reverse("view_product", args=(pid,))) else: form = DojoMetaDataForm() @@ -1288,12 +1287,11 @@ def get_engagement(self, product: Product): def get_test(self, engagement: 
Engagement, test_type: Test_Type): if test := Test.objects.filter(engagement=engagement).first(): return test - else: - return Test.objects.create( - engagement=engagement, - test_type=test_type, - target_start=timezone.now(), - target_end=timezone.now()) + return Test.objects.create( + engagement=engagement, + test_type=test_type, + target_start=timezone.now(), + target_end=timezone.now()) def create_nested_objects(self, product: Product): engagement = self.get_engagement(product) @@ -1406,9 +1404,8 @@ def process_finding_form(self, request: HttpRequest, test: Test, context: dict): finding.save() return finding, request, True - else: - add_error_message_to_response("The form has errors, please correct them below.") - add_field_errors_to_response(context["form"]) + add_error_message_to_response("The form has errors, please correct them below.") + add_field_errors_to_response(context["form"]) return finding, request, False @@ -1451,8 +1448,7 @@ def process_jira_form(self, request: HttpRequest, finding: Finding, context: dic ) return request, True, push_to_jira - else: - add_field_errors_to_response(context["jform"]) + add_field_errors_to_response(context["jform"]) return request, False, False @@ -1464,8 +1460,7 @@ def process_github_form(self, request: HttpRequest, finding: Finding, context: d add_external_issue(finding, "github") return request, True - else: - add_field_errors_to_response(context["gform"]) + add_field_errors_to_response(context["gform"]) return request, False @@ -1537,10 +1532,8 @@ def post(self, request: HttpRequest, product_id: int): if success: if "_Finished" in request.POST: return HttpResponseRedirect(reverse("view_test", args=(test.id,))) - else: - return HttpResponseRedirect(reverse("add_findings", args=(test.id,))) - else: - context["form_error"] = True + return HttpResponseRedirect(reverse("add_findings", args=(test.id,))) + context["form_error"] = True # Render the form return render(request, self.get_template(), context) @@ -1720,8 
+1713,7 @@ def edit_product_member(request, memberid): extra_tags="alert-success") if is_title_in_breadcrumbs("View User"): return HttpResponseRedirect(reverse("view_user", args=(member.user.id,))) - else: - return HttpResponseRedirect(reverse("view_product", args=(member.product.id,))) + return HttpResponseRedirect(reverse("view_product", args=(member.product.id,))) product_tab = Product_Tab(member.product, title=_("Edit Product Member"), tab="settings") return render(request, "dojo/edit_product_member.html", { "memberid": memberid, @@ -1745,11 +1737,9 @@ def delete_product_member(request, memberid): extra_tags="alert-success") if is_title_in_breadcrumbs("View User"): return HttpResponseRedirect(reverse("view_user", args=(member.user.id,))) - else: - if user == request.user: - return HttpResponseRedirect(reverse("product")) - else: - return HttpResponseRedirect(reverse("view_product", args=(member.product.id,))) + if user == request.user: + return HttpResponseRedirect(reverse("product")) + return HttpResponseRedirect(reverse("view_product", args=(member.product.id,))) product_tab = Product_Tab(member.product, title=_("Delete Product Member"), tab="settings") return render(request, "dojo/delete_product_member.html", { "memberid": memberid, @@ -1781,8 +1771,7 @@ def add_api_scan_configuration(request, pid): extra_tags="alert-success") if "add_another" in request.POST: return HttpResponseRedirect(reverse("add_api_scan_configuration", args=(pid,))) - else: - return HttpResponseRedirect(reverse("view_api_scan_configurations", args=(pid,))) + return HttpResponseRedirect(reverse("view_api_scan_configurations", args=(pid,))) except Exception as e: logger.exception(e) messages.add_message(request, @@ -1879,8 +1868,7 @@ def delete_api_scan_configuration(request, pid, pascid): _("API Scan Configuration deleted."), extra_tags="alert-success") return HttpResponseRedirect(reverse("view_api_scan_configurations", args=(pid,))) - else: - form = 
DeleteProduct_API_Scan_ConfigurationForm(instance=product_api_scan_configuration) + form = DeleteProduct_API_Scan_ConfigurationForm(instance=product_api_scan_configuration) product_tab = Product_Tab(get_object_or_404(Product, id=pid), title=_("Delete Tool Configuration"), tab="settings") return render(request, @@ -1914,8 +1902,7 @@ def edit_product_group(request, groupid): extra_tags="alert-success") if is_title_in_breadcrumbs("View Group"): return HttpResponseRedirect(reverse("view_group", args=(group.group.id,))) - else: - return HttpResponseRedirect(reverse("view_product", args=(group.product.id,))) + return HttpResponseRedirect(reverse("view_product", args=(group.product.id,))) product_tab = Product_Tab(group.product, title=_("Edit Product Group"), tab="settings") return render(request, "dojo/edit_product_group.html", { @@ -1940,10 +1927,9 @@ def delete_product_group(request, groupid): extra_tags="alert-success") if is_title_in_breadcrumbs("View Group"): return HttpResponseRedirect(reverse("view_group", args=(group.group.id,))) - else: - # TODO: If user was in the group that was deleted and no longer has access, redirect back to product listing - # page - return HttpResponseRedirect(reverse("view_product", args=(group.product.id,))) + # TODO: If user was in the group that was deleted and no longer has access, redirect back to product listing + # page + return HttpResponseRedirect(reverse("view_product", args=(group.product.id,))) product_tab = Product_Tab(group.product, title=_("Delete Product Group"), tab="settings") return render(request, "dojo/delete_product_group.html", { diff --git a/dojo/product_type/queries.py b/dojo/product_type/queries.py index 737584a5b05..1d95ac81170 100644 --- a/dojo/product_type/queries.py +++ b/dojo/product_type/queries.py @@ -9,7 +9,7 @@ ) from dojo.authorization.roles_permissions import Permissions from dojo.group.queries import get_authorized_groups -from dojo.models import Product_Type, Product_Type_Group, Product_Type_Member 
+from dojo.models import Global_Role, Product_Type, Product_Type_Group, Product_Type_Member def get_authorized_product_types(permission): @@ -35,9 +35,7 @@ def get_authorized_product_types(permission): product_types = Product_Type.objects.annotate( member=Exists(authorized_roles), authorized_group=Exists(authorized_groups)).order_by("name") - product_types = product_types.filter(Q(member=True) | Q(authorized_group=True)) - - return product_types + return product_types.filter(Q(member=True) | Q(authorized_group=True)) def get_authorized_members_for_product_type(product_type, permission): @@ -45,8 +43,15 @@ def get_authorized_members_for_product_type(product_type, permission): if user.is_superuser or user_has_permission(user, product_type, permission): return Product_Type_Member.objects.filter(product_type=product_type).order_by("user__first_name", "user__last_name").select_related("role", "product_type", "user") - else: - return None + return Product_Type_Member.objects.none() + + +def get_authorized_global_members_for_product_type(product_type, permission): + user = get_current_user() + + if user.is_superuser or user_has_permission(user, product_type, permission): + return Global_Role.objects.filter(group=None, role__isnull=False).order_by("user__first_name", "user__last_name").select_related("role", "user") + return Global_Role.objects.none() def get_authorized_groups_for_product_type(product_type, permission): @@ -55,8 +60,15 @@ def get_authorized_groups_for_product_type(product_type, permission): if user.is_superuser or user_has_permission(user, product_type, permission): authorized_groups = get_authorized_groups(Permissions.Group_View) return Product_Type_Group.objects.filter(product_type=product_type, group__in=authorized_groups).order_by("group__name").select_related("role", "group") - else: - return None + return Product_Type_Group.objects.none() + + +def get_authorized_global_groups_for_product_type(product_type, permission): + user = get_current_user() + + 
if user.is_superuser or user_has_permission(user, product_type, permission): + return Global_Role.objects.filter(user=None, role__isnull=False).order_by("group__name").select_related("role", "group") + return Global_Role.objects.none() def get_authorized_product_type_members(permission): diff --git a/dojo/product_type/signals.py b/dojo/product_type/signals.py index dde3ff502cd..743995768eb 100644 --- a/dojo/product_type/signals.py +++ b/dojo/product_type/signals.py @@ -16,7 +16,9 @@ def product_type_post_save(sender, instance, created, **kwargs): create_notification(event="product_type_added", title=instance.name, product_type=instance, - url=reverse("view_product_type", args=(instance.id,))) + url=reverse("view_product_type", args=(instance.id,)), + url_api=reverse("product_type-detail", args=(instance.id,)), + ) @receiver(post_delete, sender=Product_Type) diff --git a/dojo/product_type/views.py b/dojo/product_type/views.py index 302aa6dbbf9..8d731245ddc 100644 --- a/dojo/product_type/views.py +++ b/dojo/product_type/views.py @@ -27,6 +27,8 @@ from dojo.models import Product_Type, Product_Type_Group, Product_Type_Member, Role from dojo.product.queries import get_authorized_products from dojo.product_type.queries import ( + get_authorized_global_groups_for_product_type, + get_authorized_global_members_for_product_type, get_authorized_groups_for_product_type, get_authorized_members_for_product_type, get_authorized_product_types, @@ -117,7 +119,9 @@ def view_product_type(request, ptid): page_name = _("View Product Type") pt = get_object_or_404(Product_Type, pk=ptid) members = get_authorized_members_for_product_type(pt, Permissions.Product_Type_View) + global_members = get_authorized_global_members_for_product_type(pt, Permissions.Product_Type_View) groups = get_authorized_groups_for_product_type(pt, Permissions.Product_Type_View) + global_groups = get_authorized_global_groups_for_product_type(pt, Permissions.Product_Type_View) products = 
get_authorized_products(Permissions.Product_View).filter(prod_type=pt) products = get_page_items(request, products, 25) add_breadcrumb(title=page_name, top_level=False, request=request) @@ -126,7 +130,10 @@ def view_product_type(request, ptid): "pt": pt, "products": products, "groups": groups, - "members": members}) + "members": members, + "global_groups": global_groups, + "global_members": global_members, + }) @user_is_authorized(Product_Type, Permissions.Product_Type_Delete, "ptid") @@ -242,8 +249,7 @@ def edit_product_type_member(request, memberid): extra_tags="alert-warning") if is_title_in_breadcrumbs("View User"): return HttpResponseRedirect(reverse("view_user", args=(member.user.id, ))) - else: - return HttpResponseRedirect(reverse("view_product_type", args=(member.product_type.id, ))) + return HttpResponseRedirect(reverse("view_product_type", args=(member.product_type.id, ))) if member.role.is_owner and not user_has_permission(request.user, member.product_type, Permissions.Product_Type_Member_Add_Owner): messages.add_message(request, messages.WARNING, @@ -257,8 +263,7 @@ def edit_product_type_member(request, memberid): extra_tags="alert-success") if is_title_in_breadcrumbs("View User"): return HttpResponseRedirect(reverse("view_user", args=(member.user.id, ))) - else: - return HttpResponseRedirect(reverse("view_product_type", args=(member.product_type.id, ))) + return HttpResponseRedirect(reverse("view_product_type", args=(member.product_type.id, ))) add_breadcrumb(title=page_name, top_level=False, request=request) return render(request, "dojo/edit_product_type_member.html", { "name": page_name, @@ -292,11 +297,9 @@ def delete_product_type_member(request, memberid): extra_tags="alert-success") if is_title_in_breadcrumbs("View User"): return HttpResponseRedirect(reverse("view_user", args=(member.user.id, ))) - else: - if user == request.user: - return HttpResponseRedirect(reverse("product_type")) - else: - return 
HttpResponseRedirect(reverse("view_product_type", args=(member.product_type.id, ))) + if user == request.user: + return HttpResponseRedirect(reverse("product_type")) + return HttpResponseRedirect(reverse("view_product_type", args=(member.product_type.id, ))) add_breadcrumb(title=page_name, top_level=False, request=request) return render(request, "dojo/delete_product_type_member.html", { "name": page_name, @@ -365,8 +368,7 @@ def edit_product_type_group(request, groupid): extra_tags="alert-success") if is_title_in_breadcrumbs("View Group"): return HttpResponseRedirect(reverse("view_group", args=(group.group.id,))) - else: - return HttpResponseRedirect(reverse("view_product_type", args=(group.product_type.id,))) + return HttpResponseRedirect(reverse("view_product_type", args=(group.product_type.id,))) add_breadcrumb(title=page_name, top_level=False, request=request) return render(request, "dojo/edit_product_type_group.html", { @@ -392,10 +394,9 @@ def delete_product_type_group(request, groupid): extra_tags="alert-success") if is_title_in_breadcrumbs("View Group"): return HttpResponseRedirect(reverse("view_group", args=(group.group.id, ))) - else: - # TODO: If user was in the group that was deleted and no longer has access, redirect them to the product - # types page - return HttpResponseRedirect(reverse("view_product_type", args=(group.product_type.id, ))) + # TODO: If user was in the group that was deleted and no longer has access, redirect them to the product + # types page + return HttpResponseRedirect(reverse("view_product_type", args=(group.product_type.id, ))) add_breadcrumb(page_name, top_level=False, request=request) return render(request, "dojo/delete_product_type_group.html", { diff --git a/dojo/regulations/views.py b/dojo/regulations/views.py index f4d5004d074..e9a5f1a9f55 100644 --- a/dojo/regulations/views.py +++ b/dojo/regulations/views.py @@ -45,7 +45,7 @@ def edit_regulations(request, ttid): "Regulation Deleted.", extra_tags="alert-success") return 
HttpResponseRedirect(reverse("regulations")) - elif request.method == "POST": + if request.method == "POST": tform = RegulationForm(request.POST, instance=regulation) if tform.is_valid(): tform.save() diff --git a/dojo/remote_user.py b/dojo/remote_user.py index 44355d9f453..764af4e548b 100644 --- a/dojo/remote_user.py +++ b/dojo/remote_user.py @@ -20,32 +20,28 @@ def authenticate(self, request): self.header = settings.AUTH_REMOTEUSER_USERNAME_HEADER if self.header in request.META: return super().authenticate(request) - else: - return None - else: - logger.debug("Requested came from untrusted proxy %s; This is list of trusted proxies: %s", - IPAddress(request.META["REMOTE_ADDR"]), - settings.AUTH_REMOTEUSER_TRUSTED_PROXY) return None + logger.debug("Requested came from untrusted proxy %s; This is list of trusted proxies: %s", + IPAddress(request.META["REMOTE_ADDR"]), + settings.AUTH_REMOTEUSER_TRUSTED_PROXY) + return None class RemoteUserMiddleware(OriginalRemoteUserMiddleware): def process_request(self, request): if not settings.AUTH_REMOTEUSER_ENABLED: - return + return None # process only if request is comming from the trusted proxy node if IPAddress(request.META["REMOTE_ADDR"]) in settings.AUTH_REMOTEUSER_TRUSTED_PROXY: self.header = settings.AUTH_REMOTEUSER_USERNAME_HEADER if self.header in request.META: return super().process_request(request) - else: - return - else: - logger.debug("Requested came from untrusted proxy %s; This is list of trusted proxies: %s", - IPAddress(request.META["REMOTE_ADDR"]), - settings.AUTH_REMOTEUSER_TRUSTED_PROXY) - return + return None + logger.debug("Requested came from untrusted proxy %s; This is list of trusted proxies: %s", + IPAddress(request.META["REMOTE_ADDR"]), + settings.AUTH_REMOTEUSER_TRUSTED_PROXY) + return None class PersistentRemoteUserMiddleware(RemoteUserMiddleware): diff --git a/dojo/reports/views.py b/dojo/reports/views.py index ca13eae54ca..aacf4369333 100644 --- a/dojo/reports/views.py +++ 
b/dojo/reports/views.py @@ -122,8 +122,7 @@ def post(self, request: HttpRequest) -> HttpResponse: if form.is_valid(): self._set_state(request) return render(request, self.get_template(), self.get_context()) - else: - raise PermissionDenied + raise PermissionDenied def _set_state(self, request: HttpRequest): self.request = request @@ -154,8 +153,7 @@ def get_form(self, request): def get_template(self): if self.report_format == "HTML": return "dojo/custom_html_report.html" - else: - raise PermissionDenied + raise PermissionDenied def get_context(self): return { @@ -310,8 +308,7 @@ def product_endpoint_report(request, pid): "user": request.user, "title": "Generate Report", }) - else: - raise Http404 + raise Http404 product_tab = Product_Tab(product, "Product Endpoint Report", tab="endpoints") return render(request, @@ -351,9 +348,8 @@ def generate_report(request, obj, host_view=False): if obj is None: msg = "No object is given to generate report for" raise Exception(msg) - else: - msg = f"Report cannot be generated for object of type {type(obj).__name__}" - raise Exception(msg) + msg = f"Report cannot be generated for object of type {type(obj).__name__}" + raise Exception(msg) report_format = request.GET.get("report_type", "HTML") include_finding_notes = int(request.GET.get("include_finding_notes", 0)) @@ -584,8 +580,7 @@ def generate_report(request, obj, host_view=False): "context": context, }) - else: - raise Http404 + raise Http404 paged_findings = get_page_items(request, findings.qs.distinct().order_by("numerical_severity"), 25) product_tab = None @@ -654,9 +649,8 @@ def get_findings(request): if not url: msg = "Please use the report button when viewing findings" raise Http404(msg) - else: - if url.startswith("url="): - url = url[4:] + if url.startswith("url="): + url = url[4:] views = ["all", "open", "inactive", "verified", "closed", "accepted", "out_of_scope", diff --git a/dojo/risk_acceptance/helper.py b/dojo/risk_acceptance/helper.py index 
a1d628b33df..1cd1b15cdae 100644 --- a/dojo/risk_acceptance/helper.py +++ b/dojo/risk_acceptance/helper.py @@ -1,4 +1,5 @@ import logging +from contextlib import suppress from dateutil.relativedelta import relativedelta from django.core.exceptions import PermissionDenied @@ -8,7 +9,7 @@ import dojo.jira_link.helper as jira_helper from dojo.celery import app from dojo.jira_link.helper import escape_for_jira -from dojo.models import Finding, Risk_Acceptance, System_Settings +from dojo.models import Dojo_User, Finding, Notes, Risk_Acceptance, System_Settings from dojo.notifications.helper import create_notification from dojo.utils import get_full_url, get_system_setting @@ -102,7 +103,7 @@ def delete(eng, risk_acceptance): risk_acceptance.delete() -def remove_finding_from_risk_acceptance(risk_acceptance, finding): +def remove_finding_from_risk_acceptance(user: Dojo_User, risk_acceptance: Risk_Acceptance, finding: Finding) -> None: logger.debug("removing finding %i from risk acceptance %i", finding.id, risk_acceptance.id) risk_acceptance.accepted_findings.remove(finding) finding.active = True @@ -112,9 +113,20 @@ def remove_finding_from_risk_acceptance(risk_acceptance, finding): finding.save(dedupe_option=False) # best effort jira integration, no status changes post_jira_comments(risk_acceptance, [finding], unaccepted_message_creator) + # Add a note to reflect that the finding was removed from the risk acceptance + if user is not None: + finding.notes.add(Notes.objects.create( + entry=( + f"{Dojo_User.generate_full_name(user)} ({user.id}) removed this finding from the risk acceptance: " + f'"{risk_acceptance.name}" ({get_view_risk_acceptance(risk_acceptance)})' + ), + author=user, + )) + return -def add_findings_to_risk_acceptance(risk_acceptance, findings): + +def add_findings_to_risk_acceptance(user: Dojo_User, risk_acceptance: Risk_Acceptance, findings: list[Finding]) -> None: for finding in findings: if not finding.duplicate or finding.risk_accepted: finding.active 
= False @@ -123,11 +135,21 @@ def add_findings_to_risk_acceptance(risk_acceptance, findings): # Update any endpoint statuses on each of the findings update_endpoint_statuses(finding, accept_risk=True) risk_acceptance.accepted_findings.add(finding) + # Add a note to reflect that the finding was removed from the risk acceptance + if user is not None: + finding.notes.add(Notes.objects.create( + entry=( + f"{Dojo_User.generate_full_name(user)} ({user.id}) added this finding to the risk acceptance: " + f'"{risk_acceptance.name}" ({get_view_risk_acceptance(risk_acceptance)})' + ), + author=user, + )) risk_acceptance.save() - # best effort jira integration, no status changes post_jira_comments(risk_acceptance, findings, accepted_message_creator) + return + @app.task def expiration_handler(*args, **kwargs): @@ -174,6 +196,16 @@ def expiration_handler(*args, **kwargs): risk_acceptance.save() +def get_view_risk_acceptance(risk_acceptance: Risk_Acceptance) -> str: + """Return the full qualified URL of the view risk acceptance page.""" + # Suppressing this error because it does not happen under most circumstances that a risk acceptance does not have engagement + with suppress(AttributeError): + get_full_url( + reverse("view_risk_acceptance", args=(risk_acceptance.engagement.id, risk_acceptance.id)), + ) + return "" + + def expiration_message_creator(risk_acceptance, heads_up_days=0): return "Risk acceptance [({})|{}] with {} findings has expired".format( escape_for_jira(risk_acceptance.name), @@ -201,16 +233,14 @@ def accepted_message_creator(risk_acceptance, heads_up_days=0): escape_for_jira(risk_acceptance.name), get_full_url(reverse("view_risk_acceptance", args=(risk_acceptance.engagement.id, risk_acceptance.id))), len(risk_acceptance.accepted_findings.all()), timezone.localtime(risk_acceptance.expiration_date).strftime("%b %d, %Y")) - else: - return "Finding has been risk accepted" + return "Finding has been risk accepted" def unaccepted_message_creator(risk_acceptance, 
heads_up_days=0): if risk_acceptance: return "finding was unaccepted/deleted from risk acceptance [({})|{}]".format(escape_for_jira(risk_acceptance.name), get_full_url(reverse("view_risk_acceptance", args=(risk_acceptance.engagement.id, risk_acceptance.id)))) - else: - return "Finding is no longer risk accepted" + return "Finding is no longer risk accepted" def post_jira_comment(finding, message_factory, heads_up_days=0): @@ -267,7 +297,7 @@ def prefetch_for_expiration(risk_acceptances): ) -def simple_risk_accept(finding, perform_save=True): +def simple_risk_accept(user: Dojo_User, finding: Finding, perform_save=True) -> None: if not finding.test.engagement.product.enable_simple_risk_acceptance: raise PermissionDenied @@ -282,9 +312,15 @@ def simple_risk_accept(finding, perform_save=True): # post_jira_comment might reload from database so see unaccepted finding. but the comment # only contains some text so that's ok post_jira_comment(finding, accepted_message_creator) + # Add a note to reflect that the finding was removed from the risk acceptance + if user is not None: + finding.notes.add(Notes.objects.create( + entry=(f"{Dojo_User.generate_full_name(user)} ({user.id}) has risk accepted this finding"), + author=user, + )) -def risk_unaccept(finding, perform_save=True): +def risk_unaccept(user: Dojo_User, finding: Finding, perform_save=True) -> None: logger.debug("unaccepting finding %i:%s if it is currently risk accepted", finding.id, finding) if finding.risk_accepted: logger.debug("unaccepting finding %i:%s", finding.id, finding) @@ -302,6 +338,12 @@ def risk_unaccept(finding, perform_save=True): # post_jira_comment might reload from database so see unaccepted finding. 
but the comment # only contains some text so that's ok post_jira_comment(finding, unaccepted_message_creator) + # Add a note to reflect that the finding was removed from the risk acceptance + if user is not None: + finding.notes.add(Notes.objects.create( + entry=(f"{Dojo_User.generate_full_name(user)} ({user.id}) removed a risk exception from this finding"), + author=user, + )) def remove_from_any_risk_acceptance(finding): diff --git a/dojo/risk_acceptance/queries.py b/dojo/risk_acceptance/queries.py index 9cbf89fb5c2..72282af21e7 100644 --- a/dojo/risk_acceptance/queries.py +++ b/dojo/risk_acceptance/queries.py @@ -39,8 +39,6 @@ def get_authorized_risk_acceptances(permission): product__member=Exists(authorized_product_roles), product__prod_type__authorized_group=Exists(authorized_product_type_groups), product__authorized_group=Exists(authorized_product_groups)).order_by("id") - risk_acceptances = risk_acceptances.filter( + return risk_acceptances.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - - return risk_acceptances diff --git a/dojo/search/views.py b/dojo/search/views.py index 3e3a75923ca..604e9ecd68c 100644 --- a/dojo/search/views.py +++ b/dojo/search/views.py @@ -31,6 +31,45 @@ def simple_search(request): + + """ + query: some keywords + operators: {} + keywords: ['some', 'keywords'] + + query: some key-word + operators: {} + keywords: ['some', 'key-word'] + + query: keyword with "space inside" + operators: {} + keywords: ['keyword', 'with', 'space inside'] + + query: tag:anchore word tags:php + operators: {'tag': ['anchore'], 'tags': ['php']} + keywords: ['word'] + + query: tags:php,magento + operators: {'tags': ['php,magento']} + keywords: [] + + query: tags:php tags:magento + operators: {'tags': ['php', 'magento']} + keywords: [] + + query: tags:"php, magento" + operators: {'tags': ['php, magento']} + keywords: [] + + query: tags:anchorse some "space 
inside" + operators: {'tags': ['anchorse']} + keywords: ['some', 'space inside'] + + query: tags:anchore vulnerability_id:CVE-2020-1234 jquery + operators: {'tags': ['anchore'], 'vulnerability_id': ['CVE-2020-1234']} + keywords: ['jquery'] + """ + tests = None findings = None finding_templates = None @@ -364,44 +403,6 @@ def simple_search(request): response.delete_cookie("highlight", path="/") return response - """ - query: some keywords - operators: {} - keywords: ['some', 'keywords'] - - query: some key-word - operators: {} - keywords: ['some', 'key-word'] - - query: keyword with "space inside" - operators: {} - keywords: ['keyword', 'with', 'space inside'] - - query: tag:anchore word tags:php - operators: {'tag': ['anchore'], 'tags': ['php']} - keywords: ['word'] - - query: tags:php,magento - operators: {'tags': ['php,magento']} - keywords: [] - - query: tags:php tags:magento - operators: {'tags': ['php', 'magento']} - keywords: [] - - query: tags:"php, magento" - operators: {'tags': ['php, magento']} - keywords: [] - - query: tags:anchorse some "space inside" - operators: {'tags': ['anchorse']} - keywords: ['some', 'space inside'] - - query: tags:anchore vulnerability_id:CVE-2020-1234 jquery - operators: {'tags': ['anchore'], 'vulnerability_id': ['CVE-2020-1234']} - keywords: ['jquery'] - """ - # it's not google grade parsing, but let's do some basic stuff right def parse_search_query(clean_query): @@ -448,8 +449,7 @@ def vulnerability_id_fix(keyword): if vulnerability_ids: return " ".join(vulnerability_ids) - else: - return keyword + return keyword def apply_tag_filters(qs, operators, skip_relations=False): diff --git a/dojo/settings/.settings.dist.py.sha256sum b/dojo/settings/.settings.dist.py.sha256sum index 878a104af54..8a22d6140cf 100644 --- a/dojo/settings/.settings.dist.py.sha256sum +++ b/dojo/settings/.settings.dist.py.sha256sum @@ -1 +1 @@ -5adedc433a342d675492b86dc18786f72e167115f9718a397dc9b91c5fdc9c94 
+bf2078296b31ba8c8376fdd88bbf1d552d0fba8b6e465a8552ac2fa901aa7e60 diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index ebf0283dd6a..826f5792b5c 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -139,7 +139,7 @@ DD_SOCIAL_AUTH_GITLAB_KEY=(str, ""), DD_SOCIAL_AUTH_GITLAB_SECRET=(str, ""), DD_SOCIAL_AUTH_GITLAB_API_URL=(str, "https://gitlab.com"), - DD_SOCIAL_AUTH_GITLAB_SCOPE=(list, ["read_user", "openid"]), + DD_SOCIAL_AUTH_GITLAB_SCOPE=(list, ["read_user", "openid", "read_api", "read_repository"]), DD_SOCIAL_AUTH_KEYCLOAK_OAUTH2_ENABLED=(bool, False), DD_SOCIAL_AUTH_KEYCLOAK_KEY=(str, ""), DD_SOCIAL_AUTH_KEYCLOAK_SECRET=(str, ""), @@ -1142,6 +1142,10 @@ def saml2_attrib_map_format(dict): "task": "dojo.risk_acceptance.helper.expiration_handler", "schedule": crontab(minute=0, hour="*/3"), # every 3 hours }, + "notification_webhook_status_cleanup": { + "task": "dojo.notifications.helper.webhook_status_cleanup", + "schedule": timedelta(minutes=1), + }, # 'jira_status_reconciliation': { # 'task': 'dojo.tasks.jira_status_reconciliation_task', # 'schedule': timedelta(hours=12), @@ -1152,7 +1156,6 @@ def saml2_attrib_map_format(dict): # 'schedule': timedelta(hours=12) # }, - } # ------------------------------------ @@ -1279,6 +1282,9 @@ def saml2_attrib_map_format(dict): "AppCheck Web Application Scanner": ["title", "severity"], "Legitify Scan": ["title", "endpoints", "severity"], "ThreatComposer Scan": ["title", "description"], + "Invicti Scan": ["title", "description", "severity"], + "HackerOne Cases": ["title", "severity"], + "KrakenD Audit Scan": ["description", "mitigation", "severity"], } # Override the hardcoded settings here via the env var @@ -1495,7 +1501,7 @@ def saml2_attrib_map_format(dict): "OSV Scan": DEDUPE_ALGO_HASH_CODE, "Nosey Parker Scan": DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE, "Bearer CLI": DEDUPE_ALGO_HASH_CODE, - "Wiz Scan": DEDUPE_ALGO_HASH_CODE, + "Wiz Scan": 
DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE, "Deepfence Threatmapper Report": DEDUPE_ALGO_HASH_CODE, "Kubescape JSON Importer": DEDUPE_ALGO_HASH_CODE, "Kiuwan SCA Scan": DEDUPE_ALGO_HASH_CODE, @@ -1503,6 +1509,8 @@ def saml2_attrib_map_format(dict): "AppCheck Web Application Scanner": DEDUPE_ALGO_HASH_CODE, "Legitify Scan": DEDUPE_ALGO_HASH_CODE, "ThreatComposer Scan": DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE, + "Invicti Scan": DEDUPE_ALGO_HASH_CODE, + "KrakenD Audit Scan": DEDUPE_ALGO_HASH_CODE, } # Override the hardcoded settings here via the env var diff --git a/dojo/sla_config/views.py b/dojo/sla_config/views.py index f95461283fa..c07e8dadc2a 100644 --- a/dojo/sla_config/views.py +++ b/dojo/sla_config/views.py @@ -56,14 +56,13 @@ def edit_sla_config(request, slaid): "SLA Configuration Deleted.", extra_tags="alert-success") return HttpResponseRedirect(reverse("sla_config")) - else: - messages.add_message(request, - messages.ERROR, - "The Default SLA Configuration cannot be deleted.", - extra_tags="alert-danger") - return HttpResponseRedirect(reverse("sla_config")) + messages.add_message(request, + messages.ERROR, + "The Default SLA Configuration cannot be deleted.", + extra_tags="alert-danger") + return HttpResponseRedirect(reverse("sla_config")) - elif request.method == "POST": + if request.method == "POST": form = SLAConfigForm(request.POST, instance=sla_config) if form.is_valid(): form.save(commit=True) diff --git a/dojo/static/dojo/js/metrics.js b/dojo/static/dojo/js/metrics.js index 2e95555d379..2fd518aa3a1 100644 --- a/dojo/static/dojo/js/metrics.js +++ b/dojo/static/dojo/js/metrics.js @@ -103,11 +103,16 @@ function homepage_severity_plot(critical, high, medium, low) { dashboard-metrics.html */ +function getTicks(critical, high, medium, low) { + return [...new Set(critical.concat(high, medium, low).map(x => x[0]))] +} + function opened_per_month(critical, high, medium, low) { var options = { xaxes: [{ mode: 'time', - timeformat: "%m/%y" + timeformat: 
"%m/%y", + ticks: getTicks(critical, high, medium, low), }], yaxes: [{ min: 0 @@ -153,7 +158,8 @@ function accepted_per_month(critical, high, medium, low) { var options = { xaxes: [{ mode: 'time', - timeformat: "%m/%y" + timeformat: "%m/%y", + ticks: getTicks(critical, high, medium, low), }], yaxes: [{ min: 0 @@ -199,7 +205,8 @@ function opened_per_week(critical, high, medium, low) { var options = { xaxes: [{ mode: 'time', - timeformat: "%m/%d/%Y" + timeformat: "%m/%d/%Y", + ticks: getTicks(critical, high, medium, low), }], yaxes: [{ min: 0 @@ -245,7 +252,8 @@ function accepted_per_week(critical, high, medium, low) { var options = { xaxes: [{ mode: 'time', - timeformat: "%m/%d/%Y" + timeformat: "%m/%d/%Y", + ticks: getTicks(critical, high, medium, low), }], yaxes: [{ min: 0 diff --git a/dojo/survey/views.py b/dojo/survey/views.py index 29b4a2fc81d..d83803f2efd 100644 --- a/dojo/survey/views.py +++ b/dojo/survey/views.py @@ -77,12 +77,11 @@ def delete_engagement_survey(request, eid, sid): "Questionnaire deleted successfully.", extra_tags="alert-success") return HttpResponseRedirect(reverse("view_engagement", args=(engagement.id, ))) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to delete Questionnaire.", - extra_tags="alert-danger") + messages.add_message( + request, + messages.ERROR, + "Unable to delete Questionnaire.", + extra_tags="alert-danger") add_breadcrumb( title="Delete " + survey.survey.name + " Questionnaire", @@ -145,12 +144,11 @@ def answer_questionnaire(request, eid, sid): "Successfully answered, all answers valid.", extra_tags="alert-success") return HttpResponseRedirect(reverse("view_engagement", args=(engagement.id, ))) - else: - messages.add_message( - request, - messages.ERROR, - "Questionnaire has errors, please correct.", - extra_tags="alert-danger") + messages.add_message( + request, + messages.ERROR, + "Questionnaire has errors, please correct.", + extra_tags="alert-danger") add_breadcrumb( title="Answer " + 
survey.survey.name + " Survey", top_level=False, @@ -243,12 +241,11 @@ def add_questionnaire(request, eid): if "respond_survey" in request.POST: return HttpResponseRedirect(reverse("answer_questionnaire", args=(eid, survey.id))) return HttpResponseRedirect(reverse("view_engagement", args=(eid,))) - else: - messages.add_message( - request, - messages.ERROR, - "Questionnaire could not be added.", - extra_tags="alert-danger") + messages.add_message( + request, + messages.ERROR, + "Questionnaire could not be added.", + extra_tags="alert-danger") form.fields["survey"].queryset = surveys add_breadcrumb(title="Add Questionnaire", top_level=False, request=request) @@ -290,12 +287,11 @@ def edit_questionnaire(request, sid): "Questionnaire successfully updated, you may now add/edit questions.", extra_tags="alert-success") return HttpResponseRedirect(reverse("edit_questionnaire", args=(survey.id,))) - else: - messages.add_message( - request, - messages.SUCCESS, - "No changes detected, questionnaire not updated.", - extra_tags="alert-warning") + messages.add_message( + request, + messages.SUCCESS, + "No changes detected, questionnaire not updated.", + extra_tags="alert-warning") if "add_questions" in request.POST: return HttpResponseRedirect(reverse("edit_questionnaire_questions", args=(survey.id,))) else: @@ -360,14 +356,12 @@ def create_questionnaire(request): extra_tags="alert-success") if "add_questions" in request.POST: return HttpResponseRedirect(reverse("edit_questionnaire_questions", args=(survey.id,))) - else: - return HttpResponseRedirect(reverse("questionnaire")) - else: - messages.add_message( - request, - messages.ERROR, - "Please correct any errors displayed below.", - extra_tags="alert-danger") + return HttpResponseRedirect(reverse("questionnaire")) + messages.add_message( + request, + messages.ERROR, + "Please correct any errors displayed below.", + extra_tags="alert-danger") add_breadcrumb(title="Create Questionnaire", top_level=False, request=request) return 
render(request, "defectDojo-engagement-survey/create_questionnaire.html", { @@ -411,12 +405,11 @@ def edit_questionnaire_questions(request, sid): "Questionnaire questions successfully saved.", extra_tags="alert-success") return HttpResponseRedirect(reverse("questionnaire")) - else: - messages.add_message( - request, - messages.ERROR, - "Questionnaire questions not saved, please correct any errors displayed below.", - extra_tags="alert-success") + messages.add_message( + request, + messages.ERROR, + "Questionnaire questions not saved, please correct any errors displayed below.", + extra_tags="alert-success") add_breadcrumb(title="Update Questionnaire Questions", top_level=False, request=request) return render(request, "defectDojo-engagement-survey/edit_survey_questions.html", { @@ -488,8 +481,7 @@ def create_question(request): "Text Question added successfully.", extra_tags="alert-success") return HttpResponseRedirect(reverse("questions")) - else: - error = True + error = True elif type == "choice": if choiceQuestionFrom.is_valid(): @@ -511,8 +503,7 @@ def create_question(request): "Choice Question added successfully.", extra_tags="alert-success") return HttpResponseRedirect(reverse("questions")) - else: - error = True + error = True if "_popup" in request.GET and not error: resp = f'' @@ -638,12 +629,11 @@ def add_empty_questionnaire(request): if "respond_survey" in request.POST: return HttpResponseRedirect(reverse("dashboard")) return HttpResponseRedirect(reverse("questionnaire")) - else: - messages.add_message( - request, - messages.ERROR, - "Questionnaire could not be added.", - extra_tags="alert-danger") + messages.add_message( + request, + messages.ERROR, + "Questionnaire could not be added.", + extra_tags="alert-danger") form.fields["survey"].queryset = surveys add_breadcrumb(title="Add Empty Questionnaire", top_level=False, request=request) @@ -695,12 +685,11 @@ def delete_empty_questionnaire(request, esid): "Questionnaire deleted successfully.", 
extra_tags="alert-success") return HttpResponseRedirect(reverse("survey")) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to delete Questionnaire.", - extra_tags="alert-danger") + messages.add_message( + request, + messages.ERROR, + "Unable to delete Questionnaire.", + extra_tags="alert-danger") add_breadcrumb( title="Delete " + survey.survey.name + " Questionnaire", @@ -731,12 +720,11 @@ def delete_general_questionnaire(request, esid): "Questionnaire deleted successfully.", extra_tags="alert-success") return HttpResponseRedirect(reverse("questionnaire")) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to delete questionnaire.", - extra_tags="alert-danger") + messages.add_message( + request, + messages.ERROR, + "Unable to delete questionnaire.", + extra_tags="alert-danger") add_breadcrumb( title="Delete " + survey.survey.name + " Questionnaire", @@ -815,12 +803,11 @@ def answer_empty_survey(request, esid): extra_tags="alert-success") return HttpResponseRedirect( reverse("dashboard")) - else: - messages.add_message( - request, - messages.ERROR, - "Questionnaire has errors, please correct.", - extra_tags="alert-danger") + messages.add_message( + request, + messages.ERROR, + "Questionnaire has errors, please correct.", + extra_tags="alert-danger") add_breadcrumb( title="Answer Empty " + engagement_survey.name + " Questionnaire", top_level=False, @@ -857,12 +844,11 @@ def engagement_empty_survey(request, esid): "Engagement created and questionnaire successfully linked.", extra_tags="alert-success") return HttpResponseRedirect(reverse("edit_engagement", args=(engagement.id, ))) - else: - messages.add_message( - request, - messages.ERROR, - "Questionnaire could not be added.", - extra_tags="alert-danger") + messages.add_message( + request, + messages.ERROR, + "Questionnaire could not be added.", + extra_tags="alert-danger") add_breadcrumb( title="Link Questionnaire to new Engagement", top_level=False, diff --git 
a/dojo/system_settings/views.py b/dojo/system_settings/views.py index 3690201a050..4c952d57a0f 100644 --- a/dojo/system_settings/views.py +++ b/dojo/system_settings/views.py @@ -116,7 +116,7 @@ def get_celery_status( context["celery_msg"] = "Celery needs to have the setting CELERY_RESULT_BACKEND = 'db+sqlite:///dojo.celeryresults.sqlite' set in settings.py." context["celery_status"] = "Unknown" - return None + return def get_template(self) -> str: return "dojo/system_settings.html" diff --git a/dojo/tags_signals.py b/dojo/tags_signals.py index f7e09fa9b0c..605996a602c 100644 --- a/dojo/tags_signals.py +++ b/dojo/tags_signals.py @@ -77,3 +77,4 @@ def get_product(instance): return instance.engagement.product if isinstance(instance, Finding): return instance.test.engagement.product + return None diff --git a/dojo/templates/base.html b/dojo/templates/base.html index 765ec10dc55..722656ae6a9 100644 --- a/dojo/templates/base.html +++ b/dojo/templates/base.html @@ -541,6 +541,13 @@ {% trans "Notifications" %} + {% if system_settings.enable_webhooks_notifications and "dojo.view_notification_webhooks"|has_configuration_permission:request %} +
  • + + {% trans "Notification Webhooks" %} + +
  • + {% endif %}
  • {% trans "Regulations" %} diff --git a/dojo/templates/dojo/add_notification_webhook.html b/dojo/templates/dojo/add_notification_webhook.html new file mode 100644 index 00000000000..12056373af4 --- /dev/null +++ b/dojo/templates/dojo/add_notification_webhook.html @@ -0,0 +1,13 @@ +{% extends "base.html" %} +{% block content %} + {{ block.super }} +

    Add a new Notification Webhook

    +
    {% csrf_token %} + {% include "dojo/form_fields.html" with form=form %} +
    +
    + +
    +
    +
    +{% endblock %} diff --git a/dojo/templates/dojo/delete_notification_webhook.html b/dojo/templates/dojo/delete_notification_webhook.html new file mode 100644 index 00000000000..f196ad94fc9 --- /dev/null +++ b/dojo/templates/dojo/delete_notification_webhook.html @@ -0,0 +1,12 @@ +{% extends "base.html" %} +{% block content %} +

    Delete Notification Webhook

    +
    {% csrf_token %} + {% include "dojo/form_fields.html" with form=form %} +
    +
    + +
    +
    +
    +{% endblock %} diff --git a/dojo/templates/dojo/edit_notification_webhook.html b/dojo/templates/dojo/edit_notification_webhook.html new file mode 100644 index 00000000000..94bd56c2307 --- /dev/null +++ b/dojo/templates/dojo/edit_notification_webhook.html @@ -0,0 +1,15 @@ +{% extends "base.html" %} + {% block content %} + {{ block.super }} +

    Edit Notification Webhook

    +
    {% csrf_token %} + {% include "dojo/form_fields.html" with form=form %} +
    +
    + + +
    +
    +
    + {% endblock %} + \ No newline at end of file diff --git a/dojo/templates/dojo/notifications.html b/dojo/templates/dojo/notifications.html index 52d87393c45..81fac49d5cc 100644 --- a/dojo/templates/dojo/notifications.html +++ b/dojo/templates/dojo/notifications.html @@ -89,6 +89,9 @@

    {% if 'mail' in enabled_notifications %} {% trans "Mail" %} {% endif %} + {% if 'webhooks' in enabled_notifications %} + {% trans "Webhooks" %} + {% endif %} {% trans "Alert" %} diff --git a/dojo/templates/dojo/product.html b/dojo/templates/dojo/product.html index 1b7f50a73df..0470bd8357a 100644 --- a/dojo/templates/dojo/product.html +++ b/dojo/templates/dojo/product.html @@ -122,18 +122,20 @@

    Edit Custom Fields

  • - -
  • - - Add Scan API Configuration - -
  • + {% endif %} + + {% if prod|has_object_permission:"Product_API_Scan_Configuration_Edit" %}
  • - - View Scan API Configurations - + + Add Scan API Configuration +
  • {% endif %} +
  • + + View Scan API Configurations + +
  • {% if system_settings.enable_product_tracking_files %} {% if prod|has_object_permission:"Product_Tracking_Files_Add" %} diff --git a/dojo/templates/dojo/system_settings.html b/dojo/templates/dojo/system_settings.html index 693abe712f0..02510452e16 100644 --- a/dojo/templates/dojo/system_settings.html +++ b/dojo/templates/dojo/system_settings.html @@ -62,7 +62,7 @@

    System Settings

    } $(function () { - $.each(['slack','msteams','mail', 'grade'], function (index, value) { + $.each(['slack','msteams','mail','webhooks','grade'], function (index, value) { updatenotificationsgroup(value); $('#id_enable_' + value + '_notifications').change(function() { updatenotificationsgroup(value)}); }); diff --git a/dojo/templates/dojo/view_notification_webhooks.html b/dojo/templates/dojo/view_notification_webhooks.html new file mode 100644 index 00000000000..6b02c0888d3 --- /dev/null +++ b/dojo/templates/dojo/view_notification_webhooks.html @@ -0,0 +1,101 @@ +{% extends "base.html" %} +{% load navigation_tags %} +{% load display_tags %} +{% load i18n %} +{% load authorization_tags %} +{% block content %} + {{ block.super }} +
    +
    +
    +
    +

    + Notification Webhook List + +

    +
    + +
    + {% if nwhs %} + +
    + {% include "dojo/paging_snippet.html" with page=nwhs page_size=True %} +
    + +
    + + + + + + + + + + + + {% for nwh in nwhs %} + + + + + + + {% if "dojo.edit_notification_webhook"|has_configuration_permission:request %} + + {% endif %} + + {% endfor %} + +
    {% dojo_sort request 'Notification Webhook Name' 'name' 'asc' %}URLStatusNoteOwner
    {{ nwh.name }}{{ nwh.url }}{{ nwh.get_status_display }} + {% if nwh.first_error or nwh.last_error %} + + {% endif %} + {{ nwh.note }}{% if nwh.owner %}{{ nwh.owner }}{% else %}System Webhook{% endif %} + +
    +
    +
    + {% include "dojo/paging_snippet.html" with page=nwhs page_size=True %} +
    + {% else %} +

    No Notification Webhook found.

    + {% endif %} +
    +
    +{% endblock %} +{% block postscript %} + {{ block.super }} + {% include "dojo/filter_js_snippet.html" %} +{% endblock %} diff --git a/dojo/templates/dojo/view_product_details.html b/dojo/templates/dojo/view_product_details.html index 30dd863fc3c..3f7ea62ce32 100644 --- a/dojo/templates/dojo/view_product_details.html +++ b/dojo/templates/dojo/view_product_details.html @@ -41,19 +41,21 @@

    {% trans "Description" %}

    {% trans "Edit Custom Fields" %} - -
  • - - {% trans "Add API Scan Configuration" %} - -
  • + {% endif %} + + {% if prod|has_object_permission:"Product_API_Scan_Configuration_Add" %}
  • - - {% trans "View API Scan Configurations" %} - + + {% trans "Add API Scan Configuration" %} +
  • {% endif %} +
  • + + {% trans "View API Scan Configurations" %} + +
  • {% if system_settings.enable_product_tracking_files %} {% if prod|has_object_permission:"Product_Tracking_Files_Add" %} @@ -295,7 +297,7 @@

    {% trans "Members" %}

    {% endif %} - {% if product_members or product_type_members %} + {% if product_members or product_type_members or global_product_members %}
    @@ -348,6 +350,15 @@

    {% trans "Members" %}

    {% endfor %} + {% for member in global_product_members %} + + + + + + + {% endfor %}
    {{ member.role }}
    + {{ member.user.get_full_name }}Global role{{ member.role }}
    @@ -383,7 +394,7 @@

    {% trans "Groups" %}

    {% endif %} - {% if product_groups or product_type_groups %} + {% if product_groups or product_type_groups or global_product_groups %}
    @@ -435,6 +446,14 @@

    {% trans "Groups" %}

    {% endfor %} + {% for type_group in global_product_groups %} + + + + + + + {% endfor %}
    {{ type_group.role }}
    {{ type_group.group.name }}Global role{{ type_group.role }}
    @@ -668,7 +687,7 @@