diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index 837b461c15d..d2da08eb7fd 100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -35,7 +35,7 @@ jobs: uses: actions/checkout@v4 - name: Setup Minikube - uses: manusa/actions-setup-minikube@v2.11.0 + uses: manusa/actions-setup-minikube@v2.12.0 with: minikube version: 'v1.33.1' kubernetes version: ${{ matrix.k8s }} diff --git a/.github/workflows/rest-framework-tests.yml b/.github/workflows/rest-framework-tests.yml index 907ecf92968..f153a368ba9 100644 --- a/.github/workflows/rest-framework-tests.yml +++ b/.github/workflows/rest-framework-tests.yml @@ -34,8 +34,8 @@ jobs: run: docker/setEnv.sh unit_tests_cicd # phased startup so we can use the exit code from unit test container - - name: Start Postgres - run: docker compose up -d postgres + - name: Start Postgres and webhook.endpoint + run: docker compose up -d postgres webhook.endpoint # no celery or initializer needed for unit tests - name: Unit tests diff --git a/components/package.json b/components/package.json index 687ffe51f10..ca4351fe41e 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.38.4", + "version": "2.39.0", "license" : "BSD-3-Clause", "private": true, "dependencies": { @@ -26,7 +26,7 @@ "google-code-prettify": "^1.0.0", "jquery": "^3.7.1", "jquery-highlight": "3.5.0", - "jquery-ui": "1.13.3", + "jquery-ui": "1.14.0", "jquery.cookie": "1.4.1", "jquery.flot.tooltip": "^0.9.0", "jquery.hotkeys": "jeresig/jquery.hotkeys#master", @@ -35,7 +35,7 @@ "metismenu": "~3.0.7", "moment": "^2.30.1", "morris.js": "morrisjs/morris.js", - "pdfmake": "^0.2.12", + "pdfmake": "^0.2.13", "startbootstrap-sb-admin-2": "1.0.7" }, "engines": { diff --git a/components/yarn.lock b/components/yarn.lock index b4bfb09a423..7bb19365790 100644 --- a/components/yarn.lock +++ b/components/yarn.lock @@ -678,12 +678,12 @@ jquery-highlight@3.5.0: dependencies: jquery ">= 1.0.0" -jquery-ui@1.13.3: - version "1.13.3" - resolved "https://registry.yarnpkg.com/jquery-ui/-/jquery-ui-1.13.3.tgz#d9f5292b2857fa1f2fdbbe8f2e66081664eb9bc5" - integrity sha512-D2YJfswSJRh/B8M/zCowDpNFfwsDmtfnMPwjJTyvl+CBqzpYwQ+gFYIbUUlzijy/Qvoy30H1YhoSui4MNYpRwA== +jquery-ui@1.14.0: + version "1.14.0" + resolved "https://registry.yarnpkg.com/jquery-ui/-/jquery-ui-1.14.0.tgz#b75d417826f0bab38125f907356d2e3313a9c6d5" + integrity sha512-mPfYKBoRCf0MzaT2cyW5i3IuZ7PfTITaasO5OFLAQxrHuI+ZxruPa+4/K1OMNT8oElLWGtIxc9aRbyw20BKr8g== dependencies: - jquery ">=1.8.0 <4.0.0" + jquery ">=1.12.0 <5.0.0" jquery.cookie@1.4.1: version "1.4.1" @@ -699,7 +699,7 @@ jquery.hotkeys@jeresig/jquery.hotkeys#master: version "0.2.0" resolved "https://codeload.github.com/jeresig/jquery.hotkeys/tar.gz/f24f1da275aab7881ab501055c256add6f690de4" -"jquery@>= 1.0.0", jquery@>=1.7, jquery@>=1.7.0, "jquery@>=1.8.0 <4.0.0", jquery@^3.7.1: +"jquery@>= 1.0.0", "jquery@>=1.12.0 <5.0.0", jquery@>=1.7, jquery@>=1.7.0, jquery@^3.7.1: version "3.7.1" resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.7.1.tgz#083ef98927c9a6a74d05a6af02806566d16274de" integrity sha512-m4avr8yL8kmFN8psrbFFFmB/If14iN5o9nw/NgnnM+kybDJpRsAynV2BsfpTYrTRysYUdADVD7CkUUizgkpLfg== @@ -824,10 +824,10 @@ path-parse@^1.0.7: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== -pdfmake@^0.2.12: - version "0.2.12" 
- resolved "https://registry.yarnpkg.com/pdfmake/-/pdfmake-0.2.12.tgz#5156f91ff73797947942aa342423bedaa0c0bc93" - integrity sha512-TFsqaG6KVtk+TWermmJNNwom3wmB/xiz07prM74KBhdM+7pz3Uwq2b0uoqhhQRn6cYUTpL8lXZY6xF011o1YcQ== +pdfmake@^0.2.13: + version "0.2.13" + resolved "https://registry.yarnpkg.com/pdfmake/-/pdfmake-0.2.13.tgz#ea43fe9f0c8de1e5ec7b08486d6f4f8bbb8619e4" + integrity sha512-qeVE9Bzjm0oPCitH4/HYM/XCGTwoeOAOVAXPnV3s0kpPvTLkTF/bAF4jzorjkaIhXGQhzYk6Xclt0hMDYLY93w== dependencies: "@foliojs-fork/linebreak" "^1.1.1" "@foliojs-fork/pdfkit" "^0.14.0" diff --git a/docker-compose.override.dev.yml b/docker-compose.override.dev.yml index f3a281af061..581dd627900 100644 --- a/docker-compose.override.dev.yml +++ b/docker-compose.override.dev.yml @@ -53,3 +53,5 @@ services: published: 8025 protocol: tcp mode: host + "webhook.endpoint": + image: mccutchen/go-httpbin:v2.15.0@sha256:24528cf5229d0b70065ac27e6c9e4d96f5452a84a3ce4433e56573c18d96827a diff --git a/docker-compose.override.unit_tests.yml b/docker-compose.override.unit_tests.yml index 164d7a87084..baf50d51e60 100644 --- a/docker-compose.override.unit_tests.yml +++ b/docker-compose.override.unit_tests.yml @@ -1,7 +1,7 @@ --- services: nginx: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'nginx'] volumes: - defectdojo_media_unit_tests:/usr/share/nginx/html/media @@ -30,13 +30,13 @@ services: DD_CELERY_BROKER_PATH: '/dojo.celerydb.sqlite' DD_CELERY_BROKER_PARAMS: '' celerybeat: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'celery beat'] celeryworker: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'celery worker'] initializer: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'initializer'] postgres: ports: @@ -49,8 +49,10 @@ services: volumes: - defectdojo_postgres_unit_tests:/var/lib/postgresql/data redis: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'redis'] + "webhook.endpoint": + image: mccutchen/go-httpbin:v2.15.0@sha256:24528cf5229d0b70065ac27e6c9e4d96f5452a84a3ce4433e56573c18d96827a volumes: defectdojo_postgres_unit_tests: {} defectdojo_media_unit_tests: {} diff --git a/docker-compose.override.unit_tests_cicd.yml b/docker-compose.override.unit_tests_cicd.yml index b39f4cf034d..1ca70557d41 100644 --- a/docker-compose.override.unit_tests_cicd.yml +++ b/docker-compose.override.unit_tests_cicd.yml @@ -1,7 +1,7 @@ --- services: nginx: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'nginx'] volumes: - defectdojo_media_unit_tests:/usr/share/nginx/html/media @@ -29,13 +29,13 @@ services: DD_CELERY_BROKER_PATH: '/dojo.celerydb.sqlite' DD_CELERY_BROKER_PARAMS: '' celerybeat: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'celery beat'] celeryworker: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'celery worker'] initializer: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'initializer'] postgres: ports: @@ -48,8 +48,10 @@ services: volumes: - defectdojo_postgres_unit_tests:/var/lib/postgresql/data redis: - image: busybox:1.36.1-musl + image: busybox:1.37.0-musl entrypoint: ['echo', 'skipping', 'redis'] + "webhook.endpoint": + image: mccutchen/go-httpbin:v2.15.0@sha256:24528cf5229d0b70065ac27e6c9e4d96f5452a84a3ce4433e56573c18d96827a volumes: 
defectdojo_postgres_unit_tests: {} defectdojo_media_unit_tests: {} diff --git a/docker-compose.yml b/docker-compose.yml index df2182f72ef..cf0e023f32a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -103,7 +103,7 @@ services: source: ./docker/extra_settings target: /app/docker/extra_settings postgres: - image: postgres:16.4-alpine@sha256:492898505cb45f9835acc327e98711eaa9298ed804e0bb36f29e08394229550d + image: postgres:17.0-alpine@sha256:14195b0729fce792f47ae3c3704d6fd04305826d57af3b01d5b4d004667df174 environment: POSTGRES_DB: ${DD_DATABASE_NAME:-defectdojo} POSTGRES_USER: ${DD_DATABASE_USER:-defectdojo} @@ -111,7 +111,7 @@ services: volumes: - defectdojo_postgres:/var/lib/postgresql/data redis: - image: redis:7.2.5-alpine@sha256:0bc09d9f486508aa42ecc2f18012bb1e3a1b2744ef3a6ad30942fa12579f0b03 + image: redis:7.2.5-alpine@sha256:6aaf3f5e6bc8a592fbfe2cccf19eb36d27c39d12dab4f4b01556b7449e7b1f44 volumes: - defectdojo_redis:/data volumes: diff --git a/docker/install_chrome_dependencies.py b/docker/install_chrome_dependencies.py index c17fabbc8be..b0ddda14755 100644 --- a/docker/install_chrome_dependencies.py +++ b/docker/install_chrome_dependencies.py @@ -10,7 +10,13 @@ def find_packages(library_name): - stdout = run_command(["apt-file", "search", library_name]) + stdout, stderr, status_code = run_command(["apt-file", "search", library_name]) + # Check if ldd has failed for a good reason, or if there are no results + if status_code != 0: + # Any other case should be be caught + msg = f"apt-file search (exit code {status_code}): {stderr}" + raise ValueError(msg) + if not stdout.strip(): return [] libs = [line.split(":")[0] for line in stdout.strip().split("\n")] @@ -18,35 +24,33 @@ def find_packages(library_name): def run_command(cmd, cwd=None, env=None): + # Do not raise exception here because some commands are too loose with negative exit codes result = subprocess.run(cmd, cwd=cwd, env=env, capture_output=True, text=True, check=False) - return result.stdout + return result.stdout.strip(), result.stderr.strip(), result.returncode def ldd(file_path): - stdout = run_command(["ldd", file_path]) - # For simplicity, I'm assuming if we get an error, the code is non-zero. - try: - result = subprocess.run( - ["ldd", file_path], capture_output=True, text=True, check=False, - ) - stdout = result.stdout - code = result.returncode - except subprocess.CalledProcessError: - stdout = "" - code = 1 - return stdout, code + stdout, stderr, status_code = run_command(["ldd", file_path]) + # Check if ldd has failed for a good reason, or if there are no results + if status_code != 0: + # It is often the case when stdout will be empty. 
This is not an error + if not stdout: + return stdout, status_code + # Any other case should be be caught + msg = f"ldd (exit code {status_code}): {stderr}" + raise ValueError(msg) + + return stdout, status_code raw_deps = ldd("/opt/chrome/chrome") dependencies = raw_deps[0].splitlines() - missing_deps = { r[0].strip() for d in dependencies for r in [d.split("=>")] if len(r) == 2 and r[1].strip() == "not found" } - missing_packages = [] for d in missing_deps: all_packages = find_packages(d) @@ -59,5 +63,4 @@ def ldd(file_path): ] for p in packages: missing_packages.append(p) - logger.info("missing_packages: " + (" ".join(missing_packages))) diff --git a/docs/content/en/getting_started/upgrading/2.39.md b/docs/content/en/getting_started/upgrading/2.39.md new file mode 100644 index 00000000000..0f179d7b5d1 --- /dev/null +++ b/docs/content/en/getting_started/upgrading/2.39.md @@ -0,0 +1,7 @@ +--- +title: 'Upgrading to DefectDojo Version 2.39.x' +toc_hide: true +weight: -20240903 +description: No special instructions. +--- +There are no special instructions for upgrading to 2.39.x. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.39.0) for the contents of the release. diff --git a/docs/content/en/integrations/burp-plugin.md b/docs/content/en/integrations/burp-plugin.md index 400b37c0f2a..ab3285ceda4 100644 --- a/docs/content/en/integrations/burp-plugin.md +++ b/docs/content/en/integrations/burp-plugin.md @@ -2,7 +2,7 @@ title: "Defect Dojo Burp plugin" description: "Export findings directly from Burp to DefectDojo." draft: false -weight: 8 +weight: 9 --- **Please note: The DefectDojo Burp Plugin has been sunset and is no longer a supported feature.** diff --git a/docs/content/en/integrations/exporting.md b/docs/content/en/integrations/exporting.md index da17df7d93b..7a42d27b17e 100644 --- a/docs/content/en/integrations/exporting.md +++ b/docs/content/en/integrations/exporting.md @@ -2,7 +2,7 @@ title: "Exporting" description: "DefectDojo has the ability to export findings." draft: false -weight: 11 +weight: 12 --- diff --git a/docs/content/en/integrations/google-sheets-sync.md b/docs/content/en/integrations/google-sheets-sync.md index b6e97f72f84..456a694fc6e 100644 --- a/docs/content/en/integrations/google-sheets-sync.md +++ b/docs/content/en/integrations/google-sheets-sync.md @@ -2,7 +2,7 @@ title: "Google Sheets synchronisation" description: "Export finding details to Google Sheets and upload changes from Google Sheets." draft: false -weight: 7 +weight: 8 --- **Please note - the Google Sheets feature has been deprecated as of DefectDojo version 2.21.0 - these documents are for reference only.** diff --git a/docs/content/en/integrations/languages.md b/docs/content/en/integrations/languages.md index 17a322c8f90..a78ed137e69 100644 --- a/docs/content/en/integrations/languages.md +++ b/docs/content/en/integrations/languages.md @@ -2,7 +2,7 @@ title: "Languages and lines of code" description: "You can import an analysis of languages used in a project, including lines of code." 
draft: false -weight: 9 +weight: 10 --- ## Import of languages for a project diff --git a/docs/content/en/integrations/notification_webhooks/_index.md b/docs/content/en/integrations/notification_webhooks/_index.md new file mode 100644 index 00000000000..d8fe606cffa --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/_index.md @@ -0,0 +1,79 @@ +--- +title: "Notification Webhooks (experimental)" +description: "How to set up and use webhooks" +weight: 7 +chapter: true +--- + +Webhooks are HTTP requests sent from the DefectDojo instance to a user-defined web server that expects this kind of incoming traffic. + +## Transition graph: + +It is not unusual that a webhook cannot be delivered. This is usually caused by network issues, server misconfiguration, or upgrades running on the server. DefectDojo needs to react to these outages and may temporarily or permanently disable the related endpoints. The following graph shows how the status of a webhook definition may change based on HTTP responses (or manual user interaction). + +```mermaid +flowchart TD + + START{{Endpoint created}} + ALL{All states} + STATUS_ACTIVE([STATUS_ACTIVE]) + STATUS_INACTIVE_TMP + STATUS_INACTIVE_PERMANENT + STATUS_ACTIVE_TMP([STATUS_ACTIVE_TMP]) + END{{Endpoint removed}} + + START ==> STATUS_ACTIVE + STATUS_ACTIVE --HTTP 200 or 201 --> STATUS_ACTIVE + STATUS_ACTIVE --HTTP 5xx
or HTTP 429
or Timeout--> STATUS_INACTIVE_TMP + STATUS_ACTIVE --Any HTTP 4xx response
or any other HTTP response
or non-HTTP error--> STATUS_INACTIVE_PERMANENT + STATUS_INACTIVE_TMP -.After 60s.-> STATUS_ACTIVE_TMP + STATUS_ACTIVE_TMP --HTTP 5xx
or HTTP 429
or Timeout
within 24h
from the first error-->STATUS_INACTIVE_TMP + STATUS_ACTIVE_TMP -.After 24h.-> STATUS_ACTIVE + STATUS_ACTIVE_TMP --HTTP 200 or 201 --> STATUS_ACTIVE_TMP + STATUS_ACTIVE_TMP --HTTP 5xx
or HTTP 429
or Timeout
within 24h from the first error
or any other HTTP response or error--> STATUS_INACTIVE_PERMANENT + ALL ==Activation by user==> STATUS_ACTIVE + ALL ==Deactivation by user==> STATUS_INACTIVE_PERMANENT + ALL ==Removal of endpoint by user==> END +``` + +Notes: + +1. Transitions: + - bold: manual changes by the user + - dotted: automated by celery + - others: based on responses to webhooks +1. Nodes: + - Stadium-shaped: Active - subsequent webhooks can be sent + - Rectangles: Inactive - webhook delivery will fail (and will not be retried) + - Hexagonal: Initial and final states + - Rhombus: All states (meta node to make the graph more readable) + +## Body and Headers + +The body of each request is JSON containing data about the related event, such as the names and IDs of the affected elements. +Example bodies are shown on the pages for each event (see below). + +Each request contains the following headers. They might be useful for better handling of events by the server that processes them. + +```yaml +User-Agent: DefectDojo-<version of the DefectDojo instance> +X-DefectDojo-Event: <event name> +X-DefectDojo-Instance: <base URL of the DefectDojo instance> +``` +## Disclaimer + +This functionality is new and experimental. It might introduce breaking changes in upcoming DefectDojo releases and should not be considered final. + +However, the community is open to feedback to improve this functionality and stabilize it as soon as possible. + +## Roadmap + +There are a couple of known limitations that are expected to be addressed once the core functionality is considered ready. + +- Supported events - not only the addition of products, product types, engagements, and tests, or the upload of new scans, but also events around SLAs +- User webhooks - right now only admins can define webhooks; in the future, users will be able to define their own as well +- UI improvements - add filtering and pagination of webhook endpoints + +## Events + + \ No newline at end of file diff --git a/docs/content/en/integrations/notification_webhooks/engagement_added.md b/docs/content/en/integrations/notification_webhooks/engagement_added.md new file mode 100644 index 00000000000..64fd7746ec2 --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/engagement_added.md @@ -0,0 +1,38 @@ +--- +title: "Event: engagement_added" +weight: 3 +chapter: true +--- + +## Event HTTP header +```yaml +X-DefectDojo-Event: engagement_added +``` + +## Event HTTP body +```json +{ + "description": null, + "engagement": { + "id": 7, + "name": "notif eng", + "url_api": "http://localhost:8080/api/v2/engagements/7/", + "url_ui": "http://localhost:8080/engagement/7" + }, + "product": { + "id": 4, + "name": "notif prod", + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4" + }, + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "url_api": "http://localhost:8080/api/v2/engagements/7/", + "url_ui": "http://localhost:8080/engagement/7", + "user": null +} +``` \ No newline at end of file diff --git a/docs/content/en/integrations/notification_webhooks/product_added.md b/docs/content/en/integrations/notification_webhooks/product_added.md new file mode 100644 index 00000000000..2d90a6a681f --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/product_added.md @@ -0,0 +1,32 @@ +--- +title: "Event: product_added" +weight: 2 +chapter: true +--- + +## Event HTTP header +```yaml +X-DefectDojo-Event: product_added +``` + +## Event HTTP body +```json +{ + "description": null, +
"product": { + "id": 4, + "name": "notif prod", + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4" + }, + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4", + "user": null +} +``` \ No newline at end of file diff --git a/docs/content/en/integrations/notification_webhooks/product_type_added.md b/docs/content/en/integrations/notification_webhooks/product_type_added.md new file mode 100644 index 00000000000..1171f513831 --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/product_type_added.md @@ -0,0 +1,26 @@ +--- +title: "Event: product_type_added" +weight: 1 +chapter: true +--- + +## Event HTTP header +```yaml +X-DefectDojo-Event: product_type_added +``` + +## Event HTTP body +```json +{ + "description": null, + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4", + "user": null +} +``` \ No newline at end of file diff --git a/docs/content/en/integrations/notification_webhooks/scan_added.md b/docs/content/en/integrations/notification_webhooks/scan_added.md new file mode 100644 index 00000000000..27a40e6cab1 --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/scan_added.md @@ -0,0 +1,90 @@ +--- +title: "Event: scan_added and scan_added_empty" +weight: 5 +chapter: true +--- + +Event `scan_added_empty` describes a situation when reimport did not affect the existing test (no finding has been created or closed). 
+ +## Event HTTP header for scan_added +```yaml +X-DefectDojo-Event: scan_added +``` + +## Event HTTP header for scan_added_empty +```yaml +X-DefectDojo-Event: scan_added_empty +``` + +## Event HTTP body +```json +{ + "description": null, + "engagement": { + "id": 7, + "name": "notif eng", + "url_api": "http://localhost:8080/api/v2/engagements/7/", + "url_ui": "http://localhost:8080/engagement/7" + }, + "finding_count": 4, + "findings": { + "mitigated": [ + { + "id": 233, + "severity": "Medium", + "title": "Mitigated Finding", + "url_api": "http://localhost:8080/api/v2/findings/233/", + "url_ui": "http://localhost:8080/finding/233" + } + ], + "new": [ + { + "id": 232, + "severity": "Critical", + "title": "New Finding", + "url_api": "http://localhost:8080/api/v2/findings/232/", + "url_ui": "http://localhost:8080/finding/232" + } + ], + "reactivated": [ + { + "id": 234, + "severity": "Low", + "title": "Reactivated Finding", + "url_api": "http://localhost:8080/api/v2/findings/234/", + "url_ui": "http://localhost:8080/finding/234" + } + ], + "untouched": [ + { + "id": 235, + "severity": "Info", + "title": "Untouched Finding", + "url_api": "http://localhost:8080/api/v2/findings/235/", + "url_ui": "http://localhost:8080/finding/235" + } + ] + }, + "product": { + "id": 4, + "name": "notif prod", + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4" + }, + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "test": { + "id": 90, + "title": "notif test", + "url_api": "http://localhost:8080/api/v2/tests/90/", + "url_ui": "http://localhost:8080/test/90" + }, + "url_api": "http://localhost:8080/api/v2/tests/90/", + "url_ui": "http://localhost:8080/test/90", + "user": null +} +``` \ No newline at end of file diff --git a/docs/content/en/integrations/notification_webhooks/test_added.md b/docs/content/en/integrations/notification_webhooks/test_added.md new file mode 100644 index 00000000000..8614a80e0a6 --- /dev/null +++ b/docs/content/en/integrations/notification_webhooks/test_added.md @@ -0,0 +1,44 @@ +--- +title: "Event: test_added" +weight: 4 +chapter: true +--- + +## Event HTTP header +```yaml +X-DefectDojo-Event: test_added +``` + +## Event HTTP body +```json +{ + "description": null, + "engagement": { + "id": 7, + "name": "notif eng", + "url_api": "http://localhost:8080/api/v2/engagements/7/", + "url_ui": "http://localhost:8080/engagement/7" + }, + "product": { + "id": 4, + "name": "notif prod", + "url_api": "http://localhost:8080/api/v2/products/4/", + "url_ui": "http://localhost:8080/product/4" + }, + "product_type": { + "id": 4, + "name": "notif prod type", + "url_api": "http://localhost:8080/api/v2/product_types/4/", + "url_ui": "http://localhost:8080/product/type/4" + }, + "test": { + "id": 90, + "title": "notif test", + "url_api": "http://localhost:8080/api/v2/tests/90/", + "url_ui": "http://localhost:8080/test/90" + }, + "url_api": "http://localhost:8080/api/v2/tests/90/", + "url_ui": "http://localhost:8080/test/90", + "user": null +} +``` \ No newline at end of file diff --git a/docs/content/en/integrations/notifications.md b/docs/content/en/integrations/notifications.md index d5af295f0eb..803388797cd 100644 --- a/docs/content/en/integrations/notifications.md +++ b/docs/content/en/integrations/notifications.md @@ -18,6 +18,7 @@ The following notification methods currently exist: - Email - Slack - Microsoft Teams + - 
Webhooks - Alerts within DefectDojo (default) You can set these notifications on a global scope (if you have @@ -124,4 +125,8 @@ However, there is a specific use-case when the user decides to disable notificat The scope of this setting is customizable (see environmental variable `DD_NOTIFICATIONS_SYSTEM_LEVEL_TRUMP`). -For more information about this behavior see the [related pull request #9699](https://github.com/DefectDojo/django-DefectDojo/pull/9699/) \ No newline at end of file +For more information about this behavior see the [related pull request #9699](https://github.com/DefectDojo/django-DefectDojo/pull/9699/) + +## Webhooks (experimental) + +DefectDojo also supports webhooks that follow the same events as other notifications (you can be notified in the same situations). Details about setup are described in [related page](../notification_webhooks/). diff --git a/docs/content/en/integrations/rate_limiting.md b/docs/content/en/integrations/rate_limiting.md index 0cac784c5f5..1ea76ace5b3 100644 --- a/docs/content/en/integrations/rate_limiting.md +++ b/docs/content/en/integrations/rate_limiting.md @@ -2,7 +2,7 @@ title: "Rate Limiting" description: "Configurable rate limiting on the login page to mitigate brute force attacks" draft: false -weight: 9 +weight: 11 --- diff --git a/docs/package-lock.json b/docs/package-lock.json index 56ef63cc01b..93d84625c28 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -6,7 +6,7 @@ "": { "devDependencies": { "autoprefixer": "10.4.20", - "postcss": "8.4.41", + "postcss": "8.4.47", "postcss-cli": "11.0.0" } }, @@ -585,9 +585,9 @@ } }, "node_modules/picocolors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", - "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", "dev": true }, "node_modules/picomatch": { @@ -612,9 +612,9 @@ } }, "node_modules/postcss": { - "version": "8.4.41", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.41.tgz", - "integrity": "sha512-TesUflQ0WKZqAvg52PWL6kHgLKP6xB6heTOdoYM0Wt2UHyxNa4K25EZZMgKns3BH1RLVbZCREPpLY0rhnNoHVQ==", + "version": "8.4.47", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.47.tgz", + "integrity": "sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ==", "dev": true, "funding": [ { @@ -632,8 +632,8 @@ ], "dependencies": { "nanoid": "^3.3.7", - "picocolors": "^1.0.1", - "source-map-js": "^1.2.0" + "picocolors": "^1.1.0", + "source-map-js": "^1.2.1" }, "engines": { "node": "^10 || ^12 || >=14" @@ -834,9 +834,9 @@ } }, "node_modules/source-map-js": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", - "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", "dev": true, "engines": { "node": ">=0.10.0" @@ -1372,9 +1372,9 @@ "dev": true }, "picocolors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", - "integrity": 
"sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", "dev": true }, "picomatch": { @@ -1390,14 +1390,14 @@ "dev": true }, "postcss": { - "version": "8.4.41", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.41.tgz", - "integrity": "sha512-TesUflQ0WKZqAvg52PWL6kHgLKP6xB6heTOdoYM0Wt2UHyxNa4K25EZZMgKns3BH1RLVbZCREPpLY0rhnNoHVQ==", + "version": "8.4.47", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.47.tgz", + "integrity": "sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ==", "dev": true, "requires": { "nanoid": "^3.3.7", - "picocolors": "^1.0.1", - "source-map-js": "^1.2.0" + "picocolors": "^1.1.0", + "source-map-js": "^1.2.1" } }, "postcss-cli": { @@ -1504,9 +1504,9 @@ "dev": true }, "source-map-js": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", - "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", "dev": true }, "string-width": { diff --git a/docs/package.json b/docs/package.json index 9eb98f0f32b..9720854bf00 100644 --- a/docs/package.json +++ b/docs/package.json @@ -1,6 +1,6 @@ { "devDependencies": { - "postcss": "8.4.41", + "postcss": "8.4.47", "autoprefixer": "10.4.20", "postcss-cli": "11.0.0" } diff --git a/dojo/__init__.py b/dojo/__init__.py index 6697fae9611..6bc97e6bbb2 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. 
from .celery import app as celery_app # noqa: F401 -__version__ = "2.38.4" +__version__ = "2.39.0" __url__ = "https://github.com/DefectDojo/django-DefectDojo" __docs__ = "https://documentation.defectdojo.com" diff --git a/dojo/admin.py b/dojo/admin.py index a2452ce1e54..c40d39e3c23 100644 --- a/dojo/admin.py +++ b/dojo/admin.py @@ -22,29 +22,25 @@ class QuestionChildAdmin(PolymorphicChildModelAdmin): - """ - Base admin class for all child models of Question - """ + + """Base admin class for all child models of Question""" base_model = Question class TextQuestionAdmin(QuestionChildAdmin): - """ - ModelAdmin for a TextQuestion - """ + + """ModelAdmin for a TextQuestion""" class ChoiceQuestionAdmin(QuestionChildAdmin): - """ - ModelAdmin for a ChoiceQuestion - """ + + """ModelAdmin for a ChoiceQuestion""" class QuestionParentAdmin(PolymorphicParentModelAdmin): - """ - Question parent model admin - """ + + """Question parent model admin""" base_model = Question child_models = ( @@ -60,29 +56,25 @@ class QuestionParentAdmin(PolymorphicParentModelAdmin): class AnswerChildAdmin(PolymorphicChildModelAdmin): - """ - Base admin class for all child Answer models - """ + + """Base admin class for all child Answer models""" base_model = Answer class TextAnswerAdmin(AnswerChildAdmin): - """ - ModelAdmin for TextAnswer - """ + + """ModelAdmin for TextAnswer""" class ChoiceAnswerAdmin(AnswerChildAdmin): - """ - ModelAdmin for ChoiceAnswer - """ + + """ModelAdmin for ChoiceAnswer""" class AnswerParentAdmin(PolymorphicParentModelAdmin): - """ - The parent model admin for answer - """ + + """The parent model admin for answer""" list_display = ( "answered_survey", diff --git a/dojo/announcement/views.py b/dojo/announcement/views.py index 6b0cb16bc3c..26160c3236b 100644 --- a/dojo/announcement/views.py +++ b/dojo/announcement/views.py @@ -81,12 +81,11 @@ def dismiss_announcement(request): extra_tags="alert-success", ) return HttpResponseRedirect("dashboard") - else: - messages.add_message( - request, - messages.ERROR, - _("Failed to remove announcement."), - extra_tags="alert-danger", - ) - return render(request, "dojo/dismiss_announcement.html") + messages.add_message( + request, + messages.ERROR, + _("Failed to remove announcement."), + extra_tags="alert-danger", + ) + return render(request, "dojo/dismiss_announcement.html") return render(request, "dojo/dismiss_announcement.html") diff --git a/dojo/api_v2/mixins.py b/dojo/api_v2/mixins.py index e32683c3742..6c6b4792757 100644 --- a/dojo/api_v2/mixins.py +++ b/dojo/api_v2/mixins.py @@ -29,8 +29,7 @@ def delete_preview(self, request, pk=None): def flatten(elem): if isinstance(elem, list): return itertools.chain.from_iterable(map(flatten, elem)) - else: - return [elem] + return [elem] rels = [ { diff --git a/dojo/api_v2/permissions.py b/dojo/api_v2/permissions.py index f7669826830..fe508c92b1b 100644 --- a/dojo/api_v2/permissions.py +++ b/dojo/api_v2/permissions.py @@ -35,8 +35,7 @@ def check_post_permission(request, post_model, post_pk, post_permission): raise ParseError(msg) object = get_object_or_404(post_model, pk=request.data.get(post_pk)) return user_has_permission(request.user, object, post_permission) - else: - return True + return True def check_object_permission( @@ -49,14 +48,13 @@ def check_object_permission( ): if request.method == "GET": return user_has_permission(request.user, object, get_permission) - elif request.method == "PUT" or request.method == "PATCH": + if request.method == "PUT" or request.method == "PATCH": return 
user_has_permission(request.user, object, put_permission) - elif request.method == "DELETE": + if request.method == "DELETE": return user_has_permission(request.user, object, delete_permission) - elif request.method == "POST": + if request.method == "POST": return user_has_permission(request.user, object, post_permission) - else: - return False + return False class UserHasAppAnalysisPermission(permissions.BasePermission): @@ -113,12 +111,11 @@ def has_permission(self, request, view): return user_has_configuration_permission( request.user, "auth.view_group", ) - elif request.method == "POST": + if request.method == "POST": return user_has_configuration_permission( request.user, "auth.add_group", ) - else: - return True + return True def has_object_permission(self, request, view, obj): if request.method == "GET": @@ -130,14 +127,13 @@ def has_object_permission(self, request, view, obj): ) and user_has_permission( request.user, obj, Permissions.Group_View, ) - else: - return check_object_permission( - request, - obj, - Permissions.Group_View, - Permissions.Group_Edit, - Permissions.Group_Delete, - ) + return check_object_permission( + request, + obj, + Permissions.Group_View, + Permissions.Group_Edit, + Permissions.Group_Delete, + ) class UserHasDojoGroupMemberPermission(permissions.BasePermission): @@ -188,8 +184,7 @@ def has_permission(self, request, view): ) ) return has_permission_result - else: - return True + return True def has_object_permission(self, request, view, obj): has_permission_result = True @@ -293,9 +288,8 @@ def has_permission(self, request, view): return check_post_permission( request, Product, "product", Permissions.Engagement_Add, ) - else: - # related object only need object permission - return True + # related object only need object permission + return True def has_object_permission(self, request, view, obj): if UserHasEngagementPermission.path_engagement_post.match( @@ -308,15 +302,14 @@ def has_object_permission(self, request, view, obj): Permissions.Engagement_Edit, Permissions.Engagement_Delete, ) - else: - return check_object_permission( - request, - obj, - Permissions.Engagement_View, - Permissions.Engagement_Edit, - Permissions.Engagement_Edit, - Permissions.Engagement_Edit, - ) + return check_object_permission( + request, + obj, + Permissions.Engagement_View, + Permissions.Engagement_Edit, + Permissions.Engagement_Edit, + Permissions.Engagement_Edit, + ) class UserHasRiskAcceptancePermission(permissions.BasePermission): @@ -334,9 +327,8 @@ def has_permission(self, request, view): return check_post_permission( request, Product, "product", Permissions.Risk_Acceptance, ) - else: - # related object only need object permission - return True + # related object only need object permission + return True def has_object_permission(self, request, view, obj): if UserHasRiskAcceptancePermission.path_risk_acceptance_post.match( @@ -351,15 +343,14 @@ def has_object_permission(self, request, view, obj): Permissions.Risk_Acceptance, Permissions.Risk_Acceptance, ) - else: - return check_object_permission( - request, - obj, - Permissions.Risk_Acceptance, - Permissions.Risk_Acceptance, - Permissions.Risk_Acceptance, - Permissions.Risk_Acceptance, - ) + return check_object_permission( + request, + obj, + Permissions.Risk_Acceptance, + Permissions.Risk_Acceptance, + Permissions.Risk_Acceptance, + Permissions.Risk_Acceptance, + ) class UserHasFindingPermission(permissions.BasePermission): @@ -382,9 +373,8 @@ def has_permission(self, request, view): return check_post_permission( 
request, Test, "test", Permissions.Finding_Add, ) - else: - # related object only need object permission - return True + # related object only need object permission + return True def has_object_permission(self, request, view, obj): if ( @@ -402,15 +392,14 @@ def has_object_permission(self, request, view, obj): Permissions.Finding_Edit, Permissions.Finding_Delete, ) - else: - return check_object_permission( - request, - obj, - Permissions.Finding_View, - Permissions.Finding_Edit, - Permissions.Finding_Edit, - Permissions.Finding_Edit, - ) + return check_object_permission( + request, + obj, + Permissions.Finding_View, + Permissions.Finding_Edit, + Permissions.Finding_Edit, + Permissions.Finding_Edit, + ) class UserHasImportPermission(permissions.BasePermission): @@ -435,7 +424,7 @@ def has_permission(self, request, view): return user_has_permission( request.user, engagement, Permissions.Import_Scan_Result, ) - elif engagement_id := converted_dict.get("engagement_id"): + if engagement_id := converted_dict.get("engagement_id"): # engagement_id doesn't exist msg = f'Engagement "{engagement_id}" does not exist' raise serializers.ValidationError(msg) @@ -452,19 +441,19 @@ def has_permission(self, request, view): converted_dict.get("product_type"), "Need engagement_id or product_name + engagement_name to perform import", ) - else: - # the engagement doesn't exist, so we need to check if the user has - # requested and is allowed to use auto_create - return check_auto_create_permission( - request.user, - converted_dict.get("product"), - converted_dict.get("product_name"), - converted_dict.get("engagement"), - converted_dict.get("engagement_name"), - converted_dict.get("product_type"), - converted_dict.get("product_type_name"), - "Need engagement_id or product_name + engagement_name to perform import", - ) + return None + # the engagement doesn't exist, so we need to check if the user has + # requested and is allowed to use auto_create + return check_auto_create_permission( + request.user, + converted_dict.get("product"), + converted_dict.get("product_name"), + converted_dict.get("engagement"), + converted_dict.get("engagement_name"), + converted_dict.get("product_type"), + converted_dict.get("product_type_name"), + "Need engagement_id or product_name + engagement_name to perform import", + ) class UserHasMetaImportPermission(permissions.BasePermission): @@ -490,13 +479,12 @@ def has_permission(self, request, view): return user_has_permission( request.user, product, Permissions.Import_Scan_Result, ) - elif product_id := converted_dict.get("product_id"): + if product_id := converted_dict.get("product_id"): # product_id doesn't exist msg = f'Product "{product_id}" does not exist' raise serializers.ValidationError(msg) - else: - msg = "Need product_id or product_name to perform import" - raise serializers.ValidationError(msg) + msg = "Need product_id or product_name to perform import" + raise serializers.ValidationError(msg) class UserHasProductPermission(permissions.BasePermission): @@ -556,8 +544,7 @@ def has_permission(self, request, view): return user_has_global_permission( request.user, Permissions.Product_Type_Add, ) - else: - return True + return True def has_object_permission(self, request, view, obj): return check_object_permission( @@ -631,7 +618,7 @@ def has_permission(self, request, view): return user_has_permission( request.user, test, Permissions.Import_Scan_Result, ) - elif test_id := converted_dict.get("test_id"): + if test_id := converted_dict.get("test_id"): # test_id doesn't exist 
msg = f'Test "{test_id}" does not exist' raise serializers.ValidationError(msg) @@ -648,19 +635,19 @@ def has_permission(self, request, view): converted_dict.get("product_type"), "Need test_id or product_name + engagement_name + scan_type to perform reimport", ) - else: - # the test doesn't exist, so we need to check if the user has - # requested and is allowed to use auto_create - return check_auto_create_permission( - request.user, - converted_dict.get("product"), - converted_dict.get("product_name"), - converted_dict.get("engagement"), - converted_dict.get("engagement_name"), - converted_dict.get("product_type"), - converted_dict.get("product_type_name"), - "Need test_id or product_name + engagement_name + scan_type to perform reimport", - ) + return None + # the test doesn't exist, so we need to check if the user has + # requested and is allowed to use auto_create + return check_auto_create_permission( + request.user, + converted_dict.get("product"), + converted_dict.get("product_name"), + converted_dict.get("engagement"), + converted_dict.get("engagement_name"), + converted_dict.get("product_type"), + converted_dict.get("product_type_name"), + "Need test_id or product_name + engagement_name + scan_type to perform reimport", + ) class UserHasTestPermission(permissions.BasePermission): @@ -676,9 +663,8 @@ def has_permission(self, request, view): return check_post_permission( request, Engagement, "engagement", Permissions.Test_Add, ) - else: - # related object only need object permission - return True + # related object only need object permission + return True def has_object_permission(self, request, view, obj): if UserHasTestPermission.path_tests_post.match( @@ -691,15 +677,14 @@ def has_object_permission(self, request, view, obj): Permissions.Test_Edit, Permissions.Test_Delete, ) - else: - return check_object_permission( - request, - obj, - Permissions.Test_View, - Permissions.Test_Edit, - Permissions.Test_Edit, - Permissions.Test_Edit, - ) + return check_object_permission( + request, + obj, + Permissions.Test_View, + Permissions.Test_Edit, + Permissions.Test_Edit, + Permissions.Test_Edit, + ) class UserHasTestImportPermission(permissions.BasePermission): @@ -776,8 +761,7 @@ def has_permission(self, request, view): ) ) return has_permission_result - else: - return True + return True def has_object_permission(self, request, view, obj): has_permission_result = True @@ -840,8 +824,7 @@ def has_permission(self, request, view): ) ) return has_permission_result - else: - return True + return True def has_object_permission(self, request, view, obj): has_permission_result = True @@ -934,9 +917,8 @@ def raise_no_auto_create_import_validation_error( if product_type_name: msg = f'Product "{product_name}" does not exist in Product_Type "{product_type_name}"' raise serializers.ValidationError(msg) - else: - msg = f'Product "{product_name}" does not exist' - raise serializers.ValidationError(msg) + msg = f'Product "{product_name}" does not exist' + raise serializers.ValidationError(msg) if engagement_name and not engagement: msg = f'Engagement "{engagement_name}" does not exist in Product "{product_name}"' @@ -1021,12 +1003,11 @@ def check_auto_create_permission( # new product type can be created with current user as owner, so # all objects in it can be created as well return True - else: - if not user_has_permission( - user, product_type, Permissions.Product_Type_Add_Product, - ): - msg = f'No permission to create products in product_type "{product_type}"' - raise PermissionDenied(msg) + if not 
user_has_permission( + user, product_type, Permissions.Product_Type_Add_Product, + ): + msg = f'No permission to create products in product_type "{product_type}"' + raise PermissionDenied(msg) # product can be created, so objects in it can be created as well return True diff --git a/dojo/api_v2/prefetch/prefetcher.py b/dojo/api_v2/prefetch/prefetcher.py index 3596b3f9409..917afd2a048 100644 --- a/dojo/api_v2/prefetch/prefetcher.py +++ b/dojo/api_v2/prefetch/prefetcher.py @@ -18,7 +18,8 @@ class _Prefetcher: @staticmethod def _build_serializers(): - """Returns a map model -> serializer where model is a django model and serializer is the corresponding + """ + Returns a map model -> serializer where model is a django model and serializer is the corresponding serializer used to serialize the model Returns: @@ -52,7 +53,8 @@ def __init__(self): self._prefetch_data = {} def _find_serializer(self, field_type): - """Find the best suited serializer for the given type. + """ + Find the best suited serializer for the given type. Args: field_type (django.db.models.fields): the field type for which we need to find a serializer @@ -72,7 +74,8 @@ def _find_serializer(self, field_type): return self._find_serializer(parent_class) def _prefetch(self, entry, fields_to_fetch): - """Apply prefetching for the given field on the given entry + """ + Apply prefetching for the given field on the given entry Args: entry (ModelInstance): Instance of a model as returned by a django queryset diff --git a/dojo/api_v2/prefetch/schema.py b/dojo/api_v2/prefetch/schema.py index 535e01e4e6c..ef5cbbf389f 100644 --- a/dojo/api_v2/prefetch/schema.py +++ b/dojo/api_v2/prefetch/schema.py @@ -18,7 +18,8 @@ def _get_path_to_GET_serializer_map(generator): def get_serializer_ref_name(serializer): - """Get serializer's ref_name + """ + Get serializer's ref_name inspired by https://github.com/axnsan12/drf-yasg/blob/78031f0c189585c30fccb5005a6899f2d34289a9/src/drf_yasg/utils.py#L416 :param serializer: Serializer instance @@ -37,14 +38,14 @@ def get_serializer_ref_name(serializer): def prefetch_postprocessing_hook(result, generator, request, public): - """OpenAPI v3 (drf-spectacular) Some endpoints are using the PrefetchListMixin and PrefetchRetrieveMixin. + """ + OpenAPI v3 (drf-spectacular) Some endpoints are using the PrefetchListMixin and PrefetchRetrieveMixin. These have nothing to do with Django prefetch_related. The endpoints have an @extend_schema configured with an extra parameter 'prefetch' This parameter contains an array of relations to prefetch. These prefetched models will be returned in an additional property in the response. The below processor ensures the result schema matches this. """ - serializer_classes = _get_path_to_GET_serializer_map(generator) paths = result.get("paths", {}) diff --git a/dojo/api_v2/prefetch/utils.py b/dojo/api_v2/prefetch/utils.py index de7ea2b3834..5e588e03ce8 100644 --- a/dojo/api_v2/prefetch/utils.py +++ b/dojo/api_v2/prefetch/utils.py @@ -2,7 +2,8 @@ def _is_many_to_many_relation(field): - """Check if a field specified a many-to-many relationship as defined by django. + """ + Check if a field specified a many-to-many relationship as defined by django. This is the case if the field is an instance of the ManyToManyDescriptor as generated by the django framework @@ -16,7 +17,8 @@ def _is_many_to_many_relation(field): def _is_one_to_one_relation(field): - """Check if a field specified a one-to-one relationship as defined by django. 
+ """ + Check if a field specified a one-to-one relationship as defined by django. This is the case if the field is an instance of the ForwardManyToOne as generated by the django framework @@ -30,7 +32,8 @@ def _is_one_to_one_relation(field): def _get_prefetchable_fields(serializer): - """Get the fields that are prefetchable according to the serializer description. + """ + Get the fields that are prefetchable according to the serializer description. Method mainly used by for automatic schema generation. Args: diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py index 1cc6d35ed09..471dfc019b5 100644 --- a/dojo/api_v2/serializers.py +++ b/dojo/api_v2/serializers.py @@ -77,6 +77,7 @@ Note_Type, NoteHistory, Notes, + Notification_Webhooks, Notifications, Product, Product_API_Scan_Configuration, @@ -226,9 +227,7 @@ def to_internal_value(self, data): substrings = re.findall(r'(?:"[^"]*"|[^",]+)', s) data_safe.extend(substrings) - internal_value = tagulous.utils.render_tags(data_safe) - - return internal_value + return tagulous.utils.render_tags(data_safe) def to_representation(self, value): if not isinstance(value, list): @@ -304,8 +303,7 @@ def __str__(self): return json.dumps( self, sort_keys=True, indent=4, separators=(",", ": "), ) - else: - return json.dumps(self) + return json.dumps(self) class RequestResponseSerializerField(serializers.ListSerializer): @@ -556,8 +554,7 @@ def validate(self, data): if self.context["request"].method == "POST" and "password" not in data and settings.REQUIRE_PASSWORD_ON_USER: msg = "Passwords must be supplied for new users" raise ValidationError(msg) - else: - return super().validate(data) + return super().validate(data) class UserContactInfoSerializer(serializers.ModelSerializer): @@ -822,6 +819,7 @@ def validate(self, data): ) raise ValidationError(msg) return data + return None class RawFileSerializer(serializers.ModelSerializer): @@ -1074,8 +1072,7 @@ def to_representation(self, data): "title": file.title, }, ) - new_data = {"engagement_id": engagement.id, "files": new_files} - return new_data + return {"engagement_id": engagement.id, "files": new_files} class EngagementCheckListSerializer(serializers.ModelSerializer): @@ -1147,8 +1144,7 @@ def run_validators(self, initial_data): if "finding, endpoint must make a unique set" in str(exc): msg = "This endpoint-finding relation already exists" raise serializers.ValidationError(msg) from exc - else: - raise + raise def create(self, validated_data): endpoint = validated_data.get("endpoint") @@ -1161,8 +1157,7 @@ def create(self, validated_data): if "finding, endpoint must make a unique set" in str(ie): msg = "This endpoint-finding relation already exists" raise serializers.ValidationError(msg) - else: - raise + raise status.mitigated = validated_data.get("mitigated", False) status.false_positive = validated_data.get("false_positive", False) status.out_of_scope = validated_data.get("out_of_scope", False) @@ -1178,8 +1173,7 @@ def update(self, instance, validated_data): if "finding, endpoint must make a unique set" in str(ie): msg = "This endpoint-finding relation already exists" raise serializers.ValidationError(msg) - else: - raise + raise class EndpointSerializer(TaggitSerializer, serializers.ModelSerializer): @@ -1440,8 +1434,7 @@ def to_representation(self, data): "title": file.title, }, ) - new_data = {"test_id": test.id, "files": new_files} - return new_data + return {"test_id": test.id, "files": new_files} class TestImportFindingActionSerializer(serializers.ModelSerializer): @@ -1699,8 
+1692,7 @@ def get_related_fields(self, obj): return FindingRelatedFieldsSerializer( required=False, ).to_representation(obj) - else: - return None + return None def get_display_status(self, obj) -> str: return obj.status() @@ -1744,8 +1736,7 @@ def update(self, instance, validated_data): # not sure why we are returning a tag_object, but don't want to change # too much now as we're just fixing a bug - tag_object = self._save_tags(instance, to_be_tagged) - return tag_object + return self._save_tags(instance, to_be_tagged) def validate(self, data): if self.context["request"].method == "PATCH": @@ -1764,10 +1755,10 @@ def validate(self, data): is_risk_accepted = data.get("risk_accepted", False) if (is_active or is_verified) and is_duplicate: - msg = "Duplicate findings cannot be" " verified or active" + msg = "Duplicate findings cannot be verified or active" raise serializers.ValidationError(msg) if is_false_p and is_verified: - msg = "False positive findings cannot " "be verified." + msg = "False positive findings cannot be verified." raise serializers.ValidationError(msg) if is_risk_accepted and not self.instance.risk_accepted: @@ -1881,8 +1872,7 @@ def create(self, validated_data): # not sure why we are returning a tag_object, but don't want to change # too much now as we're just fixing a bug - tag_object = self._save_tags(new_finding, to_be_tagged) - return tag_object + return self._save_tags(new_finding, to_be_tagged) def validate(self, data): if "reporter" not in data: @@ -1939,6 +1929,8 @@ class Meta: exclude = ("cve",) def create(self, validated_data): + to_be_tagged, validated_data = self._pop_tags(validated_data) + # Save vulnerability ids and pop them if "vulnerability_id_template_set" in validated_data: vulnerability_id_set = validated_data.pop( @@ -1961,6 +1953,7 @@ def create(self, validated_data): ) new_finding_template.save() + self._save_tags(new_finding_template, to_be_tagged) return new_finding_template def update(self, instance, validated_data): @@ -2798,8 +2791,7 @@ def to_representation(self, data): "title": file.title, }, ) - new_data = {"finding_id": finding.id, "files": new_files} - return new_data + return {"finding_id": finding.id, "files": new_files} class FindingCloseSerializer(serializers.ModelSerializer): @@ -3056,10 +3048,9 @@ class QuestionnaireQuestionSerializer(serializers.ModelSerializer): def to_representation(self, instance): if isinstance(instance, TextQuestion): return TextQuestionSerializer(instance=instance).data - elif isinstance(instance, ChoiceQuestion): + if isinstance(instance, ChoiceQuestion): return ChoiceQuestionSerializer(instance=instance).data - else: - return QuestionSerializer(instance=instance).data + return QuestionSerializer(instance=instance).data class Meta: model = Question @@ -3096,10 +3087,9 @@ class QuestionnaireAnswerSerializer(serializers.ModelSerializer): def to_representation(self, instance): if isinstance(instance, TextAnswer): return TextAnswerSerializer(instance=instance).data - elif isinstance(instance, ChoiceAnswer): + if isinstance(instance, ChoiceAnswer): return ChoiceAnswerSerializer(instance=instance).data - else: - return AnswerSerializer(instance=instance).data + return AnswerSerializer(instance=instance).data class Meta: model = Answer @@ -3173,5 +3163,10 @@ def create(self, validated_data): if 'duplicate key value violates unique constraint "dojo_announcement_pkey"' in str(e): msg = "No more than one Announcement is allowed" raise serializers.ValidationError(msg) - else: - raise + raise + + +class 
NotificationWebhooksSerializer(serializers.ModelSerializer): + class Meta: + model = Notification_Webhooks + fields = "__all__" diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py index a2672e87268..09e7cb734b5 100644 --- a/dojo/api_v2/views.py +++ b/dojo/api_v2/views.py @@ -111,6 +111,7 @@ Network_Locations, Note_Type, Notes, + Notification_Webhooks, Notifications, Product, Product_API_Scan_Configuration, @@ -872,8 +873,7 @@ def get_queryset(self): def get_serializer_class(self): if self.request and self.request.method == "POST": return serializers.FindingCreateSerializer - else: - return serializers.FindingSerializer + return serializers.FindingSerializer @extend_schema( methods=["POST"], @@ -1220,10 +1220,9 @@ def remove_tags(self, request, pk=None): {"success": "Tag(s) Removed"}, status=status.HTTP_204_NO_CONTENT, ) - else: - return Response( - delete_tags.errors, status=status.HTTP_400_BAD_REQUEST, - ) + return Response( + delete_tags.errors, status=status.HTTP_400_BAD_REQUEST, + ) @extend_schema( responses={ @@ -1361,10 +1360,9 @@ def _add_metadata(self, request, finding): ) return Response(data=metadata_data.data, status=status.HTTP_200_OK) - else: - return Response( - metadata_data.errors, status=status.HTTP_400_BAD_REQUEST, - ) + return Response( + metadata_data.errors, status=status.HTTP_400_BAD_REQUEST, + ) def _remove_metadata(self, request, finding): name = request.query_params.get("name", None) @@ -1451,13 +1449,13 @@ def metadata(self, request, pk=None): if request.method == "GET": return self._get_metadata(request, finding) - elif request.method == "POST": + if request.method == "POST": return self._add_metadata(request, finding) - elif request.method == "PUT": + if request.method == "PUT": return self._edit_metadata(request, finding) - elif request.method == "PATCH": + if request.method == "PATCH": return self._edit_metadata(request, finding) - elif request.method == "DELETE": + if request.method == "DELETE": return self._remove_metadata(request, finding) return Response( @@ -1931,8 +1929,7 @@ def get_queryset(self): def get_serializer_class(self): if self.request and self.request.method == "POST": return serializers.StubFindingCreateSerializer - else: - return serializers.StubFindingSerializer + return serializers.StubFindingSerializer # Authorization: authenticated, configuration @@ -1986,8 +1983,7 @@ def get_serializer_class(self): if self.action == "accept_risks": return ra_api.AcceptedRiskSerializer return serializers.TestCreateSerializer - else: - return serializers.TestSerializer + return serializers.TestSerializer @extend_schema( request=serializers.ReportGenerateOptionSerializer, @@ -2374,6 +2370,7 @@ def get(self, request, format=None): # Authorization: authenticated users, DjangoModelPermissions class ImportScanView(mixins.CreateModelMixin, viewsets.GenericViewSet): + """ Imports a scan report into an engagement or product. @@ -2437,6 +2434,7 @@ def get_queryset(self): class EndpointMetaImporterView( mixins.CreateModelMixin, viewsets.GenericViewSet, ): + """ Imports a CSV file into a product to propagate arbitrary meta and tags on endpoints. @@ -2512,6 +2510,7 @@ def get_queryset(self): # Authorization: object-based class ReImportScanView(mixins.CreateModelMixin, viewsets.GenericViewSet): + """ Reimports a scan report into an existing test. @@ -2912,6 +2911,7 @@ def report_generate(request, obj, options): class SystemSettingsViewSet( mixins.ListModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet, ): + """Basic control over System Settings. 
Use 'id' 1 for PUT, PATCH operations""" permission_classes = (permissions.IsSuperUser, DjangoModelPermissions) @@ -3087,3 +3087,13 @@ class AnnouncementViewSet( def get_queryset(self): return Announcement.objects.all().order_by("id") + + +class NotificationWebhooksViewSet( + PrefetchDojoModelViewSet, +): + serializer_class = serializers.NotificationWebhooksSerializer + queryset = Notification_Webhooks.objects.all() + filter_backends = (DjangoFilterBackend,) + filterset_fields = "__all__" + permission_classes = (permissions.IsSuperUser, DjangoModelPermissions) # TODO: add permission also for other users diff --git a/dojo/apps.py b/dojo/apps.py index e7a39ab5433..fd3a06575fd 100644 --- a/dojo/apps.py +++ b/dojo/apps.py @@ -92,8 +92,7 @@ def get_model_fields_with_extra(model, extra_fields=()): def get_model_fields(default_fields, extra_fields=()): - combined = default_fields + extra_fields - return combined + return default_fields + extra_fields def get_model_default_fields(model): diff --git a/dojo/authorization/authorization.py b/dojo/authorization/authorization.py index a542d7c6e01..8f013b60061 100644 --- a/dojo/authorization/authorization.py +++ b/dojo/authorization/authorization.py @@ -66,7 +66,7 @@ def user_has_permission(user, obj, permission): if role_has_permission(product_type_group.role.id, permission): return True return False - elif ( + if ( isinstance(obj, Product) and permission.value >= Permissions.Product_View.value ): @@ -87,51 +87,51 @@ def user_has_permission(user, obj, permission): if role_has_permission(product_group.role.id, permission): return True return False - elif ( + if ( isinstance(obj, Engagement) and permission in Permissions.get_engagement_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Test) and permission in Permissions.get_test_permissions() ): return user_has_permission(user, obj.engagement.product, permission) - elif ( + if ( isinstance(obj, Finding) or isinstance(obj, Stub_Finding) ) and permission in Permissions.get_finding_permissions(): return user_has_permission( user, obj.test.engagement.product, permission, ) - elif ( + if ( isinstance(obj, Finding_Group) and permission in Permissions.get_finding_group_permissions() ): return user_has_permission( user, obj.test.engagement.product, permission, ) - elif ( + if ( isinstance(obj, Endpoint) and permission in Permissions.get_endpoint_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Languages) and permission in Permissions.get_language_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, App_Analysis) and permission in Permissions.get_technology_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Product_API_Scan_Configuration) and permission in Permissions.get_product_api_scan_configuration_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Product_Type_Member) and permission in Permissions.get_product_type_member_permissions() ): @@ -140,9 +140,8 @@ def user_has_permission(user, obj, permission): return obj.user == user or user_has_permission( user, obj.product_type, permission, ) - else: - return user_has_permission(user, obj.product_type, permission) - elif ( + return user_has_permission(user, obj.product_type, permission) + if ( isinstance(obj, Product_Member) and permission in Permissions.get_product_member_permissions() 
): @@ -151,19 +150,18 @@ def user_has_permission(user, obj, permission): return obj.user == user or user_has_permission( user, obj.product, permission, ) - else: - return user_has_permission(user, obj.product, permission) - elif ( + return user_has_permission(user, obj.product, permission) + if ( isinstance(obj, Product_Type_Group) and permission in Permissions.get_product_type_group_permissions() ): return user_has_permission(user, obj.product_type, permission) - elif ( + if ( isinstance(obj, Product_Group) and permission in Permissions.get_product_group_permissions() ): return user_has_permission(user, obj.product, permission) - elif ( + if ( isinstance(obj, Dojo_Group) and permission in Permissions.get_group_permissions() ): @@ -173,7 +171,7 @@ def user_has_permission(user, obj, permission): return group_member is not None and role_has_permission( group_member.role.id, permission, ) - elif ( + if ( isinstance(obj, Dojo_Group_Member) and permission in Permissions.get_group_member_permissions() ): @@ -182,9 +180,8 @@ def user_has_permission(user, obj, permission): return obj.user == user or user_has_permission( user, obj.group, permission, ) - else: - return user_has_permission(user, obj.group, permission) - elif ( + return user_has_permission(user, obj.group, permission) + if ( isinstance(obj, Cred_Mapping) and permission in Permissions.get_credential_permissions() ): @@ -202,9 +199,9 @@ def user_has_permission(user, obj, permission): return user_has_permission( user, obj.finding.test.engagement.product, permission, ) - else: - msg = f"No authorization implemented for class {type(obj).__name__} and permission {permission}" - raise NoAuthorizationImplementedError(msg) + return None + msg = f"No authorization implemented for class {type(obj).__name__} and permission {permission}" + raise NoAuthorizationImplementedError(msg) def user_has_global_permission(user, permission): diff --git a/dojo/authorization/authorization_decorators.py b/dojo/authorization/authorization_decorators.py index 3063d0821d1..1f1bc9dbcb9 100644 --- a/dojo/authorization/authorization_decorators.py +++ b/dojo/authorization/authorization_decorators.py @@ -12,7 +12,6 @@ def user_is_authorized(model, permission, arg, lookup="pk", func=None): """Decorator for functions that ensures the user has permission on an object.""" - if func is None: return functools.partial( user_is_authorized, model, permission, arg, lookup, @@ -41,7 +40,6 @@ def _wrapped(request, *args, **kwargs): def user_has_global_permission(permission, func=None): """Decorator for functions that ensures the user has a (global) permission""" - if func is None: return functools.partial(user_has_global_permission, permission) @@ -54,10 +52,7 @@ def _wrapped(request, *args, **kwargs): def user_is_configuration_authorized(permission, func=None): - """ - Decorator for views that checks whether a user has a particular permission enabled. - """ - + """Decorator for views that checks whether a user has a particular permission enabled.""" if func is None: return functools.partial(user_is_configuration_authorized, permission) diff --git a/dojo/authorization/roles_permissions.py b/dojo/authorization/roles_permissions.py index 779463258ff..530008a2f7a 100644 --- a/dojo/authorization/roles_permissions.py +++ b/dojo/authorization/roles_permissions.py @@ -517,9 +517,7 @@ def get_roles_with_permissions(): def get_global_roles_with_permissions(): - """ - Extra permissions for global roles, on top of the permissions granted to the "normal" roles above. 
- """ + """Extra permissions for global roles, on top of the permissions granted to the "normal" roles above.""" return { Roles.Maintainer: {Permissions.Product_Type_Add}, Roles.Owner: {Permissions.Product_Type_Add}, diff --git a/dojo/cred/queries.py b/dojo/cred/queries.py index 4dd14385a06..28419772328 100644 --- a/dojo/cred/queries.py +++ b/dojo/cred/queries.py @@ -44,8 +44,6 @@ def get_authorized_cred_mappings(permission, queryset=None): product__member=Exists(authorized_product_roles), product__prod_type__authorized_group=Exists(authorized_product_type_groups), product__authorized_group=Exists(authorized_product_groups)) - cred_mappings = cred_mappings.filter( + return cred_mappings.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - - return cred_mappings diff --git a/dojo/cred/views.py b/dojo/cred/views.py index 31f923748b3..2fc373c3ac9 100644 --- a/dojo/cred/views.py +++ b/dojo/cred/views.py @@ -641,10 +641,8 @@ def delete_cred_controller(request, destination_url, id, ttid): if destination_url == "cred": return HttpResponseRedirect(reverse(destination_url)) - else: - return HttpResponseRedirect(reverse(destination_url, args=(id, ))) - else: - tform = CredMappingForm(instance=cred) + return HttpResponseRedirect(reverse(destination_url, args=(id, ))) + tform = CredMappingForm(instance=cred) add_breadcrumb(title="Delete Credential", top_level=False, request=request) product_tab = None diff --git a/dojo/db_migrations/0215_webhooks_notifications.py b/dojo/db_migrations/0215_webhooks_notifications.py new file mode 100644 index 00000000000..cc65ce43f1b --- /dev/null +++ b/dojo/db_migrations/0215_webhooks_notifications.py @@ -0,0 +1,130 @@ +# Generated by Django 5.0.8 on 2024-08-16 17:07 + +import django.db.models.deletion +import multiselectfield.db.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0214_test_type_dynamically_generated'), + ] + + operations = [ + migrations.AddField( + model_name='system_settings', + name='enable_webhooks_notifications', + field=models.BooleanField(default=False, verbose_name='Enable Webhook notifications'), + ), + migrations.AddField( + model_name='system_settings', + name='webhooks_notifications_timeout', + field=models.IntegerField(default=10, help_text='How many seconds will DefectDojo waits for response from webhook endpoint'), + ), + migrations.AlterField( + model_name='notifications', + name='auto_close_engagement', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='close_engagement', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='code_review', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='engagement_added', + 
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='jira_update', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='JIRA sync happens in the background, errors will be shown as notifications/alerts so make sure to subscribe', max_length=33, verbose_name='JIRA problems'), + ), + migrations.AlterField( + model_name='notifications', + name='other', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='product_added', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='product_type_added', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='review_requested', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='risk_acceptance_expiration', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Get notified of (upcoming) Risk Acceptance expiries', max_length=33, verbose_name='Risk Acceptance Expiration'), + ), + migrations.AlterField( + model_name='notifications', + name='scan_added', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Triggered whenever an (re-)import has been done that created/updated/closed findings.', max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='scan_added_empty', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=[], help_text='Triggered whenever an (re-)import has been done (even if that created/updated/closed no findings).', max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='sla_breach', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Get notified of (upcoming) SLA breaches', max_length=33, verbose_name='SLA 
breach'), + ), + migrations.AlterField( + model_name='notifications', + name='sla_breach_combined', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Get notified of (upcoming) SLA breaches (a message per project)', max_length=33, verbose_name='SLA breach (combined)'), + ), + migrations.AlterField( + model_name='notifications', + name='stale_engagement', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='test_added', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='upcoming_engagement', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.AlterField( + model_name='notifications', + name='user_mentioned', + field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('webhooks', 'webhooks'), ('alert', 'alert')], default=('alert', 'alert'), max_length=33), + ), + migrations.CreateModel( + name='Notification_Webhooks', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(default='', help_text='Name of the incoming webhook', max_length=100, unique=True)), + ('url', models.URLField(default='', help_text='The full URL of the incoming webhook')), + ('header_name', models.CharField(blank=True, default='', help_text='Name of the header required for interacting with Webhook endpoint', max_length=100, null=True)), + ('header_value', models.CharField(blank=True, default='', help_text='Content of the header required for interacting with Webhook endpoint', max_length=100, null=True)), + ('status', models.CharField(choices=[('active', 'Active'), ('active_tmp', 'Active but 5xx (or similar) error detected'), ('inactive_tmp', 'Temporary inactive because of 5xx (or similar) error'), ('inactive_permanent', 'Permanently inactive')], default='active', editable=False, help_text='Status of the incoming webhook', max_length=20)), + ('first_error', models.DateTimeField(blank=True, editable=False, help_text='If endpoint is active, when error happened first time', null=True)), + ('last_error', models.DateTimeField(blank=True, editable=False, help_text='If endpoint is active, when error happened last time', null=True)), + ('note', models.CharField(blank=True, default='', editable=False, help_text='Description of the latest error', max_length=1000, null=True)), + ('owner', models.ForeignKey(blank=True, help_text='Owner/receiver of notification, if empty processed as system notification', null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.dojo_user')), + ], + ), + ] diff --git a/dojo/db_migrations/0216_alter_jira_project_push_all_issues.py b/dojo/db_migrations/0216_alter_jira_project_push_all_issues.py new file mode 
100644 index 00000000000..fe9378b77de --- /dev/null +++ b/dojo/db_migrations/0216_alter_jira_project_push_all_issues.py @@ -0,0 +1,18 @@ +# Generated by Django 5.0.8 on 2024-10-03 23:23 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0215_webhooks_notifications'), + ] + + operations = [ + migrations.AlterField( + model_name='jira_project', + name='push_all_issues', + field=models.BooleanField(blank=True, default=False, help_text='Automatically create JIRA tickets for verified findings. Once linked, the JIRA ticket will continue to sync, regardless of status in DefectDojo.'), + ), + ] diff --git a/dojo/decorators.py b/dojo/decorators.py index 129106c74de..b6902b8dc10 100644 --- a/dojo/decorators.py +++ b/dojo/decorators.py @@ -43,8 +43,7 @@ def __wrapper__(*args, **kwargs): countdown = kwargs.pop("countdown", 0) if we_want_async(*args, func=func, **kwargs): return func.apply_async(args=args, kwargs=kwargs, countdown=countdown) - else: - return func(*args, **kwargs) + return func(*args, **kwargs) return __wrapper__ @@ -78,8 +77,7 @@ def __wrapper__(*args, **kwargs): if _func is None: # decorator called without parameters return dojo_model_to_id_internal - else: - return dojo_model_to_id_internal(_func) + return dojo_model_to_id_internal(_func) # decorator with parameters needs another wrapper layer @@ -123,8 +121,7 @@ def __wrapper__(*args, **kwargs): if _func is None: # decorator called without parameters return dojo_model_from_id_internal - else: - return dojo_model_from_id_internal(_func) + return dojo_model_from_id_internal(_func) def get_parameter_froms_args_kwargs(args, kwargs, parameter): diff --git a/dojo/endpoint/queries.py b/dojo/endpoint/queries.py index 581feefc13b..684eeab7b1a 100644 --- a/dojo/endpoint/queries.py +++ b/dojo/endpoint/queries.py @@ -53,12 +53,10 @@ def get_authorized_endpoints(permission, queryset=None, user=None): product__member=Exists(authorized_product_roles), product__prod_type__authorized_group=Exists(authorized_product_type_groups), product__authorized_group=Exists(authorized_product_groups)) - endpoints = endpoints.filter( + return endpoints.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - return endpoints - def get_authorized_endpoint_status(permission, queryset=None, user=None): @@ -101,8 +99,6 @@ def get_authorized_endpoint_status(permission, queryset=None, user=None): endpoint__product__member=Exists(authorized_product_roles), endpoint__product__prod_type__authorized_group=Exists(authorized_product_type_groups), endpoint__product__authorized_group=Exists(authorized_product_groups)) - endpoint_status = endpoint_status.filter( + return endpoint_status.filter( Q(endpoint__product__prod_type__member=True) | Q(endpoint__product__member=True) | Q(endpoint__product__prod_type__authorized_group=True) | Q(endpoint__product__authorized_group=True)) - - return endpoint_status diff --git a/dojo/endpoint/utils.py b/dojo/endpoint/utils.py index be1c63fb0c0..d5c378e5e97 100644 --- a/dojo/endpoint/utils.py +++ b/dojo/endpoint/utils.py @@ -79,17 +79,16 @@ def endpoint_get_or_create(**kwargs): count = qs.count() if count == 0: return Endpoint.objects.get_or_create(**kwargs) - elif count == 1: - return qs.order_by("id").first(), False - else: - logger.warning( - f"Endpoints in your database are broken. 
" - f"Please access {reverse('endpoint_migrate')} and migrate them to new format or remove them.", - ) - # Get the oldest endpoint first, and return that instead - # a datetime is not captured on the endpoint model, so ID - # will have to work here instead + if count == 1: return qs.order_by("id").first(), False + logger.warning( + f"Endpoints in your database are broken. " + f"Please access {reverse('endpoint_migrate')} and migrate them to new format or remove them.", + ) + # Get the oldest endpoint first, and return that instead + # a datetime is not captured on the endpoint model, so ID + # will have to work here instead + return qs.order_by("id").first(), False def clean_hosts_run(apps, change): @@ -325,7 +324,7 @@ def endpoint_meta_import(file, product, create_endpoints, create_tags, create_me 'The column "hostname" must be present to map host to Endpoint.', extra_tags="alert-danger") return HttpResponseRedirect(reverse("import_endpoint_meta", args=(product.id, ))) - elif origin == "API": + if origin == "API": msg = 'The column "hostname" must be present to map host to Endpoint.' raise ValidationError(msg) @@ -361,14 +360,14 @@ def endpoint_meta_import(file, product, create_endpoints, create_tags, create_me for tag in existing_tags: if item[0] not in tag: continue - else: - # found existing. Update it - existing_tags.remove(tag) - break + # found existing. Update it + existing_tags.remove(tag) + break existing_tags += [item[0] + ":" + item[1]] # if tags are not supposed to be added, this value remain unchanged endpoint.tags = existing_tags endpoint.save() + return None def remove_broken_endpoint_statuses(apps): diff --git a/dojo/endpoint/views.py b/dojo/endpoint/views.py index 571f4989ec2..5a85a0a6468 100644 --- a/dojo/endpoint/views.py +++ b/dojo/endpoint/views.py @@ -98,9 +98,8 @@ def get_endpoint_ids(endpoints): key = f"{e.host}-{e.product.id}" if key in hosts: continue - else: - hosts.append(key) - ids.append(e.id) + hosts.append(key) + ids.append(e.id) return ids @@ -307,8 +306,7 @@ def add_meta_data(request, eid): extra_tags="alert-success") if "add_another" in request.POST: return HttpResponseRedirect(reverse("add_endpoint_meta_data", args=(eid,))) - else: - return HttpResponseRedirect(reverse("view_endpoint", args=(eid,))) + return HttpResponseRedirect(reverse("view_endpoint", args=(eid,))) else: form = DojoMetaDataForm() @@ -327,12 +325,12 @@ def edit_meta_data(request, eid): endpoint = Endpoint.objects.get(id=eid) if request.method == "POST": - for key, value in request.POST.items(): + for key, orig_value in request.POST.items(): if key.startswith("cfv_"): cfv_id = int(key.split("_")[1]) cfv = get_object_or_404(DojoMeta, id=cfv_id) - value = value.strip() + value = orig_value.strip() if value: cfv.value = value cfv.save() diff --git a/dojo/engagement/queries.py b/dojo/engagement/queries.py index 9d8e9b6ae41..97eeb31bdfa 100644 --- a/dojo/engagement/queries.py +++ b/dojo/engagement/queries.py @@ -39,8 +39,6 @@ def get_authorized_engagements(permission): product__member=Exists(authorized_product_roles), product__prod_type__authorized_group=Exists(authorized_product_type_groups), product__authorized_group=Exists(authorized_product_groups)).order_by("id") - engagements = engagements.filter( + return engagements.filter( Q(product__prod_type__member=True) | Q(product__member=True) | Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True)) - - return engagements diff --git a/dojo/engagement/signals.py b/dojo/engagement/signals.py index 
c2f09c9abbd..7b95d6fe87b 100644 --- a/dojo/engagement/signals.py +++ b/dojo/engagement/signals.py @@ -16,7 +16,7 @@ def engagement_post_save(sender, instance, created, **kwargs): if created: title = _('Engagement created for "%(product)s": %(name)s') % {"product": instance.product, "name": instance.name} create_notification(event="engagement_added", title=title, engagement=instance, product=instance.product, - url=reverse("view_engagement", args=(instance.id,))) + url=reverse("view_engagement", args=(instance.id,)), url_api=reverse("engagement-detail", args=(instance.id,))) @receiver(pre_save, sender=Engagement) diff --git a/dojo/engagement/views.py b/dojo/engagement/views.py index ff86435d0cc..ea73bd80c63 100644 --- a/dojo/engagement/views.py +++ b/dojo/engagement/views.py @@ -166,15 +166,13 @@ def get_filtered_engagements(request, view): filter_string_matching = get_system_setting("filter_string_matching", False) filter_class = EngagementDirectFilterWithoutObjectLookups if filter_string_matching else EngagementDirectFilter - engagements = filter_class(request.GET, queryset=engagements) - - return engagements + return filter_class(request.GET, queryset=engagements) def get_test_counts(engagements): # Get the test counts per engagement. As a separate query, this is much # faster than annotating the above `engagements` query. - engagement_test_counts = { + return { test["engagement"]: test["test_count"] for test in Test.objects.filter( engagement__in=engagements, @@ -184,7 +182,6 @@ def get_test_counts(engagements): test_count=Count("engagement"), ) } - return engagement_test_counts def engagements(request, view): @@ -304,9 +301,8 @@ def edit_engagement(request, eid): if "_Add Tests" in request.POST: return HttpResponseRedirect( reverse("add_tests", args=(engagement.id, ))) - else: - return HttpResponseRedirect( - reverse("view_engagement", args=(engagement.id, ))) + return HttpResponseRedirect( + reverse("view_engagement", args=(engagement.id, ))) else: logger.debug(form.errors) @@ -404,12 +400,11 @@ def copy_engagement(request, eid): recipients=[engagement.lead], icon="exclamation-triangle") return redirect_to_return_url_or_else(request, reverse("view_engagements", args=(product.id, ))) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to copy engagement, please try again.", - extra_tags="alert-danger") + messages.add_message( + request, + messages.ERROR, + "Unable to copy engagement, please try again.", + extra_tags="alert-danger") product_tab = Product_Tab(product, title="Copy Engagement", tab="engagements") return render(request, "dojo/copy_object.html", { @@ -427,8 +422,7 @@ def get_template(self): return "dojo/view_eng.html" def get_risks_accepted(self, eng): - risks_accepted = eng.risk_acceptance.all().select_related("owner").annotate(accepted_findings_count=Count("accepted_findings__id")) - return risks_accepted + return eng.risk_acceptance.all().select_related("owner").annotate(accepted_findings_count=Count("accepted_findings__id")) def get_filtered_tests( self, @@ -673,10 +667,10 @@ def add_tests(request, eid): if "_Add Another Test" in request.POST: return HttpResponseRedirect( reverse("add_tests", args=(eng.id, ))) - elif "_Add Findings" in request.POST: + if "_Add Findings" in request.POST: return HttpResponseRedirect( reverse("add_findings", args=(new_test.id, ))) - elif "_Finished" in request.POST: + if "_Finished" in request.POST: return HttpResponseRedirect( reverse("view_engagement", args=(eng.id, ))) else: @@ -699,9 +693,7 @@ def add_tests(request, 
eid): class ImportScanResultsView(View): def get_template(self) -> str: - """ - Returns the template that will be presented to the user - """ + """Returns the template that will be presented to the user""" return "dojo/import_scan_results.html" def get_development_environment( @@ -721,9 +713,7 @@ def get_engagement_or_product( engagement_id: Optional[int] = None, product_id: Optional[int] = None, ) -> Tuple[Engagement, Product, Product | Engagement]: - """ - Using the path parameters, either fetch the product or engagement - """ + """Using the path parameters, either fetch the product or engagement""" engagement = product = engagement_or_product = None # Get the product if supplied # Get the engagement if supplied @@ -746,13 +736,10 @@ def get_form( request: HttpRequest, **kwargs: dict, ) -> ImportScanForm: - """ - Returns the default import form for importing findings - """ + """Returns the default import form for importing findings""" if request.method == "POST": return ImportScanForm(request.POST, request.FILES, **kwargs) - else: - return ImportScanForm(**kwargs) + return ImportScanForm(**kwargs) def get_credential_form( self, @@ -766,27 +753,24 @@ def get_credential_form( """ if request.method == "POST": return CredMappingForm(request.POST) - else: - # If the engagement is not present, return an empty form - if engagement is None: - return CredMappingForm() - # Otherwise get all creds in the associated engagement - return CredMappingForm( - initial={ - "cred_user_queryset": Cred_Mapping.objects.filter( - engagement=engagement, - ).order_by("cred_id"), - }, - ) + # If the engagement is not present, return an empty form + if engagement is None: + return CredMappingForm() + # Otherwise get all creds in the associated engagement + return CredMappingForm( + initial={ + "cred_user_queryset": Cred_Mapping.objects.filter( + engagement=engagement, + ).order_by("cred_id"), + }, + ) def get_jira_form( self, request: HttpRequest, engagement_or_product: Engagement | Product, ) -> Tuple[JIRAImportScanForm | None, bool]: - """ - Returns a JiraImportScanForm if jira is enabled - """ + """Returns a JiraImportScanForm if jira is enabled""" jira_form = None push_all_jira_issues = False # Determine if jira issues should be pushed automatically @@ -927,18 +911,14 @@ def get_importer( self, context: dict, ) -> BaseImporter: - """ - Gets the importer to use - """ + """Gets the importer to use""" return DefaultImporter(**context) def import_findings( self, context: dict, ) -> str | None: - """ - Attempt to import with all the supplied information - """ + """Attempt to import with all the supplied information""" try: importer_client = self.get_importer(context) context["test"], _, finding_count, closed_finding_count, _, _, _ = importer_client.process_scan( @@ -960,9 +940,7 @@ def process_form( form: ImportScanForm, context: dict, ) -> str | None: - """ - Process the form and manipulate the input in any way that is appropriate - """ + """Process the form and manipulate the input in any way that is appropriate""" # Update the running context dict with cleaned form input context.update({ "scan": request.FILES.get("file", None), @@ -1032,9 +1010,7 @@ def process_credentials_form( form: CredMappingForm, context: dict, ) -> str | None: - """ - Process the credentials form by creating - """ + """Process the credentials form by creating""" if cred_user := form.cleaned_data["cred_user"]: # Select the credential mapping object from the selected list and only allow if the credential is associated with the product 
cred_user = Cred_Mapping.objects.filter( @@ -1054,18 +1030,14 @@ def success_redirect( self, context: dict, ) -> HttpResponseRedirect: - """ - Redirect the user to a place that indicates a successful import - """ + """Redirect the user to a place that indicates a successful import""" return HttpResponseRedirect(reverse("view_test", args=(context.get("test").id, ))) def failure_redirect( self, context: dict, ) -> HttpResponseRedirect: - """ - Redirect the user to a place that indicates a failed import - """ + """Redirect the user to a place that indicates a failed import""" return HttpResponseRedirect(reverse( "import_scan_results", args=(context.get("engagement", context.get("product")).id, ), @@ -1077,9 +1049,7 @@ def get( engagement_id: Optional[int] = None, product_id: Optional[int] = None, ) -> HttpResponse: - """ - Process GET requests for the Import View - """ + """Process GET requests for the Import View""" # process the request and path parameters request, context = self.handle_request( request, @@ -1095,9 +1065,7 @@ def post( engagement_id: Optional[int] = None, product_id: Optional[int] = None, ) -> HttpResponse: - """ - Process POST requests for the Import View - """ + """Process POST requests for the Import View""" # process the request and path parameters request, context = self.handle_request( request, @@ -1401,8 +1369,7 @@ def view_edit_risk_acceptance(request, eid, raid, edit_mode=False): if not errors: logger.debug("redirecting to return_url") return redirect_to_return_url_or_else(request, reverse("view_risk_acceptance", args=(eid, raid))) - else: - logger.error("errors found") + logger.error("errors found") else: if edit_mode: @@ -1549,8 +1516,7 @@ def upload_threatmodel(request, eid): @user_is_authorized(Engagement, Permissions.Engagement_View, "eid") def view_threatmodel(request, eid): eng = get_object_or_404(Engagement, pk=eid) - response = FileResponse(open(eng.tmodel_path, "rb")) - return response + return FileResponse(open(eng.tmodel_path, "rb")) @user_is_authorized(Engagement, Permissions.Engagement_View, "eid") @@ -1589,9 +1555,8 @@ def get_engagements(request): if not url: msg = "Please use the export button when exporting engagements" raise ValidationError(msg) - else: - if url.startswith("url="): - url = url[4:] + if url.startswith("url="): + url = url[4:] path_items = list(filter(None, re.split(r"/|\?", url))) diff --git a/dojo/filters.py b/dojo/filters.py index 1461966c19e..35ceb205938 100644 --- a/dojo/filters.py +++ b/dojo/filters.py @@ -331,8 +331,7 @@ def get_tags_model_from_field_name(field): def get_tags_label_from_model(model): if model: return f"Tags ({model.__name__.title()})" - else: - return "Tags (Unknown)" + return "Tags (Unknown)" def get_finding_filterset_fields(metrics=False, similar=False, filter_string_matching=False): @@ -780,6 +779,7 @@ def any(self, qs, name): self.start_date = _truncate(start_date - timedelta(days=1)) self.end_date = _truncate(now() + timedelta(days=1)) return qs.all() + return None def current_month(self, qs, name): self.start_date = local_tz.localize( @@ -1927,8 +1927,7 @@ def set_hash_codes(self, *args: list, **kwargs: dict): def filter_queryset(self, *args: list, **kwargs: dict): queryset = super().filter_queryset(*args, **kwargs) queryset = get_authorized_findings(Permissions.Finding_View, queryset, self.user) - queryset = queryset.exclude(pk=self.finding.pk) - return queryset + return queryset.exclude(pk=self.finding.pk) class SimilarFindingFilter(FindingFilter, SimilarFindingHelper): diff --git 
a/dojo/finding/helper.py b/dojo/finding/helper.py index d52857f2291..1182cb26d68 100644 --- a/dojo/finding/helper.py +++ b/dojo/finding/helper.py @@ -566,7 +566,7 @@ def engagement_post_delete(sender, instance, **kwargs): def fix_loop_duplicates(): - """ Due to bugs in the past and even currently when under high parallel load, there can be transitive duplicates. """ + """Due to bugs in the past and even currently when under high parallel load, there can be transitive duplicates.""" """ i.e. A -> B -> C. This can lead to problems when deleting findingns, performing deduplication, etc """ candidates = Finding.objects.filter(duplicate_finding__isnull=False, original_finding__isnull=False).order_by("-id") diff --git a/dojo/finding/queries.py b/dojo/finding/queries.py index 7f213805a49..47386e43f86 100644 --- a/dojo/finding/queries.py +++ b/dojo/finding/queries.py @@ -68,14 +68,12 @@ def get_authorized_findings(permission, queryset=None, user=None): test__engagement__product__member=Exists(authorized_product_roles), test__engagement__product__prod_type__authorized_group=Exists(authorized_product_type_groups), test__engagement__product__authorized_group=Exists(authorized_product_groups)) - findings = findings.filter( + return findings.filter( Q(test__engagement__product__prod_type__member=True) | Q(test__engagement__product__member=True) | Q(test__engagement__product__prod_type__authorized_group=True) | Q(test__engagement__product__authorized_group=True)) - return findings - def get_authorized_stub_findings(permission): user = get_current_user() @@ -101,14 +99,12 @@ def get_authorized_stub_findings(permission): test__engagement__product__member=Exists(authorized_product_roles), test__engagement__product__prod_type__authorized_group=Exists(authorized_product_type_groups), test__engagement__product__authorized_group=Exists(authorized_product_groups)).order_by("id") - findings = findings.filter( + return findings.filter( Q(test__engagement__product__prod_type__member=True) | Q(test__engagement__product__member=True) | Q(test__engagement__product__prod_type__authorized_group=True) | Q(test__engagement__product__authorized_group=True)) - return findings - def get_authorized_vulnerability_ids(permission, queryset=None, user=None): @@ -151,10 +147,8 @@ def get_authorized_vulnerability_ids(permission, queryset=None, user=None): finding__test__engagement__product__member=Exists(authorized_product_roles), finding__test__engagement__product__prod_type__authorized_group=Exists(authorized_product_type_groups), finding__test__engagement__product__authorized_group=Exists(authorized_product_groups)) - vulnerability_ids = vulnerability_ids.filter( + return vulnerability_ids.filter( Q(finding__test__engagement__product__prod_type__member=True) | Q(finding__test__engagement__product__member=True) | Q(finding__test__engagement__product__prod_type__authorized_group=True) | Q(finding__test__engagement__product__authorized_group=True)) - - return vulnerability_ids diff --git a/dojo/finding/views.py b/dojo/finding/views.py index c6ca73fcad4..ea5578ee460 100644 --- a/dojo/finding/views.py +++ b/dojo/finding/views.py @@ -311,31 +311,29 @@ def get_test_id(self): def filter_findings_by_object(self, findings: QuerySet[Finding]): if product_id := self.get_product_id(): return findings.filter(test__engagement__product__id=product_id) - elif engagement_id := self.get_engagement_id(): + if engagement_id := self.get_engagement_id(): return findings.filter(test__engagement=engagement_id) - elif test_id := self.get_test_id(): + if 
test_id := self.get_test_id(): return findings.filter(test=test_id) - else: - return findings + return findings def filter_findings_by_filter_name(self, findings: QuerySet[Finding]): filter_name = self.get_filter_name() if filter_name == "Open": return findings.filter(finding_helper.OPEN_FINDINGS_QUERY) - elif filter_name == "Verified": + if filter_name == "Verified": return findings.filter(finding_helper.VERIFIED_FINDINGS_QUERY) - elif filter_name == "Out of Scope": + if filter_name == "Out of Scope": return findings.filter(finding_helper.OUT_OF_SCOPE_FINDINGS_QUERY) - elif filter_name == "False Positive": + if filter_name == "False Positive": return findings.filter(finding_helper.FALSE_POSITIVE_FINDINGS_QUERY) - elif filter_name == "Inactive": + if filter_name == "Inactive": return findings.filter(finding_helper.INACTIVE_FINDINGS_QUERY) - elif filter_name == "Accepted": + if filter_name == "Accepted": return findings.filter(finding_helper.ACCEPTED_FINDINGS_QUERY) - elif filter_name == "Closed": + if filter_name == "Closed": return findings.filter(finding_helper.CLOSED_FINDINGS_QUERY) - else: - return findings + return findings def filter_findings_by_form(self, request: HttpRequest, findings: QuerySet[Finding]): # Set up the args for the form @@ -358,9 +356,7 @@ def filter_findings_by_form(self, request: HttpRequest, findings: QuerySet[Findi def get_filtered_findings(self): findings = get_authorized_findings(Permissions.Finding_View).order_by(self.get_order_by()) findings = self.filter_findings_by_object(findings) - findings = self.filter_findings_by_filter_name(findings) - - return findings + return self.filter_findings_by_filter_name(findings) def get_fully_filtered_findings(self, request: HttpRequest): findings = self.get_filtered_findings() @@ -1017,9 +1013,8 @@ def process_finding_form(self, request: HttpRequest, finding: Finding, context: ) return finding, request, True - else: - add_error_message_to_response("The form has errors, please correct them below.") - add_field_errors_to_response(context["form"]) + add_error_message_to_response("The form has errors, please correct them below.") + add_field_errors_to_response(context["form"]) return finding, request, False @@ -1074,8 +1069,7 @@ def process_jira_form(self, request: HttpRequest, finding: Finding, context: dic ) return request, True, push_to_jira - else: - add_field_errors_to_response(context["jform"]) + add_field_errors_to_response(context["jform"]) return request, False, False @@ -1090,8 +1084,7 @@ def process_github_form(self, request: HttpRequest, finding: Finding, context: d add_external_issue(finding, "github") return request, True - else: - add_field_errors_to_response(context["gform"]) + add_field_errors_to_response(context["gform"]) return request, False @@ -1316,10 +1309,9 @@ def close_finding(request, fid): return HttpResponseRedirect( reverse("view_test", args=(finding.test.id,)), ) - else: - return HttpResponseRedirect( - reverse("close_finding", args=(finding.id,)), - ) + return HttpResponseRedirect( + reverse("close_finding", args=(finding.id,)), + ) product_tab = Product_Tab( finding.test.engagement.product, title="Close", tab="findings", @@ -1502,15 +1494,14 @@ def apply_template_cwe(request, fid): extra_tags="alert-success", ) return HttpResponseRedirect(reverse("view_finding", args=(fid,))) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to apply CWE template finding, please try again.", - extra_tags="alert-danger", - ) - else: - raise PermissionDenied + messages.add_message( + 
request, + messages.ERROR, + "Unable to apply CWE template finding, please try again.", + extra_tags="alert-danger", + ) + return None + raise PermissionDenied @user_is_authorized(Finding, Permissions.Finding_Edit, "fid") @@ -1549,13 +1540,12 @@ def copy_finding(request, fid): return redirect_to_return_url_or_else( request, reverse("view_test", args=(test.id,)), ) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to copy finding, please try again.", - extra_tags="alert-danger", - ) + messages.add_message( + request, + messages.ERROR, + "Unable to copy finding, please try again.", + extra_tags="alert-danger", + ) product_tab = Product_Tab(product, title="Copy Finding", tab="findings") return render( @@ -2002,8 +1992,7 @@ def apply_template_to_finding(request, fid, tid): ) return HttpResponseRedirect(reverse("view_finding", args=(finding.id,))) - else: - return HttpResponseRedirect(reverse("view_finding", args=(finding.id,))) + return HttpResponseRedirect(reverse("view_finding", args=(finding.id,))) @user_is_authorized(Test, Permissions.Finding_Add, "tid") @@ -2063,15 +2052,14 @@ def delete_stub_finding(request, fid): extra_tags="alert-success", ) return HttpResponseRedirect(reverse("view_test", args=(tid,))) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to delete potential finding, please try again.", - extra_tags="alert-danger", - ) - else: - raise PermissionDenied + messages.add_message( + request, + messages.ERROR, + "Unable to delete potential finding, please try again.", + extra_tags="alert-danger", + ) + return None + raise PermissionDenied @user_is_authorized(Stub_Finding, Permissions.Finding_Edit, "fid") @@ -2188,13 +2176,12 @@ def promote_to_finding(request, fid): ) return HttpResponseRedirect(reverse("view_test", args=(test.id,))) - else: - form_error = True - add_error_message_to_response( - "The form has errors, please correct them below.", - ) - add_field_errors_to_response(jform) - add_field_errors_to_response(form) + form_error = True + add_error_message_to_response( + "The form has errors, please correct them below.", + ) + add_field_errors_to_response(jform) + add_field_errors_to_response(form) else: form = PromoteFindingForm( initial={ @@ -2356,13 +2343,12 @@ def add_template(request): extra_tags="alert-success", ) return HttpResponseRedirect(reverse("templates")) - else: - messages.add_message( - request, - messages.ERROR, - "Template form has error, please revise and try again.", - extra_tags="alert-danger", - ) + messages.add_message( + request, + messages.ERROR, + "Template form has error, please revise and try again.", + extra_tags="alert-danger", + ) add_breadcrumb(title="Add Template", top_level=False, request=request) return render( request, "dojo/add_template.html", {"form": form, "name": "Add Template"}, @@ -2411,13 +2397,12 @@ def edit_template(request, tid): extra_tags="alert-success", ) return HttpResponseRedirect(reverse("templates")) - else: - messages.add_message( - request, - messages.ERROR, - "Template form has error, please revise and try again.", - extra_tags="alert-danger", - ) + messages.add_message( + request, + messages.ERROR, + "Template form has error, please revise and try again.", + extra_tags="alert-danger", + ) count = apply_cwe_mitigation(apply_to_findings=True, template=template, update=False) add_breadcrumb(title="Edit Template", top_level=False, request=request) @@ -2447,15 +2432,14 @@ def delete_template(request, tid): extra_tags="alert-success", ) return 
HttpResponseRedirect(reverse("templates")) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to delete Template, please revise and try again.", - extra_tags="alert-danger", - ) - else: - raise PermissionDenied + messages.add_message( + request, + messages.ERROR, + "Unable to delete Template, please revise and try again.", + extra_tags="alert-danger", + ) + return None + raise PermissionDenied def download_finding_pic(request, token): @@ -2661,13 +2645,12 @@ def merge_finding_product(request, pid): return HttpResponseRedirect( reverse("edit_finding", args=(finding_to_merge_into.id,)), ) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to merge findings. Findings to merge contained in finding to merge into.", - extra_tags="alert-danger", - ) + messages.add_message( + request, + messages.ERROR, + "Unable to merge findings. Findings to merge contained in finding to merge into.", + extra_tags="alert-danger", + ) else: messages.add_message( request, @@ -3137,8 +3120,7 @@ def find_available_notetypes(notes): break else: available_note_types.append(note_type_id) - queryset = Note_Type.objects.filter(id__in=available_note_types).order_by("-id") - return queryset + return Note_Type.objects.filter(id__in=available_note_types).order_by("-id") def get_missing_mandatory_notetypes(finding): @@ -3153,8 +3135,7 @@ def get_missing_mandatory_notetypes(finding): break else: notes_to_be_added.append(note_type_id) - queryset = Note_Type.objects.filter(id__in=notes_to_be_added) - return queryset + return Note_Type.objects.filter(id__in=notes_to_be_added) @user_is_authorized(Finding, Permissions.Finding_Edit, "original_id") diff --git a/dojo/finding_group/queries.py b/dojo/finding_group/queries.py index aae57f53c83..39b91c02665 100644 --- a/dojo/finding_group/queries.py +++ b/dojo/finding_group/queries.py @@ -46,10 +46,8 @@ def get_authorized_finding_groups(permission, queryset=None, user=None): test__engagement__product__member=Exists(authorized_product_roles), test__engagement__product__prod_type__authorized_group=Exists(authorized_product_type_groups), test__engagement__product__authorized_group=Exists(authorized_product_groups)) - finding_groups = finding_groups.filter( + return finding_groups.filter( Q(test__engagement__product__prod_type__member=True) | Q(test__engagement__product__member=True) | Q(test__engagement__product__prod_type__authorized_group=True) | Q(test__engagement__product__authorized_group=True)) - - return finding_groups diff --git a/dojo/fixtures/dojo_testdata.json b/dojo/fixtures/dojo_testdata.json index 62486cb90cf..ae550f8bf81 100644 --- a/dojo/fixtures/dojo_testdata.json +++ b/dojo/fixtures/dojo_testdata.json @@ -227,6 +227,7 @@ "url_prefix": "", "enable_slack_notifications": false, "enable_mail_notifications": false, + "enable_webhooks_notifications": true, "email_from": "no-reply@example.com", "false_positive_history": false, "msteams_url": "", @@ -2926,11 +2927,27 @@ "pk": 1, "model": "dojo.notifications", "fields": { - "product": 1, - "user": 2, - "product_type_added": [ - "slack" - ] + "product": null, + "user": null, + "template": false, + "product_type_added": "webhooks,alert", + "product_added": "webhooks,alert", + "engagement_added": "webhooks,alert", + "test_added": "webhooks,alert", + "scan_added": "webhooks,alert", + "scan_added_empty": "webhooks", + "jira_update": "alert", + "upcoming_engagement": "alert", + "stale_engagement": "alert", + "auto_close_engagement": "alert", + "close_engagement": "alert", + "user_mentioned": "alert", 
+ "code_review": "alert", + "review_requested": "alert", + "other": "alert", + "sla_breach": "alert", + "risk_acceptance_expiration": "alert", + "sla_breach_combined": "alert" } }, { @@ -3045,5 +3062,35 @@ "dismissable": true, "style": "danger" } + }, + { + "model": "dojo.notification_webhooks", + "pk": 1, + "fields": { + "name": "My webhook endpoint", + "url": "http://webhook.endpoint:8080/post", + "header_name": "Auth", + "header_value": "Token xxx", + "status": "active", + "first_error": null, + "last_error": null, + "note": null, + "owner": null + } + }, + { + "model": "dojo.notification_webhooks", + "pk": 2, + "fields": { + "name": "My personal webhook endpoint", + "url": "http://webhook.endpoint:8080/post", + "header_name": "Auth", + "header_value": "Token secret", + "status": "active", + "first_error": null, + "last_error": null, + "note": null, + "owner": 2 + } } ] \ No newline at end of file diff --git a/dojo/forms.py b/dojo/forms.py index cbeaa0a0c31..1dd52671c45 100644 --- a/dojo/forms.py +++ b/dojo/forms.py @@ -72,6 +72,7 @@ JIRA_Project, Note_Type, Notes, + Notification_Webhooks, Notifications, Objects_Product, Product, @@ -139,6 +140,7 @@ def render(self, name, *args, **kwargs): class MonthYearWidget(Widget): + """ A Widget that splits date input into two + + + +{% endblock %} diff --git a/dojo/templates/dojo/add_related.html b/dojo/templates/dojo/add_related.html index f6c74e1c530..682df6045ef 100644 --- a/dojo/templates/dojo/add_related.html +++ b/dojo/templates/dojo/add_related.html @@ -29,7 +29,7 @@ - + diff --git a/dojo/templates/dojo/delete_notification_webhook.html b/dojo/templates/dojo/delete_notification_webhook.html new file mode 100644 index 00000000000..f196ad94fc9 --- /dev/null +++ b/dojo/templates/dojo/delete_notification_webhook.html @@ -0,0 +1,12 @@ +{% extends "base.html" %} +{% block content %} +

Delete Notification Webhook

+
{% csrf_token %} + {% include "dojo/form_fields.html" with form=form %} +
+
+ +
+
+
+{% endblock %} diff --git a/dojo/templates/dojo/edit_notification_webhook.html b/dojo/templates/dojo/edit_notification_webhook.html new file mode 100644 index 00000000000..94bd56c2307 --- /dev/null +++ b/dojo/templates/dojo/edit_notification_webhook.html @@ -0,0 +1,15 @@ +{% extends "base.html" %} + {% block content %} + {{ block.super }} +

Edit Notification Webhook

+
{% csrf_token %} + {% include "dojo/form_fields.html" with form=form %} +
+
+ + +
+
+
+ {% endblock %} + \ No newline at end of file diff --git a/dojo/templates/dojo/notifications.html b/dojo/templates/dojo/notifications.html index 52d87393c45..81fac49d5cc 100644 --- a/dojo/templates/dojo/notifications.html +++ b/dojo/templates/dojo/notifications.html @@ -89,6 +89,9 @@

{% if 'mail' in enabled_notifications %} {% trans "Mail" %} {% endif %} + {% if 'webhooks' in enabled_notifications %} + {% trans "Webhooks" %} + {% endif %} {% trans "Alert" %} diff --git a/dojo/templates/dojo/report_cover_page.html b/dojo/templates/dojo/report_cover_page.html index 8e936cd9618..0130d08f845 100644 --- a/dojo/templates/dojo/report_cover_page.html +++ b/dojo/templates/dojo/report_cover_page.html @@ -6,7 +6,7 @@
 

- + DefectDojo Logo

{{ report_title }}

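For context on how the Notification_Webhooks additions above might be exercised, here is a minimal sketch of registering a webhook receiver through the new NotificationWebhooksViewSet. It assumes the viewset is routed at /api/v2/notification_webhooks/ and that an admin (IsSuperUser) API token is available; neither the route nor the token is defined in this diff. Field names follow the Notification_Webhooks model from migration 0215, and the example values mirror the dojo_testdata.json fixture above.

# Minimal sketch: create a Notification_Webhooks entry via the new API viewset.
# Assumptions: route /api/v2/notification_webhooks/ and an admin API token.
import requests

DD_URL = "https://defectdojo.example.com"   # assumed DefectDojo base URL
API_TOKEN = "REPLACE_WITH_ADMIN_TOKEN"      # assumed admin (IsSuperUser) token

payload = {
    # Field names follow the Notification_Webhooks model in 0215_webhooks_notifications.py;
    # example values mirror the dojo_testdata.json fixture above.
    "name": "My webhook endpoint",
    "url": "http://webhook.endpoint:8080/post",
    "header_name": "Auth",
    "header_value": "Token xxx",
    # "owner" omitted -> processed as a system notification webhook
}

response = requests.post(
    f"{DD_URL}/api/v2/notification_webhooks/",
    headers={"Authorization": f"Token {API_TOKEN}"},
    json=payload,
    timeout=10,
)
response.raise_for_status()
print(response.json())
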
diff --git a/dojo/templates/dojo/system_settings.html b/dojo/templates/dojo/system_settings.html index 693abe712f0..02510452e16 100644 --- a/dojo/templates/dojo/system_settings.html +++ b/dojo/templates/dojo/system_settings.html @@ -62,7 +62,7 @@

System Settings

} $(function () { - $.each(['slack','msteams','mail', 'grade'], function (index, value) { + $.each(['slack','msteams','mail','webhooks','grade'], function (index, value) { updatenotificationsgroup(value); $('#id_enable_' + value + '_notifications').change(function() { updatenotificationsgroup(value)}); }); diff --git a/dojo/templates/dojo/view_notification_webhooks.html b/dojo/templates/dojo/view_notification_webhooks.html new file mode 100644 index 00000000000..6b02c0888d3 --- /dev/null +++ b/dojo/templates/dojo/view_notification_webhooks.html @@ -0,0 +1,101 @@ +{% extends "base.html" %} +{% load navigation_tags %} +{% load display_tags %} +{% load i18n %} +{% load authorization_tags %} +{% block content %} + {{ block.super }} +
+
+
+
+

+ Notification Webhook List + +

+
+ +
+ {% if nwhs %} + +
+ {% include "dojo/paging_snippet.html" with page=nwhs page_size=True %} +
+ +
+ + + + + + + + + + + + {% for nwh in nwhs %} + + + + + + + {% if "dojo.edit_notification_webhook"|has_configuration_permission:request %} + + {% endif %} + + {% endfor %} + +
{% dojo_sort request 'Notification Webhook Name' 'name' 'asc' %}URLStatusNoteOwner
{{ nwh.name }}{{ nwh.url }}{{ nwh.get_status_display }} + {% if nwh.first_error or nwh.last_error %} + + {% endif %} + {{ nwh.note }}{% if nwh.owner %}{{ nwh.owner }}{% else %}System Webhook{% endif %} + +
+
+
+ {% include "dojo/paging_snippet.html" with page=nwhs page_size=True %} +
+ {% else %} +

No Notification Webhook found.

+ {% endif %} +
+
+{% endblock %} +{% block postscript %} + {{ block.super }} + {% include "dojo/filter_js_snippet.html" %} +{% endblock %} diff --git a/dojo/templates/dojo/view_product_details.html b/dojo/templates/dojo/view_product_details.html index ea4514d7356..3f7ea62ce32 100644 --- a/dojo/templates/dojo/view_product_details.html +++ b/dojo/templates/dojo/view_product_details.html @@ -687,7 +687,7 @@