diff --git a/.github/workflows/build-docker-images-for-testing.yml b/.github/workflows/build-docker-images-for-testing.yml index 238790da289..7253ba132f7 100644 --- a/.github/workflows/build-docker-images-for-testing.yml +++ b/.github/workflows/build-docker-images-for-testing.yml @@ -28,14 +28,14 @@ jobs: run: echo "IMAGE_REPOSITORY=$(echo ${{ github.repository }} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 with: buildkitd-flags: --debug driver-opts: image=moby/buildkit:master # needed to get the fix for https://github.com/moby/buildkit/issues/2426 - name: Build id: docker_build - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 with: context: . push: false diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index 51567c907e3..8f42ec29053 100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -78,7 +78,7 @@ jobs: uses: actions/checkout@v4 - name: Setup Minikube - uses: manusa/actions-setup-minikube@v2.7.2 + uses: manusa/actions-setup-minikube@v2.9.0 with: minikube version: 'v1.24.0' kubernetes version: ${{ matrix.k8s }} diff --git a/.github/workflows/release-x-manual-docker-containers.yml b/.github/workflows/release-x-manual-docker-containers.yml index a035195c2e8..328be086031 100644 --- a/.github/workflows/release-x-manual-docker-containers.yml +++ b/.github/workflows/release-x-manual-docker-containers.yml @@ -32,7 +32,7 @@ jobs: platform: [amd64] steps: - name: Login to DockerHub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} @@ -47,7 +47,7 @@ jobs: - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Cache Docker layers uses: actions/cache@v3 @@ -63,7 +63,7 @@ jobs: - name: Build and push images with debian if: ${{ matrix.os == 'debian' }} - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 env: REPO_ORG: ${{ env.repoorg }} docker-image: ${{ matrix.docker-image }} @@ -77,7 +77,7 @@ jobs: - name: Build and push images with alpine if: ${{ matrix.os == 'alpine' }} - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 env: REPO_ORG: ${{ env.repoorg }} docker-image: ${{ matrix.docker-image }} diff --git a/Dockerfile.integration-tests-debian b/Dockerfile.integration-tests-debian index 259c8dc0f90..6b2e7e97a62 100644 --- a/Dockerfile.integration-tests-debian +++ b/Dockerfile.integration-tests-debian @@ -1,7 +1,7 @@ # code: language=Dockerfile -FROM openapitools/openapi-generator-cli:v7.0.0@sha256:469376dae86c38cb4152b9b820a93d2e74d27a442ea99014f8c7f4a6f2848b9f as openapitools +FROM openapitools/openapi-generator-cli:v7.0.1@sha256:26e3add1a66473bdac63cd3eeec9363d776c343eb50e5e66e97b9ad0d34beaf4 as openapitools FROM python:3.11.4-slim-bullseye@sha256:40319d0a897896e746edf877783ef39685d44e90e1e6de8d964d0382df0d4952 as build WORKDIR /app RUN \ diff --git a/components/package.json b/components/package.json index e4bc45adb6c..f1ee3fdae0f 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.27.0-dev", + "version": "2.28.0-dev", "license" : "BSD-3-Clause", "private": true, "dependencies": { diff --git a/docker-compose.yml b/docker-compose.yml index eb4ca1e8eff..ebc59d2a842 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -138,8 
+138,8 @@ services: volumes: - defectdojo_data:/var/lib/mysql postgres: - image: postgres:15.4-alpine@sha256:8bc3c893342c766481df5fde58fab6f1a1115b94eb56778126163305243e9709 - profiles: + image: postgres:16.0-alpine@sha256:2ccd6655060d7b06c71f86094e8c7a28bdcc8a80b43baca4b1dabb29cff138a2 + profiles: - postgres-rabbitmq - postgres-redis environment: @@ -149,15 +149,15 @@ services: volumes: - defectdojo_postgres:/var/lib/postgresql/data rabbitmq: - image: rabbitmq:3.12.4-alpine@sha256:1db3f856e6628e2ac512a91959437ca5bab5112c856fe730b6b5ff5087e5e3d0 - profiles: + image: rabbitmq:3.12.6-alpine@sha256:a21880dc5e2b4581c0dd762337c7112475a2d8daba697e1c6192923ebad91739 + profiles: - mysql-rabbitmq - postgres-rabbitmq volumes: - defectdojo_rabbitmq:/var/lib/rabbitmq redis: - image: redis:7.2.0-alpine@sha256:fd5de2340bc46cbc2241975ab027797c350dec6fd86349e3ac384e3a41be6fee - profiles: + image: redis:7.2.1-alpine@sha256:9150d86fe2a9d03bbdb15bb9758fa5e3d24632386af8f6eb4d675ee4c976f499 + profiles: - mysql-redis - postgres-redis volumes: diff --git a/docs/content/en/getting_started/running-in-production.md b/docs/content/en/getting_started/running-in-production.md index 61d67b61e80..6da16d253b7 100644 --- a/docs/content/en/getting_started/running-in-production.md +++ b/docs/content/en/getting_started/running-in-production.md @@ -1,6 +1,6 @@ --- title: "Running in production" -description: "For use in Produciton environments, performance tweaks and backups are recommended." +description: "For use in Production environments, performance tweaks and backups are recommended." draft: false weight: 4 --- @@ -79,7 +79,9 @@ You can execute the following command to see the configuration: `docker-compose exec celerybeat bash -c "celery -A dojo inspect stats"` and see what is in effect. -###### Asynchronous Imports +#### Asynchronous Import + +**Please note: Asynchronous Import is currently an experimental feature. Please exercise caution with this method as results may be inconsistent.** Import and Re-Import can also be configured to handle uploads asynchronously to aid in processing especially large scans. It works by batching Findings and Endpoints by a diff --git a/docs/content/en/integrations/parsers/file/anchore_engine.md b/docs/content/en/integrations/parsers/file/anchore_engine.md index e30eeb7ff6a..b5804b1146e 100644 --- a/docs/content/en/integrations/parsers/file/anchore_engine.md +++ b/docs/content/en/integrations/parsers/file/anchore_engine.md @@ -2,5 +2,39 @@ title: "Anchore-Engine" toc_hide: true --- -JSON vulnerability report generated by anchore-cli tool, using a command -like `anchore-cli --json image vuln all` + +### File Types +DefectDojo parser accepts a .json file. + +Using the [Anchore CLI](https://docs.anchore.com/current/docs/using/cli_usage/images/inspecting_image_content/) is the most reliable way to generate an Anchore report which DefectDojo can parse. When generating a report with the Anchore CLI, please use the following command to ensure complete data: `anchore-cli --json image vuln all` + +### Acceptable JSON Format +All properties are strings and are required by the parser. 
+ +~~~ + +{ + "imageDigest": "sha256:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + "vulnerabilities": [ + { + "feed": "example-feed", + "feed_group": "example-feed-group", + "fix": "1.2.4", + "package": "example-package", + "package_cpe": "cpe:2.3:a:*:example:1.2.3:*:*:*:*:*:*:*", + "package_name": "example-package-name", + "package_path": "path/to/package", + "package_type": "dpkg", + "package_version": "1.2.3", + "severity": "Medium", + "url": "https://example.com/cve/CVE-2011-3389", + "vuln": "CVE-2011-3389" + }, + ... + ], + "vulnerability_type": "os" +} +~~~ + +### Sample Scan Data +Sample Anchore-Engine scans can be found at https://github.com/DefectDojo/sample-scan-files/tree/master/anchore_engine . diff --git a/docs/content/en/integrations/parsers/file/kubehunter.md b/docs/content/en/integrations/parsers/file/kubehunter.md new file mode 100644 index 00000000000..7b3de0a55b3 --- /dev/null +++ b/docs/content/en/integrations/parsers/file/kubehunter.md @@ -0,0 +1,5 @@ +--- +title: "kubeHunter Scanner" +toc_hide: true +--- +Import JSON reports of kube-hunter scans. Use "kube-hunter --report json" to produce the report in json format. diff --git a/docs/package-lock.json b/docs/package-lock.json index dbf8c2d94c7..45435c98391 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -5,8 +5,8 @@ "packages": { "": { "devDependencies": { - "autoprefixer": "10.4.15", - "postcss": "8.4.29", + "autoprefixer": "10.4.16", + "postcss": "8.4.31", "postcss-cli": "10.1.0" } }, @@ -83,9 +83,9 @@ } }, "node_modules/autoprefixer": { - "version": "10.4.15", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.15.tgz", - "integrity": "sha512-KCuPB8ZCIqFdA4HwKXsvz7j6gvSDNhDP7WnUjBleRkKjPdvCmHFuQ77ocavI8FT6NdvlBnE2UFr2H4Mycn8Vew==", + "version": "10.4.16", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.16.tgz", + "integrity": "sha512-7vd3UC6xKp0HLfua5IjZlcXvGAGy7cBAXTg2lyQ/8WpNhd6SiZ8Be+xm3FyBSYJx5GKcpRCzBh7RH4/0dnY+uQ==", "dev": true, "funding": [ { @@ -103,8 +103,8 @@ ], "dependencies": { "browserslist": "^4.21.10", - "caniuse-lite": "^1.0.30001520", - "fraction.js": "^4.2.0", + "caniuse-lite": "^1.0.30001538", + "fraction.js": "^4.3.6", "normalize-range": "^0.1.2", "picocolors": "^1.0.0", "postcss-value-parser": "^4.2.0" @@ -173,9 +173,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001520", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001520.tgz", - "integrity": "sha512-tahF5O9EiiTzwTUqAeFjIZbn4Dnqxzz7ktrgGlMYNLH43Ul26IgTMH/zvL3DG0lZxBYnlT04axvInszUsZULdA==", + "version": "1.0.30001538", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001538.tgz", + "integrity": "sha512-HWJnhnID+0YMtGlzcp3T9drmBJUVDchPJ08tpUGFLs9CYlwWPH2uLgpHn8fND5pCgXVtnGS3H4QR9XLMHVNkHw==", "dev": true, "funding": [ { @@ -328,16 +328,16 @@ } }, "node_modules/fraction.js": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz", - "integrity": "sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.6.tgz", + "integrity": "sha512-n2aZ9tNfYDwaHhvFTkhFErqOMIb8uyzSQ+vGJBjZyanAKZVbGUQ1sngfk9FdkBw7G26O7AgNjLcecLffD1c7eg==", "dev": true, "engines": { "node": "*" }, "funding": { "type": "patreon", - "url": "https://www.patreon.com/infusion" + "url": "https://github.com/sponsors/rawify" } }, 
"node_modules/fs-extra": { @@ -608,9 +608,9 @@ } }, "node_modules/postcss": { - "version": "8.4.29", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.29.tgz", - "integrity": "sha512-cbI+jaqIeu/VGqXEarWkRCCffhjgXc0qjBtXpqJhTBohMUjUQnbBr0xqX3vEKudc4iviTewcJo5ajcec5+wdJw==", + "version": "8.4.31", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", + "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", "dev": true, "funding": [ { @@ -1043,14 +1043,14 @@ } }, "autoprefixer": { - "version": "10.4.15", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.15.tgz", - "integrity": "sha512-KCuPB8ZCIqFdA4HwKXsvz7j6gvSDNhDP7WnUjBleRkKjPdvCmHFuQ77ocavI8FT6NdvlBnE2UFr2H4Mycn8Vew==", + "version": "10.4.16", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.16.tgz", + "integrity": "sha512-7vd3UC6xKp0HLfua5IjZlcXvGAGy7cBAXTg2lyQ/8WpNhd6SiZ8Be+xm3FyBSYJx5GKcpRCzBh7RH4/0dnY+uQ==", "dev": true, "requires": { "browserslist": "^4.21.10", - "caniuse-lite": "^1.0.30001520", - "fraction.js": "^4.2.0", + "caniuse-lite": "^1.0.30001538", + "fraction.js": "^4.3.6", "normalize-range": "^0.1.2", "picocolors": "^1.0.0", "postcss-value-parser": "^4.2.0" @@ -1084,9 +1084,9 @@ } }, "caniuse-lite": { - "version": "1.0.30001520", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001520.tgz", - "integrity": "sha512-tahF5O9EiiTzwTUqAeFjIZbn4Dnqxzz7ktrgGlMYNLH43Ul26IgTMH/zvL3DG0lZxBYnlT04axvInszUsZULdA==", + "version": "1.0.30001538", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001538.tgz", + "integrity": "sha512-HWJnhnID+0YMtGlzcp3T9drmBJUVDchPJ08tpUGFLs9CYlwWPH2uLgpHn8fND5pCgXVtnGS3H4QR9XLMHVNkHw==", "dev": true }, "chokidar": { @@ -1196,9 +1196,9 @@ } }, "fraction.js": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz", - "integrity": "sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.6.tgz", + "integrity": "sha512-n2aZ9tNfYDwaHhvFTkhFErqOMIb8uyzSQ+vGJBjZyanAKZVbGUQ1sngfk9FdkBw7G26O7AgNjLcecLffD1c7eg==", "dev": true }, "fs-extra": { @@ -1382,9 +1382,9 @@ "dev": true }, "postcss": { - "version": "8.4.29", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.29.tgz", - "integrity": "sha512-cbI+jaqIeu/VGqXEarWkRCCffhjgXc0qjBtXpqJhTBohMUjUQnbBr0xqX3vEKudc4iviTewcJo5ajcec5+wdJw==", + "version": "8.4.31", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", + "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", "dev": true, "requires": { "nanoid": "^3.3.6", diff --git a/docs/package.json b/docs/package.json index 7f32365d646..f867434703b 100644 --- a/docs/package.json +++ b/docs/package.json @@ -1,7 +1,7 @@ { "devDependencies": { - "postcss": "8.4.29", - "autoprefixer": "10.4.15", + "postcss": "8.4.31", + "autoprefixer": "10.4.16", "postcss-cli": "10.1.0" } } diff --git a/dojo/__init__.py b/dojo/__init__.py index 7f86aafb6a1..e7576a482eb 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. 
from .celery import app as celery_app # noqa -__version__ = '2.27.0-dev' +__version__ = '2.28.0-dev' __url__ = 'https://github.com/DefectDojo/django-DefectDojo' __docs__ = 'https://documentation.defectdojo.com' diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py index 8f8024c93de..32656a41199 100644 --- a/dojo/api_v2/views.py +++ b/dojo/api_v2/views.py @@ -3978,6 +3978,7 @@ class SLAConfigurationViewset( mixins.DestroyModelMixin, mixins.CreateModelMixin, viewsets.GenericViewSet, + dojo_mixins.DeletePreviewModelMixin, ): serializer_class = serializers.SLAConfigurationSerializer queryset = SLA_Configuration.objects.all() diff --git a/dojo/filters.py b/dojo/filters.py index 830aa05c2fd..37d89c1f606 100644 --- a/dojo/filters.py +++ b/dojo/filters.py @@ -22,7 +22,8 @@ from dojo.models import Dojo_User, Finding_Group, Product_API_Scan_Configuration, Product_Type, Finding, Product, Test_Import, Test_Type, \ Endpoint, Development_Environment, Finding_Template, Note_Type, Risk_Acceptance, Cred_Mapping, \ Engagement_Survey, Question, TextQuestion, ChoiceQuestion, Endpoint_Status, Engagement, \ - ENGAGEMENT_STATUS_CHOICES, Test, App_Analysis, SEVERITY_CHOICES, EFFORT_FOR_FIXING_CHOICES, Dojo_Group, Vulnerability_Id + ENGAGEMENT_STATUS_CHOICES, Test, App_Analysis, SEVERITY_CHOICES, EFFORT_FOR_FIXING_CHOICES, Dojo_Group, Vulnerability_Id, \ + Test_Import_Finding_Action, IMPORT_ACTIONS from dojo.utils import get_system_setting from django.contrib.contenttypes.models import ContentType import tagulous @@ -723,6 +724,8 @@ class EngagementDirectFilter(DojoFilter): product__prod_type = ModelMultipleChoiceFilter( queryset=Product_Type.objects.none(), label="Product Type") + test__engagement__product__lifecycle = MultipleChoiceFilter( + choices=Product.LIFECYCLE_CHOICES, label='Product lifecycle') status = MultipleChoiceFilter(choices=ENGAGEMENT_STATUS_CHOICES, label="Status") @@ -787,6 +790,8 @@ class EngagementFilter(DojoFilter): prod_type = ModelMultipleChoiceFilter( queryset=Product_Type.objects.none(), label="Product Type") + engagement__product__lifecycle = MultipleChoiceFilter( + choices=Product.LIFECYCLE_CHOICES, label='Product lifecycle') engagement__status = MultipleChoiceFilter(choices=ENGAGEMENT_STATUS_CHOICES, label="Status") @@ -1185,6 +1190,8 @@ class ApiFindingFilter(DojoFilter): title = CharFilter(lookup_expr='icontains') product_name = CharFilter(lookup_expr='engagement__product__name__iexact', field_name='test', label='exact product name') product_name_contains = CharFilter(lookup_expr='engagement__product__name__icontains', field_name='test', label='exact product name') + product_lifecycle = CharFilter(method=custom_filter, lookup_expr='engagement__product__lifecycle', + field_name='test__engagement__product__lifecycle', label='Comma separated list of exact product lifecycles') # DateRangeFilter created = DateRangeFilter() date = DateRangeFilter() @@ -1312,6 +1319,9 @@ class FindingFilter(FindingFilterWithTags): queryset=Product_Type.objects.none(), label="Product Type") + test__engagement__product__lifecycle = MultipleChoiceFilter( + choices=Product.LIFECYCLE_CHOICES, label='Product lifecycle') + test__engagement__product = ModelMultipleChoiceFilter( queryset=Product.objects.none(), label="Product") @@ -2238,6 +2248,20 @@ class Meta: fields = [] +class TestImportFindingActionFilter(DojoFilter): + action = MultipleChoiceFilter(choices=IMPORT_ACTIONS) + o = OrderingFilter( + # tuple-mapping retains order + fields=( + ('action', 'action'), + ) + ) + + class Meta: + model = 
Test_Import_Finding_Action + fields = [] + + class LogEntryFilter(DojoFilter): from auditlog.models import LogEntry diff --git a/dojo/finding/urls.py b/dojo/finding/urls.py index 75fc17c7221..27549aeca16 100644 --- a/dojo/finding/urls.py +++ b/dojo/finding/urls.py @@ -3,85 +3,152 @@ from dojo.finding import views urlpatterns = [ + # CRUD operations + re_path( + r'^finding/(?P\d+)$', + views.ViewFinding.as_view(), + name='view_finding' + ), + re_path( + r'^finding/(?P\d+)/edit$', + views.EditFinding.as_view(), + name='edit_finding' + ), + re_path( + r'^finding/(?P\d+)/delete$', + views.DeleteFinding.as_view(), + name='delete_finding' + ), + # Listing operations + re_path( + r'^finding$', + views.ListFindings.as_view(), + name='all_findings' + ), + re_path( + r'^finding/open$', + views.ListOpenFindings.as_view(), + name='open_findings' + ), + re_path( + r'^finding/verified$', + views.ListVerifiedFindings.as_view(), + name='verified_findings' + ), + re_path( + r'^finding/closed$', + views.ListClosedFindings.as_view(), + name='closed_findings' + ), + re_path( + r'^finding/accepted$', + views.ListAcceptedFindings.as_view(), + name='accepted_findings' + ), + re_path( + r'^product/(?P\d+)/finding/open$', + views.ListOpenFindings.as_view(), + name='product_open_findings' + ), + re_path( + r'^product/(?P\d+)/findings$', + views.ListOpenFindings.as_view(), + name='view_product_findings_old' + ), + re_path( + r'^product/(?P\d+)/finding/verified$', + views.ListVerifiedFindings.as_view(), + name='product_verified_findings' + ), + re_path( + r'^product/(?P\d+)/finding/out_of_scope$', + views.ListOutOfScopeFindings.as_view(), + name='product_out_of_scope_findings' + ), + re_path( + r'^product/(?P\d+)/finding/inactive$', + views.ListInactiveFindings.as_view(), + name='product_inactive_findings' + ), + re_path( + r'^product/(?P\d+)/finding/all$', + views.ListFindings.as_view(), + name='product_all_findings' + ), + re_path( + r'^product/(?P\d+)/finding/closed$', + views.ListClosedFindings.as_view(), + name='product_closed_findings' + ), + re_path( + r'^product/(?P\d+)/finding/false_positive$', + views.ListFalsePositiveFindings.as_view(), + name='product_false_positive_findings' + ), + re_path( + r'^product/(?P\d+)/finding/accepted$', + views.ListAcceptedFindings.as_view(), + name='product_accepted_findings' + ), + re_path( + r'^engagement/(?P\d+)/finding/open$', + views.ListOpenFindings.as_view(), + name='engagement_open_findings' + ), + re_path( + r'^engagement/(?P\d+)/finding/closed$', + views.ListClosedFindings.as_view(), + name='engagement_closed_findings' + ), + re_path( + r'^engagement/(?P\d+)/finding/verified$', + views.ListVerifiedFindings.as_view(), + name='engagement_verified_findings' + ), + re_path( + r'^engagement/(?P\d+)/finding/accepted$', + views.ListAcceptedFindings.as_view(), + name='engagement_accepted_findings' + ), + re_path( + r'^engagement/(?P\d+)/finding/all$', + views.ListFindings.as_view(), + name='engagement_all_findings' + ), # findings - re_path(r'^finding$', views.open_findings, {'view': 'All'}, - name='all_findings'), re_path(r'^finding/bulk$', views.finding_bulk_update_all, name='finding_bulk_update_all'), re_path(r'^product/(?P\d+)/finding/bulk_product$', views.finding_bulk_update_all, name='finding_bulk_update_all_product'), # re_path(r'^test/(?P\d+)/bulk', views.finding_bulk_update_all, # name='finding_bulk_update_all_test'), - re_path(r'^finding/open$', views.open_findings, - name='open_findings'), - re_path(r'^finding/verified$', views.verified_findings, - 
name='verified_findings'), - re_path(r'^product/(?P\d+)/finding/open$', views.open_findings, - name='product_open_findings'), - # legacy url kept for old bookmarks etc - re_path(r'^product/(?P\d+)/findings$', views.open_findings, - name='view_product_findings_old'), - re_path(r'^product/(?P\d+)/finding/verified$', views.verified_findings, - name='product_verified_findings'), - re_path(r'^product/(?P\d+)/finding/out_of_scope$', views.out_of_scope_findings, - name='product_out_of_scope_findings'), - re_path(r'^product/(?P\d+)/finding/inactive$', views.inactive_findings, - name='product_inactive_findings'), - re_path(r'^product/(?P\d+)/finding/all$', views.findings, {'view': 'All'}, - name='product_all_findings'), - re_path(r'^engagement/(?P\d+)/finding/open$', views.open_findings, - name='engagement_open_findings'), - re_path(r'^engagement/(?P\d+)/finding/closed$', views.closed_findings, - name='engagement_closed_findings'), - re_path(r'^engagement/(?P\d+)/finding/verified$', views.verified_findings, - name='engagement_verified_findings'), - re_path(r'^engagement/(?P\d+)/finding/accepted$', views.accepted_findings, - name='engagement_accepted_findings'), - re_path(r'^engagement/(?P\d+)/finding/all$', views.findings, {'view': 'All'}, - name='engagement_all_findings'), - re_path(r'^product/(?P\d+)/finding/closed$', views.closed_findings, - name='product_closed_findings'), - re_path(r'^product/(?P\d+)/finding/false_positive$', views.false_positive_findings, - name='product_false_positive_findings'), - re_path(r'^product/(?P\d+)/finding/accepted$', views.accepted_findings, - name='product_accepted_findings'), - re_path(r'^finding/closed$', views.closed_findings, - name='closed_findings'), - re_path(r'^finding/accepted', views.accepted_findings, - name='accepted_findings'), - re_path(r'^finding/(?P\d+)$', views.view_finding, - name='view_finding'), - re_path(r'^finding/(?P\d+)/edit$', - views.edit_finding, name='edit_finding'), - re_path(r'^finding/(?P\d+)/touch', + re_path(r'^finding/(?P\d+)/touch$', views.touch_finding, name='touch_finding'), - re_path(r'^finding/(?P\d+)/simple_risk_accept', + re_path(r'^finding/(?P\d+)/simple_risk_accept$', views.simple_risk_accept, name='simple_risk_accept_finding'), - re_path(r'^finding/(?P\d+)/simple_risk_unaccept', + re_path(r'^finding/(?P\d+)/simple_risk_unaccept$', views.risk_unaccept, name='risk_unaccept_finding'), - re_path(r'^finding/(?P\d+)/request_review', + re_path(r'^finding/(?P\d+)/request_review$', views.request_finding_review, name='request_finding_review'), - re_path(r'^finding/(?P\d+)/review', + re_path(r'^finding/(?P\d+)/review$', views.clear_finding_review, name='clear_finding_review'), - re_path(r'^finding/(?P\d+)/delete$', - views.delete_finding, name='delete_finding'), re_path(r'^finding/(?P\d+)/copy$', views.copy_finding, name='copy_finding'), re_path(r'^finding/(?P\d+)/apply_cwe$', views.apply_template_cwe, name='apply_template_cwe'), re_path(r'^finding/(?P\d+)/mktemplate$', views.mktemplate, name='mktemplate'), - re_path(r'^finding/(?P\d+)/find_template_to_apply', views.find_template_to_apply, + re_path(r'^finding/(?P\d+)/find_template_to_apply$', views.find_template_to_apply, name='find_template_to_apply'), - re_path(r'^finding/(?P\d+)/(?P\d+)/choose_finding_template_options', views.choose_finding_template_options, + re_path(r'^finding/(?P\d+)/(?P\d+)/choose_finding_template_options$', views.choose_finding_template_options, name='choose_finding_template_options'), - re_path(r'^finding/(?P\d+)/(?P\d+)/apply_template_to_finding', + 
re_path(r'^finding/(?P\d+)/(?P\d+)/apply_template_to_finding$', views.apply_template_to_finding, name='apply_template_to_finding'), re_path(r'^finding/(?P\d+)/close$', views.close_finding, name='close_finding'), - re_path(r'^finding/(?P\d+)/defect_review', + re_path(r'^finding/(?P\d+)/defect_review$', views.defect_finding_review, name='defect_finding_review'), - re_path(r'^finding/(?P\d+)/open', views.reopen_finding, + re_path(r'^finding/(?P\d+)/open$', views.reopen_finding, name='reopen_finding'), re_path(r'^finding/image/(?P[^/]+)$', views.download_finding_pic, name='download_finding_pic'), @@ -95,12 +162,12 @@ views.reset_finding_duplicate_status, name='reset_finding_duplicate_status'), re_path(r'^finding/(?P\d+)/original/(?P\d+)$', views.set_finding_as_original, name='set_finding_as_original'), - re_path(r'^finding/(?P\d+)/remediation_date', views.remediation_date, + re_path(r'^finding/(?P\d+)/remediation_date$', views.remediation_date, name='remediation_date'), # stub findings re_path(r'^stub_finding/(?P\d+)/add$', views.add_stub_finding, name='add_stub_finding'), - re_path(r'^stub_finding/(?P\d+)/promote', + re_path(r'^stub_finding/(?P\d+)/promote$', views.promote_to_finding, name='promote_to_finding'), re_path(r'^stub_finding/(?P\d+)/delete$', views.delete_stub_finding, name='delete_stub_finding'), @@ -113,13 +180,13 @@ name='add_template'), re_path(r'^template/(?P\d+)/edit$', views.edit_template, name='edit_template'), - re_path(r'^template/(?P\d+)/delete', + re_path(r'^template/(?P\d+)/delete$', views.delete_template, name='delete_template'), re_path(r'^template/export$', views.export_templates_to_json, name='export_template'), - re_path(r'^finding/(?P\d+)/jira/unlink', views.unlink_jira, name='finding_unlink_jira'), - re_path(r'^finding/(?P\d+)/jira/push', views.push_to_jira, name='finding_push_to_jira'), + re_path(r'^finding/(?P\d+)/jira/unlink$', views.unlink_jira, name='finding_unlink_jira'), + re_path(r'^finding/(?P\d+)/jira/push$', views.push_to_jira, name='finding_push_to_jira'), # re_path(r'^finding/(?P\d+)/jira/push', views.finding_link_to_jira, name='finding_link_to_jira'), ] diff --git a/dojo/finding/views.py b/dojo/finding/views.py index 70e38494842..e3d01e64ba3 100644 --- a/dojo/finding/views.py +++ b/dojo/finding/views.py @@ -3,6 +3,7 @@ import json import logging import mimetypes +import contextlib from collections import OrderedDict, defaultdict from django.db import models from django.db.models.functions import Length @@ -11,7 +12,7 @@ from django.core.exceptions import PermissionDenied, ValidationError from django.core import serializers from django.urls import reverse -from django.http import Http404, HttpResponse, JsonResponse +from django.http import Http404, HttpResponse, JsonResponse, HttpRequest from django.http import HttpResponseRedirect from django.http import StreamingHttpResponse from django.shortcuts import render, get_object_or_404 @@ -19,6 +20,7 @@ from django.utils.safestring import mark_safe from django.utils import timezone from django.views.decorators.http import require_POST +from django.views import View from itertools import chain from imagekit import ImageSpec from imagekit.processors import ResizeToFill @@ -31,6 +33,7 @@ reopen_external_issue, do_false_positive_history, match_finding_to_existing_findings, + get_page_items_and_count, ) import copy from dojo.filters import ( @@ -38,6 +41,8 @@ SimilarFindingFilter, FindingFilter, AcceptedFindingFilter, + TestImportFindingActionFilter, + TestImportFilter, ) from dojo.forms import ( 
EditPlannedRemediationDateFindingForm, @@ -81,6 +86,7 @@ Cred_Mapping, Test, Product, + Test_Import, Test_Import_Finding_Action, User, Engagement, @@ -124,184 +130,7 @@ logger = logging.getLogger(__name__) -def get_filtered_findings( - request, - pid=None, - eid=None, - tid=None, - filter_name=None, - order_by="numerical_severity", -): - findings = get_authorized_findings(Permissions.Finding_View) - - findings = findings.order_by(order_by) - - if pid: - findings = findings.filter(test__engagement__product__id=pid) - elif eid: - findings = findings.filter(test__engagement=eid) - elif tid: - findings = findings.filter(test=tid) - - if filter_name == "Open": - findings = findings.filter(finding_helper.OPEN_FINDINGS_QUERY) - elif filter_name == "Verified": - findings = findings.filter(finding_helper.VERIFIED_FINDINGS_QUERY) - elif filter_name == "Out of Scope": - findings = findings.filter(finding_helper.OUT_OF_SCOPE_FINDINGS_QUERY) - elif filter_name == "False Positive": - findings = findings.filter(finding_helper.FALSE_POSITIVE_FINDINGS_QUERY) - elif filter_name == "Inactive": - findings = findings.filter(finding_helper.INACTIVE_FINDINGS_QUERY) - elif filter_name == "Accepted": - findings = findings.filter(finding_helper.ACCEPTED_FINDINGS_QUERY) - elif filter_name == "Closed": - findings = findings.filter(finding_helper.CLOSED_FINDINGS_QUERY) - - if filter_name == "Accepted": - findings = AcceptedFindingFilter( - request.GET, findings, user=request.user, pid=pid - ) - else: - findings = FindingFilter(request.GET, findings, user=request.user, pid=pid) - - return findings - - -def open_findings(request, pid=None, eid=None, view=None): - return findings( - request, pid=pid, eid=eid, view=view, filter_name="Open", prefetch_type="open" - ) - - -def verified_findings(request, pid=None, eid=None, view=None): - return findings(request, pid=pid, eid=eid, view=view, filter_name="Verified") - - -def out_of_scope_findings(request, pid=None, eid=None, view=None): - return findings(request, pid=pid, eid=eid, view=view, filter_name="Out of Scope") - - -def false_positive_findings(request, pid=None, eid=None, view=None): - return findings(request, pid=pid, eid=eid, view=view, filter_name="False Positive") - - -def inactive_findings(request, pid=None, eid=None, view=None): - return findings(request, pid=pid, eid=eid, view=view, filter_name="Inactive") - - -def accepted_findings(request, pid=None, eid=None, view=None): - return findings(request, pid=pid, eid=eid, view=view, filter_name="Accepted") - - -def closed_findings(request, pid=None, eid=None, view=None): - return findings( - request, - pid=pid, - eid=eid, - view=view, - filter_name="Closed", - order_by=("-mitigated"), - ) - - -def findings( - request, - pid=None, - eid=None, - view=None, - filter_name=None, - order_by="numerical_severity", - prefetch_type="all", -): - show_product_column = True - custom_breadcrumb = None - product_tab = None - jira_project = None - github_config = None - - if view == "All": - filter_name = "All" - else: - logger.debug("Filtering!: %s", view) - - if pid: - product = get_object_or_404(Product, id=pid) - user_has_permission_or_403(request.user, product, Permissions.Product_View) - show_product_column = False - product_tab = Product_Tab(product, title="Findings", tab="findings") - jira_project = jira_helper.get_jira_project(product) - github_config = GITHUB_PKey.objects.filter(product=pid).first() - - elif eid: - engagement = get_object_or_404(Engagement, id=eid) - user_has_permission_or_403( - request.user, engagement, 
Permissions.Engagement_View - ) - show_product_column = False - product_tab = Product_Tab( - engagement.product, title=engagement.name, tab="engagements" - ) - jira_project = jira_helper.get_jira_project(engagement) - github_config = GITHUB_PKey.objects.filter(product__engagement=eid).first() - else: - add_breadcrumb( - title="Findings", top_level=not len(request.GET), request=request - ) - - findings_filter = get_filtered_findings( - request, pid, eid, None, filter_name, order_by - ) - - title_words = get_words_for_field(Finding, "title") - component_words = get_words_for_field(Finding, "component_name") - - # trick to prefetch after paging to avoid huge join generated by select count(*) from Paginator - paged_findings = get_page_items(request, findings_filter.qs, 25) - - paged_findings.object_list = prefetch_for_findings( - paged_findings.object_list, prefetch_type - ) - - bulk_edit_form = FindingBulkUpdateForm(request.GET) - - # show custom breadcrumb if user has filtered by exactly 1 endpoint - endpoint = None - if "endpoints" in request.GET: - endpoints = request.GET.getlist("endpoints", []) - if len(endpoints) == 1 and endpoints[0] != '': - endpoint = endpoints[0] - endpoint = get_object_or_404(Endpoint, id=endpoint) - filter_name = "Vulnerable Endpoints" - custom_breadcrumb = OrderedDict( - [ - ("Endpoints", reverse("vulnerable_endpoints")), - (endpoint, reverse("view_endpoint", args=(endpoint.id,))), - ] - ) - - if github_config: - github_config = github_config.git_conf_id - - return render( - request, - "dojo/findings_list.html", - { - "show_product_column": show_product_column, - "product_tab": product_tab, - "findings": paged_findings, - "filtered": findings_filter, - "title_words": title_words, - "component_words": component_words, - "custom_breadcrumb": custom_breadcrumb, - "filter_name": filter_name, - "jira_project": jira_project, - "bulk_edit_form": bulk_edit_form, - }, - ) - - -def prefetch_for_findings(findings, prefetch_type="all"): +def prefetch_for_findings(findings, prefetch_type="all", exclude_untouched=True): prefetched_findings = findings if isinstance( findings, QuerySet @@ -334,15 +163,20 @@ def prefetch_for_findings(findings, prefetch_type="all"): "duplicate_finding" ) - # filter out noop reimport actions from finding status history - prefetched_findings = prefetched_findings.prefetch_related( - Prefetch( - "test_import_finding_action_set", - queryset=Test_Import_Finding_Action.objects.exclude( - action=IMPORT_UNTOUCHED_FINDING - ), + if exclude_untouched: + # filter out noop reimport actions from finding status history + prefetched_findings = prefetched_findings.prefetch_related( + Prefetch( + "test_import_finding_action_set", + queryset=Test_Import_Finding_Action.objects.exclude( + action=IMPORT_UNTOUCHED_FINDING + ), + ) + ) + else: + prefetched_findings = prefetched_findings.prefetch_related( + "test_import_finding_action_set" ) - ) """ we could try to prefetch only the latest note with SubQuery and OuterRef, but I'm getting that MySql doesn't support limits in subqueries. 
@@ -426,189 +260,940 @@ def prefetch_for_similar_findings(findings): return prefetched_findings -@user_is_authorized(Finding, Permissions.Finding_View, "fid") -def view_finding(request, fid): - finding_qs = prefetch_for_findings(Finding.objects.all()) - finding = get_object_or_404(finding_qs, id=fid) - findings = ( - Finding.objects.filter(test=finding.test) - .order_by("numerical_severity") - .values_list("id", flat=True) - ) - logger.debug(findings) - try: - prev_finding_id = findings[(list(findings).index(finding.id)) - 1] - except (AssertionError, ValueError): - prev_finding_id = finding.id - try: - next_finding_id = findings[(list(findings).index(finding.id)) + 1] - except (IndexError, ValueError): +class BaseListFindings: + def __init__( + self, + filter_name: str = "All", + product_id: int = None, + engagement_id: int = None, + test_id: int = None, + order_by: str = "numerical_severity", + prefetch_type: str = "all", + ): + self.filter_name = filter_name + self.product_id = product_id + self.engagement_id = engagement_id + self.test_id = test_id + self.order_by = order_by + self.prefetch_type = prefetch_type + + def get_filter_name(self): + if not hasattr(self, "filter_name"): + self.filter_name = "All" + return self.filter_name + + def get_order_by(self): + if not hasattr(self, "order_by"): + self.order_by = "numerical_severity" + return self.order_by + + def get_prefetch_type(self): + if not hasattr(self, "prefetch_type"): + self.prefetch_type = "all" + return self.prefetch_type + + def get_product_id(self): + if not hasattr(self, "product_id"): + self.product_id = None + return self.product_id + + def get_engagement_id(self): + if not hasattr(self, "engagement_id"): + self.engagement_id = None + return self.engagement_id + + def get_test_id(self): + if not hasattr(self, "test_id"): + self.test_id = None + return self.test_id + + def filter_findings_by_object(self, findings: QuerySet[Finding]): + if product_id := self.get_product_id(): + return findings.filter(test__engagement__product__id=product_id) + elif engagement_id := self.get_engagement_id(): + return findings.filter(test__engagement=engagement_id) + elif test_id := self.get_test_id(): + return findings.filter(test=test_id) + else: + return findings + + def filter_findings_by_filter_name(self, findings: QuerySet[Finding]): + filter_name = self.get_filter_name() + if filter_name == "Open": + return findings.filter(finding_helper.OPEN_FINDINGS_QUERY) + elif filter_name == "Verified": + return findings.filter(finding_helper.VERIFIED_FINDINGS_QUERY) + elif filter_name == "Out of Scope": + return findings.filter(finding_helper.OUT_OF_SCOPE_FINDINGS_QUERY) + elif filter_name == "False Positive": + return findings.filter(finding_helper.FALSE_POSITIVE_FINDINGS_QUERY) + elif filter_name == "Inactive": + return findings.filter(finding_helper.INACTIVE_FINDINGS_QUERY) + elif filter_name == "Accepted": + return findings.filter(finding_helper.ACCEPTED_FINDINGS_QUERY) + elif filter_name == "Closed": + return findings.filter(finding_helper.CLOSED_FINDINGS_QUERY) + else: + return findings + + def filter_findings_by_form(self, request: HttpRequest, findings: QuerySet[Finding]): + # Set up the args for the form + args = [request.GET, findings] + # Set the initial form args + kwargs = { + "user": request.user, + "pid": self.get_product_id(), + } + + return ( + AcceptedFindingFilter(*args, **kwargs) + if self.get_filter_name() == "Accepted" + else FindingFilter(*args, **kwargs) + ) + + def get_filtered_findings(self): + findings = 
get_authorized_findings(Permissions.Finding_View).order_by(self.get_order_by()) + findings = self.filter_findings_by_object(findings) + findings = self.filter_findings_by_filter_name(findings) + + return findings + + def get_fully_filtered_findings(self, request: HttpRequest): + findings = self.get_filtered_findings() + return self.filter_findings_by_form(request, findings) + + +class ListFindings(View, BaseListFindings): + def get_initial_context(self, request: HttpRequest): + context = { + "filter_name": self.get_filter_name(), + "show_product_column": True, + "custom_breadcrumb": None, + "product_tab": None, + "jira_project": None, + "github_config": None, + "bulk_edit_form": FindingBulkUpdateForm(request.GET), + "title_words": get_words_for_field(Finding, "title"), + "component_words": get_words_for_field(Finding, "component_name"), + } + # Look to see if the product was used + if product_id := self.get_product_id(): + product = get_object_or_404(Product, id=product_id) + user_has_permission_or_403(request.user, product, Permissions.Product_View) + context["show_product_column"] = False + context["product_tab"] = Product_Tab(product, title="Findings", tab="findings") + context["jira_project"] = jira_helper.get_jira_project(product) + if github_config := GITHUB_PKey.objects.filter(product=product).first(): + context["github_config"] = github_config.git_conf_id + elif engagement_id := self.get_engagement_id(): + engagement = get_object_or_404(Engagement, id=engagement_id) + user_has_permission_or_403(request.user, engagement, Permissions.Engagement_View) + context["show_product_column"] = False + context["product_tab"] = Product_Tab(engagement.product, title=engagement.name, tab="engagements") + context["jira_project"] = jira_helper.get_jira_project(engagement) + if github_config := GITHUB_PKey.objects.filter(product__engagement=engagement).first(): + context["github_config"] = github_config.git_conf_id + + return request, context + + def get_template(self): + return "dojo/findings_list.html" + + def add_breadcrumbs(self, request: HttpRequest, context: dict): + # show custom breadcrumb if user has filtered by exactly 1 endpoint + if "endpoints" in request.GET: + endpoint_ids = request.GET.getlist("endpoints", []) + if len(endpoint_ids) == 1 and endpoint_ids[0] != '': + endpoint_id = endpoint_ids[0] + endpoint = get_object_or_404(Endpoint, id=endpoint_id) + context["filter_name"] = "Vulnerable Endpoints" + context["custom_breadcrumb"] = OrderedDict( + [ + ("Endpoints", reverse("vulnerable_endpoints")), + (endpoint, reverse("view_endpoint", args=(endpoint.id,))), + ] + ) + # Show the "All findings" breadcrumb if nothing is coming from the product or engagement + elif not self.get_engagement_id() and not self.get_product_id(): + add_breadcrumb(title="Findings", top_level=not len(request.GET), request=request) + + return request, context + + def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + # Store the product and engagement ids + self.product_id = product_id + self.engagement_id = engagement_id + # Get the initial context + request, context = self.get_initial_context(request) + # Get the filtered findings + filtered_findings = self.get_fully_filtered_findings(request) + # trick to prefetch after paging to avoid huge join generated by select count(*) from Paginator + paged_findings = get_page_items(request, filtered_findings.qs, 25) + # prefetch the related objects in the findings + paged_findings.object_list = prefetch_for_findings( + 
paged_findings.object_list, + self.get_prefetch_type()) + # Add some breadcrumbs + request, context = self.add_breadcrumbs(request, context) + # Add the filtered and paged findings into the context + context |= { + "findings": paged_findings, + "filtered": filtered_findings, + } + # Render the view + return render(request, self.get_template(), context) + + +class ListOpenFindings(ListFindings): + def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + self.filter_name = "Open" + return super().get(request, product_id=product_id, engagement_id=engagement_id) + + +class ListVerifiedFindings(ListFindings): + def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + self.filter_name = "Verified" + return super().get(request, product_id=product_id, engagement_id=engagement_id) + + +class ListOutOfScopeFindings(ListFindings): + def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + self.filter_name = "Out of Scope" + return super().get(request, product_id=product_id, engagement_id=engagement_id) + + +class ListFalsePositiveFindings(ListFindings): + def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + self.filter_name = "False Positive" + return super().get(request, product_id=product_id, engagement_id=engagement_id) + + +class ListInactiveFindings(ListFindings): + def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + self.filter_name = "Inactive" + return super().get(request, product_id=product_id, engagement_id=engagement_id) + + +class ListAcceptedFindings(ListFindings): + def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + self.filter_name = "Accepted" + return super().get(request, product_id=product_id, engagement_id=engagement_id) + + +class ListClosedFindings(ListFindings): + def get(self, request: HttpRequest, product_id: int = None, engagement_id: int = None): + self.filter_name = "Closed" + self.order_by = "-mitigated" + return super().get(request, product_id=product_id, engagement_id=engagement_id) + + +class ViewFinding(View): + def get_finding(self, finding_id: int): + finding_qs = prefetch_for_findings(Finding.objects.all(), exclude_untouched=False) + return get_object_or_404(finding_qs, id=finding_id) + + def get_dojo_user(self, request: HttpRequest): + user = request.user + return get_object_or_404(Dojo_User, id=user.id) + + def get_previous_and_next_findings(self, finding: Finding): + # Get the whole list of findings in the current test + findings = ( + Finding.objects.filter(test=finding.test) + .order_by("numerical_severity") + .values_list("id", flat=True) + ) + logger.debug(findings) + # Set some reasonable defaults next_finding_id = finding.id + prev_finding_id = finding.id + last_pos = (len(findings)) - 1 + # get the index of the current finding + current_finding_index = list(findings).index(finding.id) + # Try to get the previous ID + with contextlib.suppress(IndexError, ValueError): + prev_finding_id = findings[current_finding_index - 1] + # Try to get the next ID + with contextlib.suppress(IndexError, ValueError): + next_finding_id = findings[current_finding_index + 1] + + return { + "prev_finding_id": prev_finding_id, + "next_finding_id": next_finding_id, + "findings_list": findings, + "findings_list_lastElement": findings[last_pos], + } - cred_finding = ( - Cred_Mapping.objects.filter(finding=finding.id) - .select_related("cred_id") - .order_by("cred_id") - ) - 
creds = ( - Cred_Mapping.objects.filter(test=finding.test.id) - .select_related("cred_id") - .order_by("cred_id") - ) - cred_engagement = ( - Cred_Mapping.objects.filter(engagement=finding.test.engagement.id) - .select_related("cred_id") - .order_by("cred_id") - ) - user = request.user - cwe_template = None - try: - cwe_template = Finding_Template.objects.filter(cwe=finding.cwe).first() - except Finding_Template.DoesNotExist: - pass + def get_credential_objects(self, finding: Finding): + cred = ( + Cred_Mapping.objects.filter(test=finding.test.id) + .select_related("cred_id") + .order_by("cred_id") + ) + cred_engagement = ( + Cred_Mapping.objects.filter(engagement=finding.test.engagement.id) + .select_related("cred_id") + .order_by("cred_id") + ) + cred_finding = ( + Cred_Mapping.objects.filter(finding=finding.id) + .select_related("cred_id") + .order_by("cred_id") + ) - dojo_user = get_object_or_404(Dojo_User, id=user.id) + return { + "cred_finding": cred_finding, + "cred": cred, + "cred_engagement": cred_engagement, + } - notes = finding.notes.all() - files = finding.files.all() - note_type_activation = Note_Type.objects.filter(is_active=True).count() - if note_type_activation: - available_note_types = find_available_notetypes(notes) - if request.method == "POST": - user_has_permission_or_403(request.user, finding, Permissions.Note_Add) - if note_type_activation: - form = TypedNoteForm( - request.POST, available_note_types=available_note_types + def get_cwe_template(self, finding: Finding): + cwe_template = None + with contextlib.suppress(Finding_Template.DoesNotExist): + cwe_template = Finding_Template.objects.filter(cwe=finding.cwe).first() + + return { + "cwe_template": cwe_template + } + + def get_request_response(self, finding: Finding): + request_response = None + burp_request = None + burp_response = None + try: + request_response = BurpRawRequestResponse.objects.filter(finding=finding).first() + if request_response is not None: + burp_request = base64.b64decode(request_response.burpRequestBase64) + burp_response = base64.b64decode(request_response.burpResponseBase64) + except Exception as e: + logger.debug(f"unexpected error: {e}") + + return { + "burp_request": burp_request, + "burp_response": burp_response, + } + + def get_test_import_data(self, request: HttpRequest, finding: Finding): + test_imports = Test_Import.objects.filter(findings_affected=finding) + test_import_filter = TestImportFilter(request.GET, test_imports) + + test_import_finding_actions = finding.test_import_finding_action_set + test_import_finding_actions_count = test_import_finding_actions.all().count() + test_import_finding_actions = test_import_finding_actions.filter(test_import__in=test_import_filter.qs) + test_import_finding_action_filter = TestImportFindingActionFilter(request.GET, test_import_finding_actions) + + paged_test_import_finding_actions = get_page_items_and_count(request, test_import_finding_action_filter.qs, 5, prefix='test_import_finding_actions') + paged_test_import_finding_actions.object_list = paged_test_import_finding_actions.object_list.prefetch_related('test_import') + + latest_test_import_finding_action = finding.test_import_finding_action_set.order_by('-created').first + + return { + "test_import_filter": test_import_filter, + "test_import_finding_action_filter": test_import_finding_action_filter, + "paged_test_import_finding_actions": paged_test_import_finding_actions, + "latest_test_import_finding_action": latest_test_import_finding_action, + "test_import_finding_actions_count": 
test_import_finding_actions_count, + } + + def get_similar_findings(self, request: HttpRequest, finding: Finding): + # add related actions for non-similar and non-duplicate cluster members + finding.related_actions = calculate_possible_related_actions_for_similar_finding( + request, finding, finding + ) + if finding.duplicate_finding: + finding.duplicate_finding.related_actions = ( + calculate_possible_related_actions_for_similar_finding( + request, finding, finding.duplicate_finding + ) ) - else: - form = NoteForm(request.POST) - if form.is_valid(): - new_note = form.save(commit=False) + similar_findings_filter = SimilarFindingFilter( + request.GET, + queryset=get_authorized_findings(Permissions.Finding_View), + user=request.user, + finding=finding, + ) + logger.debug("similar query: %s", similar_findings_filter.qs.query) + similar_findings = get_page_items( + request, + similar_findings_filter.qs, + settings.SIMILAR_FINDINGS_MAX_RESULTS, + prefix="similar", + ) + similar_findings.object_list = prefetch_for_similar_findings( + similar_findings.object_list + ) + for similar_finding in similar_findings: + similar_finding.related_actions = ( + calculate_possible_related_actions_for_similar_finding( + request, finding, similar_finding + ) + ) + + return { + "duplicate_cluster": duplicate_cluster(request, finding), + "similar_findings": similar_findings, + "similar_findings_filter": similar_findings_filter, + } + + def get_jira_data(self, finding: Finding): + ( + can_be_pushed_to_jira, + can_be_pushed_to_jira_error, + error_code, + ) = jira_helper.can_be_pushed_to_jira(finding) + # Check the error code + if error_code: + logger.error(error_code) + + return { + "can_be_pushed_to_jira": can_be_pushed_to_jira, + "can_be_pushed_to_jira_error": can_be_pushed_to_jira_error, + } + + def get_note_form(self, request: HttpRequest): + # Set up the args for the form + args = [request.POST] if request.method == "POST" else [] + # Set the initial form args + kwargs = {} + + return NoteForm(*args, **kwargs) + + def get_typed_note_form(self, request: HttpRequest, context: dict): + # Set up the args for the form + args = [request.POST] if request.method == "POST" else [] + # Set the initial form args + kwargs = { + "available_note_types": context.get("available_note_types") + } + + return TypedNoteForm(*args, **kwargs) + + def get_form(self, request: HttpRequest, context: dict): + return ( + self.get_typed_note_form(request, context) + if context.get("note_type_activation", 0) + else self.get_note_form(request) + ) + + def process_form(self, request: HttpRequest, finding: Finding, context: dict): + if context["form"].is_valid(): + # Create the note object + new_note = context["form"].save(commit=False) new_note.author = request.user new_note.date = timezone.now() new_note.save() + # Add an entry to the note history history = NoteHistory( data=new_note.entry, time=new_note.date, current_editor=new_note.author ) history.save() new_note.history.add(history) + # Associate the note with the finding finding.notes.add(new_note) finding.last_reviewed = new_note.date - finding.last_reviewed_by = user + finding.last_reviewed_by = context["user"] finding.save() - + # Determine if the note should be sent to jira if finding.has_jira_issue: jira_helper.add_comment(finding, new_note) elif finding.has_jira_group_issue: jira_helper.add_comment(finding.finding_group, new_note) - - if note_type_activation: - form = TypedNoteForm(available_note_types=available_note_types) - else: - form = NoteForm() + # Send the notification of 
the note being added url = request.build_absolute_uri( reverse("view_finding", args=(finding.id,)) ) - title = "Finding: " + finding.title + title = f"Finding: {finding.title}" process_notifications(request, new_note, url, title) + # Add a message to the request messages.add_message( request, messages.SUCCESS, "Note saved.", extra_tags="alert-success" ) - return HttpResponseRedirect(reverse("view_finding", args=(finding.id,))) - else: + + return request, True + + return request, False + + def get_initial_context(self, request: HttpRequest, finding: Finding, user: Dojo_User): + notes = finding.notes.all() + note_type_activation = Note_Type.objects.filter(is_active=True).count() + available_note_types = None if note_type_activation: - form = TypedNoteForm(available_note_types=available_note_types) - else: - form = NoteForm() + available_note_types = find_available_notetypes(notes) + # Set the current context + context = { + "finding": finding, + "dojo_user": user, + "user": request.user, + "notes": notes, + "files": finding.files.all(), + "note_type_activation": note_type_activation, + "available_note_types": available_note_types, + "product_tab": Product_Tab( + finding.test.engagement.product, title="View Finding", tab="findings" + ) + } + # Set the form using the context, and then update the context + form = self.get_form(request, context) + context["form"] = form + + return context + + def get_template(self): + return "dojo/view_finding.html" + + def get(self, request: HttpRequest, finding_id: int): + # Get the initial objects + finding = self.get_finding(finding_id) + user = self.get_dojo_user(request) + # Make sure the user is authorized + user_has_permission_or_403(user, finding, Permissions.Finding_View) + # Set up the initial context + context = self.get_initial_context(request, finding, user) + # Add in the other extras + context |= self.get_previous_and_next_findings(finding) + context |= self.get_credential_objects(finding) + context |= self.get_cwe_template(finding) + # Add in more of the other extras + context |= self.get_request_response(finding) + context |= self.get_similar_findings(request, finding) + context |= self.get_test_import_data(request, finding) + context |= self.get_jira_data(finding) + # Render the form + return render(request, self.get_template(), context) + + def post(self, request: HttpRequest, finding_id): + # Get the initial objects + finding = self.get_finding(finding_id) + user = self.get_dojo_user(request) + # Make sure the user is authorized + user_has_permission_or_403(user, finding, Permissions.Finding_View) + # Quick perms check to determine if the user has access to add a note to the finding + user_has_permission_or_403(user, finding, Permissions.Note_Add) + # Set up the initial context + context = self.get_initial_context(request, finding, user) + # Determine the validity of the form + request, success = self.process_form(request, finding, context) + # Handle the case of a successful form + if success: + return HttpResponseRedirect(reverse("view_finding", args=(finding_id,))) + # Add in more of the other extras + context |= self.get_request_response(finding) + context |= self.get_similar_findings(request, finding) + context |= self.get_test_import_data(request, finding) + context |= self.get_jira_data(finding) + # Render the form + return render(request, self.get_template(), context) + + +class EditFinding(View): + def get_finding(self, finding_id: int): + return get_object_or_404(Finding, id=finding_id) + + def get_request_response(self, finding: 
Finding): + req_resp = None + if burp_rr := BurpRawRequestResponse.objects.filter(finding=finding).first(): + req_resp = (burp_rr.get_request(), burp_rr.get_response()) + + return req_resp + + def get_finding_form(self, request: HttpRequest, finding: Finding): + # Get the burp request if available + req_resp = self.get_request_response(finding) + # Set up the args for the form + args = [request.POST] if request.method == "POST" else [] + # Set the initial form args + kwargs = { + "instance": finding, + "req_resp": req_resp, + "can_edit_mitigated_data": finding_helper.can_edit_mitigated_data(request.user), + "initial": {"vulnerability_ids": "\n".join(finding.vulnerability_ids)}, + } + + return FindingForm(*args, **kwargs) + + def get_jira_form(self, request: HttpRequest, finding: Finding, finding_form: FindingForm = None): + # Determine if jira should be used + if (jira_project := jira_helper.get_jira_project(finding)) is not None: + # Determine if push all findings is enabled + push_all_findings = jira_helper.is_push_all_issues(finding) + # Set up the args for the form + args = [request.POST] if request.method == "POST" else [] + # Set the initial form args + kwargs = { + "push_all": push_all_findings, + "prefix": "jiraform", + "instance": finding, + "jira_project": jira_project, + "finding_form": finding_form, + } - reqres = None - burp_request = None - burp_response = None - try: - reqres = BurpRawRequestResponse.objects.filter(finding=finding).first() - if reqres is not None: - burp_request = base64.b64decode(reqres.burpRequestBase64) - burp_response = base64.b64decode(reqres.burpResponseBase64) - except Exception as e: - logger.debug(f"unespect error: {e}") + return JIRAFindingForm(*args, **kwargs) + return None - # add related actions for non-similar and non-duplicate cluster members - finding.related_actions = calculate_possible_related_actions_for_similar_finding( - request, finding, finding - ) - if finding.duplicate_finding: - finding.duplicate_finding.related_actions = ( - calculate_possible_related_actions_for_similar_finding( - request, finding, finding.duplicate_finding + def get_github_form(self, request: HttpRequest, finding: Finding): + # Determine if github should be used + if get_system_setting("enable_github"): + # Ensure there is a github conf correctly configured for the product + config_present = GITHUB_PKey.objects.filter(product=finding.test.engagement.product) + if config_present := config_present.exclude(git_conf_id=None): + # Set up the args for the form + args = [request.POST] if request.method == "POST" else [] + # Set the initial form args + kwargs = { + "enabled": finding.has_github_issue(), + "prefix": "githubform" + } + + return GITHUBFindingForm(*args, **kwargs) + return None + + def get_initial_context(self, request: HttpRequest, finding: Finding): + # Get the finding form first since it is used in another place + finding_form = self.get_finding_form(request, finding) + return { + "form": finding_form, + "finding": finding, + "jform": self.get_jira_form(request, finding, finding_form=finding_form), + "gform": self.get_github_form(request, finding), + "return_url": get_return_url(request), + "product_tab": Product_Tab( + finding.test.engagement.product, title="Edit Finding", tab="findings" + ) + } + + def validate_status_change(self, request: HttpRequest, finding: Finding, context: dict): + # If the finding is already not active, skip this extra validation + if not finding.active: + return request + # Validate the proper notes are added for mitigation + if 
(not context["form"]["active"].value() or context["form"]["false_p"].value() or context["form"]["out_of_scope"].value()) and not context["form"]["duplicate"].value(): + note_type_activation = Note_Type.objects.filter(is_active=True).count() + closing_disabled = 0 + if note_type_activation: + closing_disabled = len(get_missing_mandatory_notetypes(finding)) + if closing_disabled != 0: + error_inactive = ValidationError( + "Can not set a finding as inactive without adding all mandatory notes", + code="inactive_without_mandatory_notes", + ) + error_false_p = ValidationError( + "Can not set a finding as false positive without adding all mandatory notes", + code="false_p_without_mandatory_notes", + ) + error_out_of_scope = ValidationError( + "Can not set a finding as out of scope without adding all mandatory notes", + code="out_of_scope_without_mandatory_notes", + ) + if context["form"]["active"].value() is False: + context["form"].add_error("active", error_inactive) + if context["form"]["false_p"].value(): + context["form"].add_error("false_p", error_false_p) + if context["form"]["out_of_scope"].value(): + context["form"].add_error("out_of_scope", error_out_of_scope) + messages.add_message( + request, + messages.ERROR, + ("Can not set a finding as inactive, " + "false positive or out of scope without adding all mandatory notes"), + extra_tags="alert-danger", + ) + + return request + + def process_mitigated_data(self, request: HttpRequest, finding: Finding, context: dict): + # If active is not checked and CAN_EDIT_MITIGATED_DATA, + # mitigate the finding and the associated endpoints status + if finding_helper.can_edit_mitigated_data(request.user) and (( + context["form"]["active"].value() is False + or context["form"]["false_p"].value() + or context["form"]["out_of_scope"].value() + ) and context["form"]["duplicate"].value() is False): + now = timezone.now() + finding.is_mitigated = True + endpoint_status = finding.status_finding.all() + for status in endpoint_status: + status.mitigated_by = ( + context["form"].cleaned_data.get("mitigated_by") or request.user + ) + status.mitigated_time = ( + context["form"].cleaned_data.get("mitigated") or now + ) + status.mitigated = True + status.last_modified = timezone.now() + status.save() + + def process_false_positive_history(self, finding: Finding): + if get_system_setting("false_positive_history", False): + # If the finding is being marked as a false positive we dont need to call the + # fp history function because it will be called by the save function + # If finding was a false positive and is being reactivated: retroactively reactivates all equal findings + if finding.false_p and not finding.false_p and get_system_setting("retroactive_false_positive_history"): + logger.debug('FALSE_POSITIVE_HISTORY: Reactivating existing findings based on: %s', finding) + + existing_fp_findings = match_finding_to_existing_findings( + finding, product=finding.test.engagement.product + ).filter(false_p=True) + + for fp in existing_fp_findings: + logger.debug('FALSE_POSITIVE_HISTORY: Reactivating false positive %i: %s', fp.id, fp) + fp.active = finding.active + fp.verified = finding.verified + fp.false_p = False + fp.out_of_scope = finding.out_of_scope + fp.is_mitigated = finding.is_mitigated + fp.save_no_options() + + def process_burp_request_response(self, finding: Finding, context: dict): + if "request" in context["form"].cleaned_data or "response" in context["form"].cleaned_data: + try: + burp_rr, _ = BurpRawRequestResponse.objects.get_or_create(finding=finding) + 
except BurpRawRequestResponse.MultipleObjectsReturned: + burp_rr = BurpRawRequestResponse.objects.filter(finding=finding).first() + burp_rr.burpRequestBase64 = base64.b64encode( + context["form"].cleaned_data["request"].encode() + ) + burp_rr.burpResponseBase64 = base64.b64encode( + context["form"].cleaned_data["response"].encode() + ) + burp_rr.clean() + burp_rr.save() + + def process_finding_form(self, request: HttpRequest, finding: Finding, context: dict): + if context["form"].is_valid(): + # process some of the easy stuff first + new_finding = context["form"].save(commit=False) + new_finding.test = finding.test + new_finding.numerical_severity = Finding.get_numerical_severity(new_finding.severity) + new_finding.last_reviewed = timezone.now() + new_finding.last_reviewed_by = request.user + new_finding.tags = context["form"].cleaned_data["tags"] + # Handle group related things + if "group" in context["form"].cleaned_data: + finding_group = context["form"].cleaned_data["group"] + finding_helper.update_finding_group(new_finding, finding_group) + # Handle risk exception related things + if "risk_accepted" in context["form"].cleaned_data and context["form"]["risk_accepted"].value(): + if new_finding.test.engagement.product.enable_simple_risk_acceptance: + ra_helper.simple_risk_accept(new_finding, perform_save=False) + else: + if new_finding.risk_accepted: + ra_helper.risk_unaccept(new_finding, perform_save=False) + # Save and add new endpoints + finding_helper.add_endpoints(new_finding, context["form"]) + # Remove unrelated endpoints + endpoint_status_list = Endpoint_Status.objects.filter(finding=new_finding) + for endpoint_status in endpoint_status_list: + if endpoint_status.endpoint not in new_finding.endpoints.all(): + endpoint_status.delete() + # Handle some of the other steps + self.process_mitigated_data(request, new_finding, context) + self.process_false_positive_history(new_finding) + self.process_burp_request_response(new_finding, context) + # Save the vulnerability IDs + finding_helper.save_vulnerability_ids(new_finding, context["form"].cleaned_data["vulnerability_ids"].split()) + # Add a success message + messages.add_message( + request, + messages.SUCCESS, + "Finding saved successfully.", + extra_tags="alert-success", ) - ) - similar_findings_filter = SimilarFindingFilter( - request.GET, - queryset=get_authorized_findings(Permissions.Finding_View), - user=request.user, - finding=finding, - ) - logger.debug("similar query: %s", similar_findings_filter.qs.query) + return finding, request, True + else: + add_error_message_to_response("The form has errors, please correct them below.") + add_field_errors_to_response(context["form"]) - similar_findings = get_page_items( - request, - similar_findings_filter.qs, - settings.SIMILAR_FINDINGS_MAX_RESULTS, - prefix="similar", - ) + return finding, request, False - similar_findings.object_list = prefetch_for_similar_findings( - similar_findings.object_list - ) + def process_jira_form(self, request: HttpRequest, finding: Finding, context: dict): + # Capture case if the jira not being enabled + if context["jform"] is None: + return request, True, False - for similar_finding in similar_findings: - similar_finding.related_actions = ( - calculate_possible_related_actions_for_similar_finding( - request, finding, similar_finding + if context["jform"] and context["jform"].is_valid(): + jira_message = None + logger.debug("jform.jira_issue: %s", context["jform"].cleaned_data.get("jira_issue")) + logger.debug(JFORM_PUSH_TO_JIRA_MESSAGE, 
context["jform"].cleaned_data.get("push_to_jira")) + # can't use helper as when push_all_jira_issues is True, the checkbox gets disabled and is always false + push_all_jira_issues = jira_helper.is_push_all_issues(finding) + push_to_jira = push_all_jira_issues or context["jform"].cleaned_data.get("push_to_jira") + logger.debug("push_to_jira: %s", push_to_jira) + logger.debug("push_all_jira_issues: %s", push_all_jira_issues) + logger.debug("has_jira_group_issue: %s", finding.has_jira_group_issue) + # if the jira issue key was changed, update database + new_jira_issue_key = context["jform"].cleaned_data.get("jira_issue") + # we only support linking / changing if there is no group issue + if not finding.has_jira_group_issue: + if finding.has_jira_issue: + """ + everything in DD around JIRA integration is based on the internal id + of the issue in JIRA instead of on the public jira issue key. + I have no idea why, but it means we have to retrieve the issue from JIRA + to get the internal JIRA id. we can assume the issue exist, + which is already checked in the validation of the form + """ + if not new_jira_issue_key: + jira_helper.finding_unlink_jira(request, finding) + jira_message = "Link to JIRA issue removed successfully." + elif new_jira_issue_key != finding.jira_issue.jira_key: + jira_helper.finding_unlink_jira(request, finding) + jira_helper.finding_link_jira(request, finding, new_jira_issue_key) + jira_message = "Changed JIRA link successfully." + else: + if new_jira_issue_key: + jira_helper.finding_link_jira(request, finding, new_jira_issue_key) + jira_message = "Linked a JIRA issue successfully." + # any existing finding should be updated + push_to_jira = ( + push_to_jira + and not (push_to_jira and finding.finding_group) + and (finding.has_jira_issue or jira_helper.get_jira_instance(finding).finding_jira_sync) ) - ) + # Determine if a message should be added + if jira_message: + messages.add_message( + request, messages.SUCCESS, jira_message, extra_tags="alert-success" + ) - product_tab = Product_Tab( - finding.test.engagement.product, title="View Finding", tab="findings" - ) + return request, True, push_to_jira + else: + add_field_errors_to_response(context["jform"]) - ( - can_be_pushed_to_jira, - can_be_pushed_to_jira_error, - error_code, - ) = jira_helper.can_be_pushed_to_jira(finding) + return request, False, False - last_pos = (len(findings)) - 1 - return render( - request, - "dojo/view_finding.html", - { - "product_tab": product_tab, - "finding": finding, - "burp_request": burp_request, - "cred_finding": cred_finding, - "creds": creds, - "cred_engagement": cred_engagement, - "burp_response": burp_response, - "dojo_user": dojo_user, - "user": user, - "notes": notes, - "files": files, - "form": form, - "cwe_template": cwe_template, - "found_by": finding.found_by.all().distinct(), - "findings_list": findings, - "findings_list_lastElement": findings[last_pos], - "prev_finding_id": prev_finding_id, - "next_finding_id": next_finding_id, - "duplicate_cluster": duplicate_cluster(request, finding), - "similar_findings": similar_findings, - "similar_findings_filter": similar_findings_filter, - "can_be_pushed_to_jira": can_be_pushed_to_jira, - "can_be_pushed_to_jira_error": can_be_pushed_to_jira_error, - }, - ) + def process_github_form(self, request: HttpRequest, finding: Finding, context: dict, old_status: str): + if "githubform-push_to_github" not in request.POST: + return request, True + + if context["gform"].is_valid(): + if GITHUB_Issue.objects.filter(finding=finding).exists(): + 
update_external_issue(finding, old_status, "github") + else: + add_external_issue(finding, "github") + + return request, True + else: + add_field_errors_to_response(context["gform"]) + + return request, False + + def process_forms(self, request: HttpRequest, finding: Finding, context: dict): + form_success_list = [] + # Set vars for the completed forms + old_status = finding.status() + old_finding = copy.copy(finding) + # Validate finding mitigation + request = self.validate_status_change(request, finding, context) + # Check the validity of the form overall + new_finding, request, success = self.process_finding_form(request, finding, context) + form_success_list.append(success) + request, success, push_to_jira = self.process_jira_form(request, new_finding, context) + form_success_list.append(success) + request, success = self.process_github_form(request, new_finding, context, old_status) + form_success_list.append(success) + # Determine if all forms were successful + all_forms_valid = all(form_success_list) + # Check the validity of all the forms + if all_forms_valid: + # if we're removing the "duplicate" in the edit finding screen + # do not relaunch deduplication, otherwise, it's never taken into account + if old_finding.duplicate and not new_finding.duplicate: + new_finding.duplicate_finding = None + new_finding.save(push_to_jira=push_to_jira, dedupe_option=False) + else: + new_finding.save(push_to_jira=push_to_jira) + # we only push the group after storing the finding to make sure + # the updated data of the finding is pushed as part of the group + if push_to_jira and finding.finding_group: + jira_helper.push_to_jira(finding.finding_group) + + return request, all_forms_valid + + def get_template(self): + return "dojo/edit_finding.html" + + def get(self, request: HttpRequest, finding_id: int): + # Get the initial objects + finding = self.get_finding(finding_id) + # Make sure the user is authorized + user_has_permission_or_403(request.user, finding, Permissions.Finding_Edit) + # Set up the initial context + context = self.get_initial_context(request, finding) + # Render the form + return render(request, self.get_template(), context) + + def post(self, request: HttpRequest, finding_id: int): + # Get the initial objects + finding = self.get_finding(finding_id) + # Make sure the user is authorized + user_has_permission_or_403(request.user, finding, Permissions.Finding_Edit) + # Set up the initial context + context = self.get_initial_context(request, finding) + # Process the form + request, success = self.process_forms(request, finding, context) + # Handle the case of a successful form + if success: + return redirect_to_return_url_or_else(request, reverse("view_finding", args=(finding_id,))) + # Render the form + return render(request, self.get_template(), context) + + +class DeleteFinding(View): + def get_finding(self, finding_id: int): + return get_object_or_404(Finding, id=finding_id) + + def process_form(self, request: HttpRequest, finding: Finding, context: dict): + if context["form"].is_valid(): + product = finding.test.engagement.product + finding.delete() + # Update the grade of the product async + calculate_grade(product) + # Add a message to the request that the finding was successfully deleted + messages.add_message( + request, + messages.SUCCESS, + "Finding deleted successfully.", + extra_tags="alert-success", + ) + # Send a notification that the finding had been deleted + create_notification( + event="other", + title=f"Deletion of {finding.title}", + description=f'The finding 
"{finding.title}" was deleted by {request.user}', + product=product, + url=request.build_absolute_uri(reverse("all_findings")), + recipients=[finding.test.engagement.lead], + icon="exclamation-triangle", + ) + # return the request + return request, True + + # Add a failure message + messages.add_message( + request, + messages.ERROR, + "Unable to delete finding, please try again.", + extra_tags="alert-danger", + ) + + return request, False + + def post(self, request: HttpRequest, finding_id): + # Get the initial objects + finding = self.get_finding(finding_id) + # Make sure the user is authorized + user_has_permission_or_403(request.user, finding, Permissions.Finding_Delete) + # Get the finding form + context = { + "form": DeleteFindingForm(request.POST, instance=finding), + } + # Process the form + request, success = self.process_form(request, finding, context) + # Handle the case of a successful form + if success: + return redirect_to_return_url_or_else(request, reverse("view_test", args=(finding.test.id,))) + raise PermissionDenied() @user_is_authorized(Finding, Permissions.Finding_Edit, "fid") @@ -891,47 +1476,6 @@ def apply_template_cwe(request, fid): raise PermissionDenied() -@user_is_authorized(Finding, Permissions.Finding_Delete, "fid") -def delete_finding(request, fid): - finding = get_object_or_404(Finding, id=fid) - - if request.method == "POST": - form = DeleteFindingForm(request.POST, instance=finding) - if form.is_valid(): - tid = finding.test.id - product = finding.test.engagement.product - finding.delete() - calculate_grade(product) - messages.add_message( - request, - messages.SUCCESS, - "Finding deleted successfully.", - extra_tags="alert-success", - ) - create_notification( - event="other", - title="Deletion of %s" % finding.title, - description='The finding "%s" was deleted by %s' - % (finding.title, request.user), - product=product, - url=request.build_absolute_uri(reverse("all_findings")), - recipients=[finding.test.engagement.lead], - icon="exclamation-triangle", - ) - return redirect_to_return_url_or_else( - request, reverse("view_test", args=(tid,)) - ) - else: - messages.add_message( - request, - messages.ERROR, - "Unable to delete finding, please try again.", - extra_tags="alert-danger", - ) - else: - raise PermissionDenied() - - @user_is_authorized(Finding, Permissions.Finding_Edit, "fid") def copy_finding(request, fid): finding = get_object_or_404(Finding, id=fid) @@ -991,325 +1535,6 @@ def copy_finding(request, fid): ) -@user_is_authorized(Finding, Permissions.Finding_Edit, "fid") -def edit_finding(request, fid): - system_settings = System_Settings.objects.get() - - finding = get_object_or_404(Finding, id=fid) - old_status = finding.status() - old_finding = copy.copy(finding) - burp_rr = BurpRawRequestResponse.objects.filter(finding=finding).first() - if burp_rr: - req_resp = (burp_rr.get_request(), burp_rr.get_response()) - else: - req_resp = None - - form = FindingForm( - instance=finding, - req_resp=req_resp, - can_edit_mitigated_data=finding_helper.can_edit_mitigated_data(request.user), - initial={"vulnerability_ids": "\n".join(finding.vulnerability_ids)}, - ) - jform = None - push_all_jira_issues = jira_helper.is_push_all_issues(finding) - gform = None - use_jira = jira_helper.get_jira_project(finding) is not None - - github_enabled = finding.has_github_issue() - - if request.method == "POST": - form = FindingForm( - request.POST, - instance=finding, - req_resp=None, - can_edit_mitigated_data=finding_helper.can_edit_mitigated_data( - request.user - ), - ) 
- - if finding.active: - if (form["active"].value() is False or form["false_p"].value() - or form["out_of_scope"].value()) and form["duplicate"].value() is False: - note_type_activation = Note_Type.objects.filter(is_active=True).count() - closing_disabled = 0 - if note_type_activation: - closing_disabled = len(get_missing_mandatory_notetypes(finding)) - if closing_disabled != 0: - error_inactive = ValidationError( - "Can not set a finding as inactive without adding all mandatory notes", - code="inactive_without_mandatory_notes", - ) - error_false_p = ValidationError( - "Can not set a finding as false positive without adding all mandatory notes", - code="false_p_without_mandatory_notes", - ) - error_out_of_scope = ValidationError( - "Can not set a finding as out of scope without adding all mandatory notes", - code="out_of_scope_without_mandatory_notes", - ) - if form["active"].value() is False: - form.add_error("active", error_inactive) - if form["false_p"].value(): - form.add_error("false_p", error_false_p) - if form["out_of_scope"].value(): - form.add_error("out_of_scope", error_out_of_scope) - messages.add_message( - request, - messages.ERROR, - ("Can not set a finding as inactive, " - "false positive or out of scope without adding all mandatory notes"), - extra_tags="alert-danger", - ) - - if use_jira: - jform = JIRAFindingForm( - request.POST, - prefix="jiraform", - push_all=push_all_jira_issues, - instance=finding, - jira_project=jira_helper.get_jira_project(finding), - finding_form=form, - ) - - if form.is_valid() and (jform is None or jform.is_valid()): - if jform: - logger.debug( - "jform.jira_issue: %s", jform.cleaned_data.get("jira_issue") - ) - logger.debug( - JFORM_PUSH_TO_JIRA_MESSAGE, jform.cleaned_data.get("push_to_jira") - ) - - new_finding = form.save(commit=False) - new_finding.test = finding.test - new_finding.numerical_severity = Finding.get_numerical_severity( - new_finding.severity - ) - - if "group" in form.cleaned_data: - finding_group = form.cleaned_data["group"] - finding_helper.update_finding_group(new_finding, finding_group) - - if "risk_accepted" in form.cleaned_data and form["risk_accepted"].value(): - if new_finding.test.engagement.product.enable_simple_risk_acceptance: - ra_helper.simple_risk_accept(new_finding, perform_save=False) - else: - if new_finding.risk_accepted: - ra_helper.risk_unaccept(new_finding, perform_save=False) - - # Save and add new endpoints - finding_helper.add_endpoints(new_finding, form) - - # Remove unrelated endpoints - endpoint_status_list = Endpoint_Status.objects.filter(finding=new_finding) - for endpoint_status in endpoint_status_list: - if endpoint_status.endpoint not in new_finding.endpoints.all(): - endpoint_status.delete() - - new_finding.last_reviewed = timezone.now() - new_finding.last_reviewed_by = request.user - - new_finding.tags = form.cleaned_data["tags"] - - # If active is not checked and CAN_EDIT_MIIGATED_DATA, - # mitigate the finding and the associated endpoints status - if finding_helper.can_edit_mitigated_data(request.user): - if ( - form["active"].value() is False - or form["false_p"].value() - or form["out_of_scope"].value() - ) and form["duplicate"].value() is False: - now = timezone.now() - new_finding.is_mitigated = True - endpoint_status = new_finding.status_finding.all() - for status in endpoint_status: - status.mitigated_by = ( - form.cleaned_data.get("mitigated_by") or request.user - ) - status.mitigated_time = ( - form.cleaned_data.get("mitigated") or now - ) - status.mitigated = True - 
status.last_modified = timezone.now() - status.save() - - if system_settings.false_positive_history: - # If the finding is being marked as a false positive we dont need to call the - # fp history function because it will be called by the save function - - # If finding was a false positive and is being reactivated: retroactively reactivates all equal findings - if old_finding.false_p and not new_finding.false_p: - if system_settings.retroactive_false_positive_history: - logger.debug('FALSE_POSITIVE_HISTORY: Reactivating existing findings based on: %s', new_finding) - - existing_fp_findings = match_finding_to_existing_findings( - new_finding, product=new_finding.test.engagement.product - ).filter(false_p=True) - - for fp in existing_fp_findings: - logger.debug('FALSE_POSITIVE_HISTORY: Reactivating false positive %i: %s', fp.id, fp) - fp.active = new_finding.active - fp.verified = new_finding.verified - fp.false_p = False - fp.out_of_scope = new_finding.out_of_scope - fp.is_mitigated = new_finding.is_mitigated - fp.save_no_options() - - if "request" in form.cleaned_data or "response" in form.cleaned_data: - try: - burp_rr, _ = BurpRawRequestResponse.objects.get_or_create(finding=finding) - except BurpRawRequestResponse.MultipleObjectsReturned: - burp_rr = BurpRawRequestResponse.objects.filter(finding=finding).first() - burp_rr.burpRequestBase64 = base64.b64encode( - form.cleaned_data["request"].encode() - ) - burp_rr.burpResponseBase64 = base64.b64encode( - form.cleaned_data["response"].encode() - ) - burp_rr.clean() - burp_rr.save() - - push_to_jira = False - jira_message = None - if jform and jform.is_valid(): - # Push to Jira? - - logger.debug( - JFORM_PUSH_TO_JIRA_MESSAGE, jform.cleaned_data.get("push_to_jira") - ) - # can't use helper as when push_all_jira_issues is True, the checkbox gets disabled and is always false - # push_to_jira = jira_helper.is_push_to_jira(new_finding, jform.cleaned_data.get('push_to_jira')) - push_to_jira = push_all_jira_issues or jform.cleaned_data.get( - "push_to_jira" - ) - - logger.debug("push_to_jira: %s", push_to_jira) - logger.debug("push_all_jira_issues: %s", push_all_jira_issues) - logger.debug( - "has_jira_group_issue: %s", new_finding.has_jira_group_issue - ) - - # if the jira issue key was changed, update database - new_jira_issue_key = jform.cleaned_data.get("jira_issue") - # we only support linking / changing if there is no group issue - if not new_finding.has_jira_group_issue: - if new_finding.has_jira_issue: - """ - everything in DD around JIRA integration is based on the internal id - of the issue in JIRA instead of on the public jira issue key. - I have no idea why, but it means we have to retrieve the issue from JIRA - to get the internal JIRA id. we can assume the issue exist, - which is already checked in the validation of the jform - """ - - if not new_jira_issue_key: - jira_helper.finding_unlink_jira(request, new_finding) - jira_message = "Link to JIRA issue removed successfully." - - elif new_jira_issue_key != new_finding.jira_issue.jira_key: - jira_helper.finding_unlink_jira(request, new_finding) - jira_helper.finding_link_jira( - request, new_finding, new_jira_issue_key - ) - jira_message = "Changed JIRA link successfully." - else: - if new_jira_issue_key: - jira_helper.finding_link_jira( - request, new_finding, new_jira_issue_key - ) - jira_message = "Linked a JIRA issue successfully." 
- - if "githubform-push_to_github" in request.POST: - gform = GITHUBFindingForm( - request.POST, prefix="githubform", enabled=github_enabled - ) - if gform.is_valid(): - if GITHUB_Issue.objects.filter(finding=new_finding).exists(): - update_external_issue(new_finding, old_status, "github") - else: - add_external_issue(new_finding, "github") - - # if there's a finding group, that's what we need to push - push_group_to_jira = push_to_jira and new_finding.finding_group - # any existing finding should be updated - push_to_jira = ( - push_to_jira - and not push_group_to_jira - and new_finding.has_jira_issue - and jira_helper.get_jira_instance(finding).finding_jira_sync - ) - - finding_helper.save_vulnerability_ids( - new_finding, form.cleaned_data["vulnerability_ids"].split() - ) - - # if we're removing the "duplicate" in the edit finding screen - # do not relaunch deduplication, otherwise, it's never taken into account - if old_finding.duplicate and not new_finding.duplicate: - new_finding.duplicate_finding = None - new_finding.save(push_to_jira=push_to_jira, dedupe_option=False) - else: - new_finding.save(push_to_jira=push_to_jira) - - # we only push the group after storing the finding to make sure - # the updated data of the finding is pushed as part of the group - if push_group_to_jira: - jira_helper.push_to_jira(new_finding.finding_group) - - messages.add_message( - request, - messages.SUCCESS, - "Finding saved successfully.", - extra_tags="alert-success", - ) - - if jira_message: - messages.add_message( - request, messages.SUCCESS, jira_message, extra_tags="alert-success" - ) - - return redirect_to_return_url_or_else( - request, reverse("view_finding", args=(new_finding.id,)) - ) - else: - add_error_message_to_response( - "The form has errors, please correct them below." 
- ) - add_field_errors_to_response(jform) - add_field_errors_to_response(form) - else: - if use_jira: - jform = JIRAFindingForm( - push_all=push_all_jira_issues, - prefix="jiraform", - instance=finding, - jira_project=jira_helper.get_jira_project(finding), - finding_form=form, - ) - - if get_system_setting("enable_github"): - if GITHUB_PKey.objects.filter(product=finding.test.engagement.product - ).exclude(git_conf_id=None): - gform = GITHUBFindingForm(enabled=github_enabled, prefix="githubform") - - product_tab = Product_Tab( - finding.test.engagement.product, title="Edit Finding", tab="findings" - ) - - return render( - request, - "dojo/edit_finding.html", - { - "product_tab": product_tab, - "form": form, - "finding": finding, - "jform": jform, - "gform": gform, - "return_url": get_return_url(request), - }, - ) - - @user_is_authorized(Finding, Permissions.Finding_Edit, "fid") def remediation_date(request, fid): finding = get_object_or_404(Finding, id=fid) diff --git a/dojo/group/urls.py b/dojo/group/urls.py index 3fc0cbbe60c..5348f97c1d1 100644 --- a/dojo/group/urls.py +++ b/dojo/group/urls.py @@ -3,15 +3,15 @@ from dojo.group import views urlpatterns = [ - re_path(r'^group$', views.group, name='groups'), - re_path(r'^group/(?P\d+)$', views.view_group, name='view_group'), - re_path(r'^group/(?P\d+)/edit$', views.edit_group, name='edit_group'), - re_path(r'^group/(?P\d+)/delete', views.delete_group, name='delete_group'), - re_path(r'^group/add$', views.add_group, name='add_group'), - re_path(r'^group/(?P\d+)/add_product_group', views.add_product_group, name='add_product_group_group'), - re_path(r'^group/(?P\d+)/add_product_type_group', views.add_product_type_group, name='add_product_type_group_group'), - re_path(r'^group/(?P\d+)/add_group_member', views.add_group_member, name='add_group_member'), - re_path(r'group/member/(?P\d+)/edit_group_member', views.edit_group_member, name='edit_group_member'), - re_path(r'group/member/(?P\d+)/delete_group_member', views.delete_group_member, name='delete_group_member'), + re_path(r'^group$', views.ListGroups.as_view(), name='groups'), + re_path(r'^group/add$', views.AddGroup.as_view(), name='add_group'), + re_path(r'^group/(?P\d+)$', views.ViewGroup.as_view(), name='view_group'), + re_path(r'^group/(?P\d+)/edit$', views.EditGroup.as_view(), name='edit_group'), + re_path(r'^group/(?P\d+)/delete$', views.DeleteGroup.as_view(), name='delete_group'), + re_path(r'^group/(?P\d+)/add_product_group$', views.add_product_group, name='add_product_group_group'), + re_path(r'^group/(?P\d+)/add_product_type_group$', views.add_product_type_group, name='add_product_type_group_group'), + re_path(r'^group/(?P\d+)/add_group_member$', views.add_group_member, name='add_group_member'), + re_path(r'group/member/(?P\d+)/edit_group_member$', views.edit_group_member, name='edit_group_member'), + re_path(r'group/member/(?P\d+)/delete_group_member$', views.delete_group_member, name='delete_group_member'), re_path(r'^group/(?P\d+)/edit_permissions$', views.edit_permissions, name='edit_group_permissions') ] diff --git a/dojo/group/views.py b/dojo/group/views.py index 9cce9351268..8cdf17b31c3 100644 --- a/dojo/group/views.py +++ b/dojo/group/views.py @@ -1,10 +1,13 @@ import logging +from django.views import View +from django.db.models.query import QuerySet from django.contrib import messages from django.contrib.auth.decorators import user_passes_test from django.contrib.auth.models import Group from django.db.models.deletion import RestrictedError from django.urls import reverse 
-from django.http import HttpResponseRedirect +from django.http import HttpResponseRedirect, HttpRequest +from django.core.exceptions import PermissionDenied from django.shortcuts import render, get_object_or_404 from django.contrib.admin.utils import NestedObjects from django.db import DEFAULT_DB_ALIAS @@ -15,177 +18,356 @@ from dojo.forms import DojoGroupForm, DeleteGroupForm, Add_Product_Group_GroupForm, \ Add_Product_Type_Group_GroupForm, Add_Group_MemberForm, Edit_Group_MemberForm, \ Delete_Group_MemberForm, GlobalRoleForm, ConfigurationPermissionsForm -from dojo.models import Dojo_Group, Product_Group, Product_Type_Group, Dojo_Group_Member -from dojo.utils import get_page_items, add_breadcrumb, is_title_in_breadcrumbs +from dojo.models import Dojo_Group, Product_Group, Product_Type_Group, Dojo_Group_Member, Global_Role +from dojo.utils import get_page_items, add_breadcrumb, is_title_in_breadcrumbs, redirect_to_return_url_or_else from dojo.group.queries import get_authorized_groups, get_product_groups_for_group, \ get_product_type_groups_for_group, get_group_members_for_group from dojo.authorization.authorization_decorators import user_is_configuration_authorized +from dojo.authorization.authorization import user_has_configuration_permission, user_has_permission_or_403 from dojo.group.utils import get_auth_group_name logger = logging.getLogger(__name__) -@user_is_configuration_authorized('auth.view_group') -def group(request): - groups = get_authorized_groups(Permissions.Group_View) - groups = GroupFilter(request.GET, queryset=groups) - paged_groups = get_page_items(request, groups.qs, 25) - add_breadcrumb(title="All Groups", top_level=True, request=request) - return render(request, 'dojo/groups.html', { - 'groups': paged_groups, - 'filtered': groups, - 'name': 'All Groups' - }) - - -# Users need to be authorized to view groups in general and only the groups they are a member of -# because with the group they can see user information that might be considered as confidential -@user_is_configuration_authorized('auth.view_group') -@user_is_authorized(Dojo_Group, Permissions.Group_View, 'gid') -def view_group(request, gid): - group = get_object_or_404(Dojo_Group, id=gid) - products = get_product_groups_for_group(group) - product_types = get_product_type_groups_for_group(group) - group_members = get_group_members_for_group(group) - - # Create authorization group if it doesn't exist and add product members - if not group.auth_group: - auth_group = Group(name=get_auth_group_name(group)) - auth_group.save() - group.auth_group = auth_group - members = group.users.all() - for member in members: - auth_group.user_set.add(member) - group.save() - configuration_permission_form = ConfigurationPermissionsForm(group=group) - - add_breadcrumb(title="View Group", top_level=False, request=request) - return render(request, 'dojo/view_group.html', { - 'group': group, - 'products': products, - 'product_types': product_types, - 'group_members': group_members, - 'configuration_permission_form': configuration_permission_form, - }) - - -@user_is_authorized(Dojo_Group, Permissions.Group_Edit, 'gid') -def edit_group(request, gid): - group = get_object_or_404(Dojo_Group, id=gid) - form = DojoGroupForm(instance=group) - - global_role = group.global_role if hasattr(group, 'global_role') else None - if global_role is None: - previous_global_role = None - global_role_form = GlobalRoleForm() - else: - previous_global_role = global_role.role - global_role_form = GlobalRoleForm(instance=global_role) - - if 
request.method == 'POST': - form = DojoGroupForm(request.POST, instance=group) - - if global_role is None: - global_role_form = GlobalRoleForm(request.POST) - else: - global_role_form = GlobalRoleForm(request.POST, instance=global_role) - - if form.is_valid() and global_role_form.is_valid(): - if global_role_form.cleaned_data['role'] != previous_global_role and not request.user.is_superuser: - messages.add_message(request, - messages.WARNING, - 'Only superusers are allowed to change the global role.', - extra_tags='alert-warning') +class ListGroups(View): + def get_groups(self): + return get_authorized_groups(Permissions.Group_View) + + def get_initial_context(self, request: HttpRequest, groups: QuerySet[Dojo_Group]): + filtered_groups = GroupFilter(request.GET, queryset=groups) + return { + "name": "All Groups", + "filtered": filtered_groups, + "groups": get_page_items(request, filtered_groups.qs, 25), + } + + def get_template(self): + return "dojo/groups.html" + + def get(self, request: HttpRequest): + # quick permission check + if not user_has_configuration_permission(request.user, 'auth.view_group'): + raise PermissionDenied + # Fetch the groups + groups = self.get_groups() + # Set up the initial context + context = self.get_initial_context(request, groups) + # Add a breadcrumb + add_breadcrumb(title="All Groups", top_level=True, request=request) + # Render the page + return render(request, self.get_template(), context) + + +class ViewGroup(View): + def get_group(self, group_id: int): + return get_object_or_404(Dojo_Group, id=group_id) + + def get_initial_context(self, group: Dojo_Group): + return { + "group": group, + "products": get_product_groups_for_group(group), + "product_types": get_product_type_groups_for_group(group), + "group_members": get_group_members_for_group(group), + } + + def set_configuration_permissions(self, group: Dojo_Group, context: dict): + # Create authorization group if it doesn't exist and add product members + if not group.auth_group: + auth_group = Group(name=get_auth_group_name(group)) + auth_group.save() + group.auth_group = auth_group + members = group.users.all() + for member in members: + auth_group.user_set.add(member) + group.save() + # create the config permissions form + context["configuration_permission_form"] = ConfigurationPermissionsForm(group=group) + + return context + + def get_template(self): + return "dojo/view_group.html" + + def get(self, request: HttpRequest, group_id: int): + # Fetch the group + group = self.get_group(group_id) + # quick permission check + if not user_has_configuration_permission(request.user, 'auth.view_group'): + raise PermissionDenied + user_has_permission_or_403(request.user, group, Permissions.Group_View) + # Set up the initial context + context = self.get_initial_context(group) + # Set up the config permissions + context = self.set_configuration_permissions(group, context) + # Add a breadcrumb + add_breadcrumb(title="View Group", top_level=False, request=request) + # Render the page + return render(request, self.get_template(), context) + + +class EditGroup(View): + def get_group(self, group_id: int): + return get_object_or_404(Dojo_Group, id=group_id) + + def get_global_role(self, group: Dojo_Group): + # Try to pull the global role from the group object + return group.global_role if hasattr(group, 'global_role') else None + + def get_group_form(self, request: HttpRequest, group: Dojo_Group): + # Set up the args for the form + args = [request.POST] if request.method == "POST" else [] + # Set the initial form args 
+ kwargs = { + "instance": group, + } + + return DojoGroupForm(*args, **kwargs) + + def get_global_role_form(self, request: HttpRequest, global_role: Global_Role): + # Set up the args for the form + args = [request.POST] if request.method == "POST" else [] + # Set the initial form args + kwargs = {} + # Add the global role to te kwargs if it is present + if global_role: + kwargs["instance"] = global_role + + return GlobalRoleForm(*args, **kwargs) + + def get_initial_context(self, request: HttpRequest, group: Dojo_Group, global_role: Global_Role): + return { + "form": self.get_group_form(request, group), + "global_role_form": self.get_global_role_form(request, global_role), + "previous_global_role": global_role.role if global_role else None, + } + + def process_forms(self, request: HttpRequest, group: Dojo_Group, context: dict): + # Validate the forms + if context["form"].is_valid() and context["global_role_form"].is_valid(): + # Determine if the previous global roles was changed with proper authorization + if context["global_role_form"].cleaned_data['role'] != context["previous_global_role"] and not request.user.is_superuser: + messages.add_message( + request, + messages.WARNING, + 'Only superusers are allowed to change the global role.', + extra_tags='alert-warning') else: - form.save() - global_role = global_role_form.save(commit=False) + context["form"].save() + global_role = context["global_role_form"].save(commit=False) global_role.group = group global_role.save() - messages.add_message(request, - messages.SUCCESS, - 'Group saved successfully.', - extra_tags='alert-success') - else: - messages.add_message(request, - messages.ERROR, - 'Group was not saved successfully.', - extra_tags='alert_danger') - - add_breadcrumb(title="Edit Group", top_level=False, request=request) - return render(request, "dojo/add_group.html", { - 'form': form, - 'global_role_form': global_role_form, - }) - - -@user_is_authorized(Dojo_Group, Permissions.Group_Delete, 'gid') -def delete_group(request, gid): - group = get_object_or_404(Dojo_Group, id=gid) - form = DeleteGroupForm(instance=group) - - if request.method == 'POST': - if 'id' in request.POST and str(group.id) == request.POST['id']: - form = DeleteGroupForm(request.POST, instance=group) - if form.is_valid(): - try: - group.delete() - messages.add_message(request, - messages.SUCCESS, - 'Group and relationships successfully removed.', - extra_tags='alert-success') - except RestrictedError as err: - messages.add_message(request, - messages.WARNING, - 'Group cannot be deleted: {}'.format(err), - extra_tags='alert-warning') - return HttpResponseRedirect(reverse('groups')) + messages.add_message( + request, + messages.SUCCESS, + 'Group saved successfully.', + extra_tags='alert-success') - collector = NestedObjects(using=DEFAULT_DB_ALIAS) - collector.collect([group]) - rels = collector.nested() - add_breadcrumb(title="Delete Group", top_level=False, request=request) - return render(request, 'dojo/delete_group.html', { - 'to_delete': group, - 'form': form, - 'rels': rels - }) - - -@user_is_configuration_authorized('auth.add_group') -def add_group(request): - form = DojoGroupForm - global_role_form = GlobalRoleForm() - group = None - - if request.method == 'POST': - form = DojoGroupForm(request.POST) - global_role_form = GlobalRoleForm(request.POST) - if form.is_valid() and global_role_form.is_valid(): - if global_role_form.cleaned_data['role'] is not None and not request.user.is_superuser: - messages.add_message(request, messages.ERROR, - 'Only superusers are 
allowed to set global role.', - extra_tags='alert-warning') + return request, True + else: + messages.add_message( + request, + messages.ERROR, + 'Group was not saved successfully.', + extra_tags='alert_danger') + + return request, False + + def get_template(self): + return "dojo/add_group.html" + + def get(self, request: HttpRequest, group_id: int): + # Fetch the group and global role + group = self.get_group(group_id) + global_role = self.get_global_role(group) + # quick permission check + user_has_permission_or_403(request.user, group, Permissions.Group_Edit) + # Set up the initial context + context = self.get_initial_context(request, group, global_role) + # Add a breadcrumb + add_breadcrumb(title="Edit Group", top_level=False, request=request) + # Render the page + return render(request, self.get_template(), context) + + def post(self, request: HttpRequest, group_id: int): + # Fetch the group and global role + group = self.get_group(group_id) + global_role = self.get_global_role(group) + # quick permission check + user_has_permission_or_403(request.user, group, Permissions.Group_Edit) + # Set up the initial context + context = self.get_initial_context(request, group, global_role) + # Process the forms + request, success = self.process_forms(request, group, context) + # Handle the case of a successful form + if success: + return redirect_to_return_url_or_else(request, reverse("view_group", args=(group_id,))) + # Add a breadcrumb + add_breadcrumb(title="Edit Group", top_level=False, request=request) + # Render the page + return render(request, self.get_template(), context) + + +class DeleteGroup(View): + def get_group(self, group_id: int): + return get_object_or_404(Dojo_Group, id=group_id) + + def get_group_form(self, request: HttpRequest, group: Dojo_Group): + # Set up the args for the form + args = [request.POST] if request.method == "POST" else [] + # Set the initial form args + kwargs = { + "instance": group, + } + + return DeleteGroupForm(*args, **kwargs) + + def get_initial_context(self, request: HttpRequest, group: Dojo_Group): + # Add the related objects to the delete page + collector = NestedObjects(using=DEFAULT_DB_ALIAS) + collector.collect([group]) + return { + "form": self.get_group_form(request, group), + "to_delete": group, + "rels": collector.nested() + } + + def process_forms(self, request: HttpRequest, group: Dojo_Group, context: dict): + # Validate the forms + if context["form"].is_valid(): + try: + group.delete() + messages.add_message( + request, + messages.SUCCESS, + 'Group and relationships successfully removed.', + extra_tags='alert-success') + except RestrictedError as err: + messages.add_message( + request, + messages.WARNING, + f'Group cannot be deleted: {err}', + extra_tags='alert-warning', + ) + return request, False + + return request, True + return request, False + + def get_template(self): + return "dojo/delete_group.html" + + def get(self, request: HttpRequest, group_id: int): + # Fetch the group and global role + group = self.get_group(group_id) + # quick permission check + user_has_permission_or_403(request.user, group, Permissions.Group_Delete) + # Set up the initial context + context = self.get_initial_context(request, group) + # Add a breadcrumb + add_breadcrumb(title="Delete Group", top_level=False, request=request) + # Render the page + return render(request, self.get_template(), context) + + def post(self, request: HttpRequest, group_id: int): + # Fetch the group and global role + group = self.get_group(group_id) + # quick permission check + 
user_has_permission_or_403(request.user, group, Permissions.Group_Delete) + # Set up the initial context + context = self.get_initial_context(request, group) + # Process the forms + request, success = self.process_forms(request, group, context) + # Handle the case of a successful form + if success: + return redirect_to_return_url_or_else(request, reverse("groups")) + # Add a breadcrumb + add_breadcrumb(title="Delete Group", top_level=False, request=request) + # Render the page + return render(request, self.get_template(), context) + + +class AddGroup(View): + def get_group_form(self, request: HttpRequest): + # Set up the args for the form + args = [request.POST] if request.method == "POST" else [] + # Set the initial form args + kwargs = {} + + return DojoGroupForm(*args, **kwargs) + + def get_global_role_form(self, request: HttpRequest): + # Set up the args for the form + args = [request.POST] if request.method == "POST" else [] + # Set the initial form args + kwargs = {} + + return GlobalRoleForm(*args, **kwargs) + + def get_initial_context(self, request: HttpRequest): + return { + "form": self.get_group_form(request), + "global_role_form": self.get_global_role_form(request), + } + + def process_forms(self, request: HttpRequest, context: dict): + group = None + # Validate the forms + if context["form"].is_valid() and context["global_role_form"].is_valid(): + if context["global_role_form"].cleaned_data['role'] is not None and not request.user.is_superuser: + messages.add_message( + request, + messages.ERROR, + 'Only superusers are allowed to set global role.', + extra_tags='alert-warning') else: - group = form.save() - global_role = global_role_form.save(commit=False) + group = context["form"].save() + global_role = context["global_role_form"].save(commit=False) global_role.group = group global_role.save() - - messages.add_message(request, - messages.SUCCESS, - 'Group was added successfully.', - extra_tags='alert-success') - return HttpResponseRedirect(reverse('view_group', args=(group.id,))) + messages.add_message( + request, + messages.SUCCESS, + 'Group was added successfully.', + extra_tags='alert-success') + return request, group, True else: - messages.add_message(request, messages.ERROR, - 'Group was not added successfully.', - extra_tags='alert-danger') - - add_breadcrumb(title="Add Group", top_level=False, request=request) - return render(request, "dojo/add_group.html", { - 'form': form, - 'global_role_form': global_role_form, - }) + messages.add_message( + request, + messages.ERROR, + 'Group was not added successfully.', + extra_tags='alert-danger') + + return request, group, False + + def get_template(self): + return "dojo/add_group.html" + + def get(self, request: HttpRequest): + # quick permission check + if not user_has_configuration_permission(request.user, 'auth.add_group'): + raise PermissionDenied + # Set up the initial context + context = self.get_initial_context(request) + # Add a breadcrumb + add_breadcrumb(title="Add Group", top_level=False, request=request) + # Render the page + return render(request, self.get_template(), context) + + def post(self, request: HttpRequest): + # quick permission check + if not user_has_configuration_permission(request.user, 'auth.add_group'): + raise PermissionDenied + # Set up the initial context + context = self.get_initial_context(request) + # Process the forms + request, group, success = self.process_forms(request, context) + # Handle the case of a successful form + if success: + return redirect_to_return_url_or_else(request, 
reverse("view_group", args=(group.id,))) + # Add a breadcrumb + add_breadcrumb(title="Add Group", top_level=False, request=request) + # Render the page + return render(request, self.get_template(), context) @user_is_authorized(Dojo_Group, Permissions.Group_Manage_Members, 'gid') diff --git a/dojo/notifications/helper.py b/dojo/notifications/helper.py index c95d0df6772..e09cb5354ec 100644 --- a/dojo/notifications/helper.py +++ b/dojo/notifications/helper.py @@ -268,8 +268,11 @@ def send_msteams_notification(event, user=None, *args, **kwargs): @app.task def send_mail_notification(event, user=None, *args, **kwargs): from dojo.utils import get_system_setting - - if user: + email_from_address = get_system_setting('email_from') + # Attempt to get the "to" address + if "recipient" in kwargs: + address = kwargs.get("recipient") + elif user: address = user.email else: address = get_system_setting('mail_notifications_to') @@ -277,16 +280,16 @@ def send_mail_notification(event, user=None, *args, **kwargs): logger.debug('notification email for user %s to %s', user, address) try: - subject = '%s notification' % get_system_setting('team_name') + subject = f"{get_system_setting('team_name')} notification" if 'title' in kwargs: - subject += ': %s' % kwargs['title'] + subject += f": {kwargs['title']}" email = EmailMessage( subject, create_notification_message(event, user, 'mail', *args, **kwargs), - get_system_setting('email_from'), + email_from_address, [address], - headers={"From": "{}".format(get_system_setting('email_from'))} + headers={"From": f"{email_from_address}"}, ) email.content_subtype = 'html' logger.debug('sending email alert') @@ -296,7 +299,6 @@ def send_mail_notification(event, user=None, *args, **kwargs): except Exception as e: logger.exception(e) log_alert(e, "Email Notification", title=kwargs['title'], description=str(e), url=kwargs['url']) - pass def send_alert_notification(event, user=None, *args, **kwargs): diff --git a/dojo/notifications/urls.py b/dojo/notifications/urls.py index 5474c993259..68d8c3f22a6 100644 --- a/dojo/notifications/urls.py +++ b/dojo/notifications/urls.py @@ -2,8 +2,8 @@ from . 
import views urlpatterns = [ - re_path(r'^notifications$', views.personal_notifications, name='notifications'), - re_path(r'^notifications/system$', views.system_notifications, name='system_notifications'), - re_path(r'^notifications/personal$', views.personal_notifications, name='personal_notifications'), - re_path(r'^notifications/template$', views.template_notifications, name='template_notifications') + re_path(r'^notifications$', views.PersonalNotificationsView.as_view(), name='notifications'), + re_path(r'^notifications/system$', views.SystemNotificationsView.as_view(), name='system_notifications'), + re_path(r'^notifications/personal$', views.PersonalNotificationsView.as_view(), name='personal_notifications'), + re_path(r'^notifications/template$', views.TemplateNotificationsView.as_view(), name='template_notifications') ] diff --git a/dojo/notifications/views.py b/dojo/notifications/views.py index fedff5d987c..98386db17c9 100644 --- a/dojo/notifications/views.py +++ b/dojo/notifications/views.py @@ -1,10 +1,11 @@ -# # product import logging from django.contrib import messages -from django.contrib.auth.decorators import user_passes_test from django.shortcuts import render +from django.http import HttpRequest from django.utils.translation import gettext as _ +from django.views import View +from django.core.exceptions import PermissionDenied from dojo.models import Notifications from dojo.utils import get_enabled_notifications_list @@ -14,76 +15,118 @@ logger = logging.getLogger(__name__) -def render_page(request, form, scope: str): - return render(request, 'dojo/notifications.html', - {'form': form, - 'scope': scope, - 'enabled_notifications': get_enabled_notifications_list(), - 'admin': request.user.is_superuser - }) - - -def personal_notifications(request): - try: - notifications_obj = Notifications.objects.get(user=request.user, product__isnull=True) - except: - notifications_obj = Notifications(user=request.user) - - form = NotificationsForm(instance=notifications_obj) - - if request.method == 'POST': - form = NotificationsForm(request.POST, instance=notifications_obj) - if form.is_valid(): - form.save() - messages.add_message(request, - messages.SUCCESS, - _('Settings saved.'), - extra_tags='alert-success') - - add_breadcrumb(title=_("Personal notification settings"), top_level=False, request=request) - - return render_page(request, form, 'personal') - - -@user_passes_test(lambda u: u.is_superuser) -def system_notifications(request): - try: - notifications_obj = Notifications.objects.get(user=None, product__isnull=True, template=False) - except: - notifications_obj = Notifications(user=None, template=False) - - form = NotificationsForm(instance=notifications_obj) - if request.method == 'POST': - form = NotificationsForm(request.POST, instance=notifications_obj) - if form.is_valid(): - form.save() - messages.add_message(request, - messages.SUCCESS, - _('Settings saved.'), - extra_tags='alert-success') - - add_breadcrumb(title=_("System notification settings"), top_level=False, request=request) - - return render_page(request, form, 'system') - - -@user_passes_test(lambda u: u.is_superuser) -def template_notifications(request): - try: - notifications_obj = Notifications.objects.get(template=True) - except: - notifications_obj = Notifications(user=None, template=True) - - form = NotificationsForm(instance=notifications_obj) - if request.method == 'POST': - form = NotificationsForm(request.POST, instance=notifications_obj) - if form.is_valid(): - form.save() - 
messages.add_message(request, - messages.SUCCESS, - _('Settings saved.'), - extra_tags='alert-success') - - add_breadcrumb(title=_("Template notification settings"), top_level=False, request=request) - - return render_page(request, form, 'template') +class SystemNotificationsView(View): + def get_notifications(self, request: HttpRequest): + try: + notifications = Notifications.objects.get(user=None, product__isnull=True, template=False) + except Notifications.DoesNotExist: + notifications = Notifications(user=None, template=False) + + return notifications + + def check_user_permissions(self, request: HttpRequest): + if not request.user.is_superuser: + raise PermissionDenied() + + def get_form(self, request: HttpRequest, notifications: Notifications): + # Set up the args for the form + args = [request.POST] if request.method == "POST" else [] + # Set the initial form args + kwargs = { + "instance": notifications, + } + + return NotificationsForm(*args, **kwargs) + + def get_enabled_notifications(self): + return get_enabled_notifications_list() + + def get_initial_context(self, request: HttpRequest, notifications: Notifications, scope: str): + return { + 'form': self.get_form(request, notifications), + 'scope': scope, + 'enabled_notifications': self.get_enabled_notifications(), + 'admin': request.user.is_superuser + } + + def set_breadcrumbs(self, request: HttpRequest): + add_breadcrumb(title=_("System notification settings"), top_level=False, request=request) + return request + + def process_form(self, request: HttpRequest, context: dict): + if context["form"].is_valid(): + context["form"].save() + messages.add_message( + request, + messages.SUCCESS, + _('Settings saved.'), + extra_tags='alert-success') + return request, True + return request, False + + def get_template(self): + return "dojo/notifications.html" + + def get_scope(self): + return "system" + + def get(self, request: HttpRequest): + # Check permissions + self.check_user_permissions(request) + # Get the notifications object + notifications = self.get_notifications(request) + # Set up the initial context + context = self.get_initial_context(request, notifications, self.get_scope()) + # Add any breadcrumbs + request = self.set_breadcrumbs(request) + # Render the page + return render(request, self.get_template(), context) + + def post(self, request: HttpRequest): + # Check permissions + self.check_user_permissions(request) + # Get the notifications object + notifications = self.get_notifications(request) + # Set up the initial context + context = self.get_initial_context(request, notifications, self.get_scope()) + # Determine the validity of the form + request, success = self.process_form(request, context) + # Add any breadcrumbs + request = self.set_breadcrumbs(request) + # Render the page + return render(request, self.get_template(), context) + + +class PersonalNotificationsView(SystemNotificationsView): + def get_notifications(self, request: HttpRequest): + try: + notifications = Notifications.objects.get(user=request.user, product__isnull=True) + except Notifications.DoesNotExist: + notifications = Notifications(user=request.user) + return notifications + + def check_user_permissions(self, request: HttpRequest): + pass + + def get_scope(self): + return "personal" + + def set_breadcrumbs(self, request: HttpRequest): + add_breadcrumb(title=_("Personal notification settings"), top_level=False, request=request) + return request + + +class TemplateNotificationsView(SystemNotificationsView): + def get_notifications(self, request: 
HttpRequest): + try: + notifications = Notifications.objects.get(template=True) + except Notifications.DoesNotExist: + notifications = Notifications(user=None, template=True) + return notifications + + def get_scope(self): + return "template" + + def set_breadcrumbs(self, request: HttpRequest): + add_breadcrumb(title=_("Template notification settings"), top_level=False, request=request) + return request diff --git a/dojo/product/urls.py b/dojo/product/urls.py index 4625e0e8d9e..cfee2111cc6 100644 --- a/dojo/product/urls.py +++ b/dojo/product/urls.py @@ -38,7 +38,9 @@ name='edit_notifications'), re_path(r'^product/(?P\d+)/edit_meta_data$', views.edit_meta_data, name='edit_meta_data'), - re_path(r'^product/(?P\d+)/ad_hoc_finding$', views.ad_hoc_finding, + re_path( + r'^product/(?P\d+)/ad_hoc_finding$', + views.AdHocFindingView.as_view(), name='ad_hoc_finding'), re_path(r'^product/(?P\d+)/engagement_presets$', views.engagement_presets, name='engagement_presets'), diff --git a/dojo/product/views.py b/dojo/product/views.py index 537d908848e..aeb6415ea69 100755 --- a/dojo/product/views.py +++ b/dojo/product/views.py @@ -16,11 +16,12 @@ from django.db.models import Sum, Count, Q, Max, Prefetch, F, OuterRef, Subquery from django.db.models.query import QuerySet from django.core.exceptions import ValidationError, PermissionDenied -from django.http import HttpResponseRedirect, Http404, JsonResponse +from django.http import HttpResponseRedirect, Http404, JsonResponse, HttpRequest from django.shortcuts import render, get_object_or_404 from django.urls import reverse from django.utils import timezone from django.utils.translation import gettext as _ +from django.views import View from dojo.templatetags.display_tags import asvs_calc_level from dojo.filters import ProductEngagementFilter, ProductFilter, EngagementFilter, MetricsEndpointFilter, \ @@ -1217,160 +1218,283 @@ def edit_meta_data(request, pid): }) -@user_is_authorized(Product, Permissions.Finding_Add, 'pid') -def ad_hoc_finding(request, pid): - prod = Product.objects.get(id=pid) - test_type, res = Test_Type.objects.get_or_create(name=_("Pen Test")) - test = None - try: - eng = Engagement.objects.get(product=prod, name=_("Ad Hoc Engagement")) - tests = Test.objects.filter(engagement=eng) +class AdHocFindingView(View): + def get_product(self, product_id: int): + return get_object_or_404(Product, id=product_id) + + def get_test_type(self): + test_type, nil = Test_Type.objects.get_or_create(name=_("Pen Test")) + return test_type - if len(tests) != 0: - test = tests[0] + def get_engagement(self, product: Product): + try: + return Engagement.objects.get(product=product, name=_("Ad Hoc Engagement")) + except Engagement.DoesNotExist: + return Engagement.objects.create( + name=_("Ad Hoc Engagement"), + target_start=timezone.now(), + target_end=timezone.now(), + active=False, product=product) + + def get_test(self, engagement: Engagement, test_type: Test_Type): + if test := Test.objects.filter(engagement=engagement).first(): + return test else: - test = Test(engagement=eng, test_type=test_type, - target_start=timezone.now(), target_end=timezone.now()) - test.save() - except: - eng = Engagement(name=_("Ad Hoc Engagement"), target_start=timezone.now(), - target_end=timezone.now(), active=False, product=prod) - eng.save() - test = Test(engagement=eng, test_type=test_type, - target_start=timezone.now(), target_end=timezone.now()) - test.save() - form_error = False - push_all_jira_issues = jira_helper.is_push_all_issues(test) - jform = None - gform = None - 
form = AdHocFindingForm(initial={'date': timezone.now().date()}, req_resp=None, product=prod) - use_jira = jira_helper.get_jira_project(test) is not None + return Test.objects.create( + engagement=engagement, + test_type=test_type, + target_start=timezone.now(), + target_end=timezone.now()) + + def create_nested_objects(self, product: Product): + engagement = self.get_engagement(product) + test_type = self.get_test_type() + return self.get_test(engagement, test_type) + + def get_initial_context(self, request: HttpRequest, test: Test): + # Get the finding form first since it is used in another place + finding_form = self.get_finding_form(request, test.engagement.product) + product_tab = Product_Tab(test.engagement.product, title=_("Add Finding"), tab="engagements") + product_tab.setEngagement(test.engagement) + return { + "form": finding_form, + "product_tab": product_tab, + "temp": False, + "tid": test.id, + "pid": test.engagement.product.id, + "form_error": False, + "jform": self.get_jira_form(request, test, finding_form=finding_form), + "gform": self.get_github_form(request, test), + } + + def get_finding_form(self, request: HttpRequest, product: Product): + # Set up the args for the form + args = [request.POST] if request.method == "POST" else [] + # Set the initial form args + kwargs = { + "initial": {'date': timezone.now().date()}, + "req_resp": None, + "product": product, + } + # Remove the initial state on post + if request.method == "POST": + kwargs.pop("initial") + + return AdHocFindingForm(*args, **kwargs) + + def get_jira_form(self, request: HttpRequest, test: Test, finding_form: AdHocFindingForm = None): + # Determine if jira should be used + if (jira_project := jira_helper.get_jira_project(test)) is not None: + # Set up the args for the form + args = [request.POST] if request.method == "POST" else [] + # Set the initial form args + kwargs = { + "push_all": jira_helper.is_push_all_issues(test), + "prefix": "jiraform", + "jira_project": jira_project, + "finding_form": finding_form, + } + + return JIRAFindingForm(*args, **kwargs) + return None + + def get_github_form(self, request: HttpRequest, test: Test): + # Determine if github should be used + if get_system_setting("enable_github"): + # Ensure there is a github conf correctly configured for the product + config_present = GITHUB_PKey.objects.filter(product=test.engagement.product) + if config_present := config_present.exclude(git_conf_id=None): + # Set up the args for the form + args = [request.POST] if request.method == "POST" else [] + # Set the initial form args + kwargs = { + "enabled": jira_helper.is_push_all_issues(test), + "prefix": "githubform" + } + + return GITHUBFindingForm(*args, **kwargs) + return None + + def validate_status_change(self, request: HttpRequest, context: dict): + if ((context["form"]['active'].value() is False or + context["form"]['false_p'].value()) and + context["form"]['duplicate'].value() is False): - if request.method == 'POST': - form = AdHocFindingForm(request.POST, req_resp=None, product=prod) - if (form['active'].value() is False or form['false_p'].value()) and form['duplicate'].value() is False: closing_disabled = Note_Type.objects.filter(is_mandatory=True, is_active=True).count() if closing_disabled != 0: - error_inactive = ValidationError(_('Can not set a finding as inactive without adding all mandatory notes'), - code='inactive_without_mandatory_notes') + error_inactive = ValidationError( + _('Can not set a finding as inactive without adding all mandatory notes'), + 
code='inactive_without_mandatory_notes' + ) error_false_p = ValidationError( _('Can not set a finding as false positive without adding all mandatory notes'), - code='false_p_without_mandatory_notes') - if form['active'].value() is False: - form.add_error('active', error_inactive) - if form['false_p'].value(): - form.add_error('false_p', error_false_p) - messages.add_message(request, - messages.ERROR, - _('Can not set a finding as inactive or false positive without adding all mandatory notes'), - extra_tags='alert-danger') - if use_jira: - jform = JIRAFindingForm(request.POST, prefix='jiraform', push_all=push_all_jira_issues, - jira_project=jira_helper.get_jira_project(test), finding_form=form) - - if form.is_valid() and (jform is None or jform.is_valid()): - new_finding = form.save(commit=False) - new_finding.test = test - new_finding.reporter = request.user - new_finding.numerical_severity = Finding.get_numerical_severity( - new_finding.severity) - new_finding.tags = form.cleaned_data['tags'] - new_finding.save() - + code='false_p_without_mandatory_notes' + ) + if context["form"]['active'].value() is False: + context["form"].add_error('active', error_inactive) + if context["form"]['false_p'].value(): + context["form"].add_error('false_p', error_false_p) + messages.add_message( + request, + messages.ERROR, + _('Can not set a finding as inactive or false positive without adding all mandatory notes'), + extra_tags='alert-danger') + + return request + + def process_finding_form(self, request: HttpRequest, test: Test, context: dict): + finding = None + if context["form"].is_valid(): + finding = context["form"].save(commit=False) + finding.test = test + finding.reporter = request.user + finding.numerical_severity = Finding.get_numerical_severity(finding.severity) + finding.tags = context["form"].cleaned_data['tags'] + finding.save() # Save and add new endpoints - finding_helper.add_endpoints(new_finding, form) + finding_helper.add_endpoints(finding, context["form"]) + # Save the finding at the end and return + finding.save() + + return finding, request, True + else: + add_error_message_to_response("The form has errors, please correct them below.") + add_field_errors_to_response(context["form"]) + + return finding, request, False + + def process_jira_form(self, request: HttpRequest, finding: Finding, context: dict): + # Capture case if the jira not being enabled + if context["jform"] is None: + return request, True, False - new_finding.save() - # Push to jira? - push_to_jira = False + if context["jform"] and context["jform"].is_valid(): + # Push to Jira? + logger.debug('jira form valid') + push_to_jira = jira_helper.is_push_all_issues(finding) or context["jform"].cleaned_data.get('push_to_jira') jira_message = None - if jform and jform.is_valid(): - # Push to Jira? - logger.debug('jira form valid') - push_to_jira = push_all_jira_issues or jform.cleaned_data.get('push_to_jira') - - # if the jira issue key was changed, update database - new_jira_issue_key = jform.cleaned_data.get('jira_issue') - if new_finding.has_jira_issue: - jira_issue = new_finding.jira_issue - - # everything in DD around JIRA integration is based on the internal id of the issue in JIRA - # instead of on the public jira issue key. - # I have no idea why, but it means we have to retrieve the issue from JIRA to get the internal JIRA id. 
- # we can assume the issue exist, which is already checked in the validation of the jform - - if not new_jira_issue_key: - jira_helper.finding_unlink_jira(request, new_finding) - jira_message = 'Link to JIRA issue removed successfully.' - - elif new_jira_issue_key != new_finding.jira_issue.jira_key: - jira_helper.finding_unlink_jira(request, new_finding) - jira_helper.finding_link_jira(request, new_finding, new_jira_issue_key) - jira_message = 'Changed JIRA link successfully.' - else: - logger.debug('finding has no jira issue yet') - if new_jira_issue_key: - logger.debug( - 'finding has no jira issue yet, but jira issue specified in request. trying to link.') - jira_helper.finding_link_jira(request, new_finding, new_jira_issue_key) - jira_message = 'Linked a JIRA issue successfully.' - - if 'githubform-push_to_github' in request.POST: - gform = GITHUBFindingForm(request.POST, prefix='jiragithub', enabled=push_all_jira_issues) - if gform.is_valid(): - add_external_issue(new_finding, 'github') + # if the jira issue key was changed, update database + new_jira_issue_key = context["jform"].cleaned_data.get('jira_issue') + if finding.has_jira_issue: + jira_issue = finding.jira_issue + # everything in DD around JIRA integration is based on the internal id of the issue in JIRA + # instead of on the public jira issue key. + # I have no idea why, but it means we have to retrieve the issue from JIRA to get the internal JIRA id. + # we can assume the issue exist, which is already checked in the validation of the jform + if not new_jira_issue_key: + jira_helper.finding_unlink_jira(request, finding) + jira_message = 'Link to JIRA issue removed successfully.' + + elif new_jira_issue_key != finding.jira_issue.jira_key: + jira_helper.finding_unlink_jira(request, finding) + jira_helper.finding_link_jira(request, finding, new_jira_issue_key) + jira_message = 'Changed JIRA link successfully.' + else: + logger.debug('finding has no jira issue yet') + if new_jira_issue_key: + logger.debug( + 'finding has no jira issue yet, but jira issue specified in request. trying to link.') + jira_helper.finding_link_jira(request, finding, new_jira_issue_key) + jira_message = 'Linked a JIRA issue successfully.' 
+ # Determine if a message should be added + if jira_message: + messages.add_message( + request, messages.SUCCESS, jira_message, extra_tags="alert-success" + ) + + return request, True, push_to_jira + else: + add_field_errors_to_response(context["jform"]) + + return request, False, False - finding_helper.save_vulnerability_ids(new_finding, form.cleaned_data['vulnerability_ids'].split()) + def process_github_form(self, request: HttpRequest, finding: Finding, context: dict): + if "githubform-push_to_github" not in request.POST: + return request, True - new_finding.save(push_to_jira=push_to_jira) + if context["gform"].is_valid(): + add_external_issue(finding, 'github') - if 'request' in form.cleaned_data or 'response' in form.cleaned_data: + return request, True + else: + add_field_errors_to_response(context["gform"]) + + return request, False + + def process_forms(self, request: HttpRequest, test: Test, context: dict): + form_success_list = [] + # Set vars for the completed forms + # Validate finding mitigation + request = self.validate_status_change(request, context) + # Check the validity of the form overall + finding, request, success = self.process_finding_form(request, test, context) + form_success_list.append(success) + request, success, push_to_jira = self.process_jira_form(request, finding, context) + form_success_list.append(success) + request, success = self.process_github_form(request, finding, context) + form_success_list.append(success) + # Determine if all forms were successful + all_forms_valid = all(form_success_list) + # Check the validity of all the forms + if all_forms_valid: + # if we're removing the "duplicate" in the edit finding screen + finding_helper.save_vulnerability_ids(finding, context["form"].cleaned_data["vulnerability_ids"].split()) + # Push things to jira if needed + finding.save(push_to_jira=push_to_jira) + # Save the burp req resp + if "request" in context["form"].cleaned_data or "response" in context["form"].cleaned_data: burp_rr = BurpRawRequestResponse( - finding=new_finding, - burpRequestBase64=base64.b64encode(form.cleaned_data['request'].encode()), - burpResponseBase64=base64.b64encode(form.cleaned_data['response'].encode()), + finding=finding, + burpRequestBase64=base64.b64encode(context["form"].cleaned_data["request"].encode()), + burpResponseBase64=base64.b64encode(context["form"].cleaned_data["response"].encode()), ) burp_rr.clean() burp_rr.save() + # Add a success message + messages.add_message( + request, + messages.SUCCESS, + _('Finding added successfully.'), + extra_tags='alert-success') - messages.add_message(request, - messages.SUCCESS, - _('Finding added successfully.'), - extra_tags='alert-success') - + return finding, request, all_forms_valid + + def get_template(self): + return "dojo/ad_hoc_findings.html" + + def get(self, request: HttpRequest, product_id: int): + # Get the initial objects + product = self.get_product(product_id) + # Make sure the user is authorized + user_has_permission_or_403(request.user, product, Permissions.Finding_Add) + # Create the necessary nested objects + test = self.create_nested_objects(product) + # Set up the initial context + context = self.get_initial_context(request, test) + # Render the form + return render(request, self.get_template(), context) + + def post(self, request: HttpRequest, product_id: int): + # Get the initial objects + product = self.get_product(product_id) + # Make sure the user is authorized + user_has_permission_or_403(request.user, product, Permissions.Finding_Add) + # Create the 
necessary nested objects + test = self.create_nested_objects(product) + # Set up the initial context + context = self.get_initial_context(request, test) + # Process the form + _, request, success = self.process_forms(request, test, context) + # Handle the case of a successful form + if success: if '_Finished' in request.POST: return HttpResponseRedirect(reverse('view_test', args=(test.id,))) else: return HttpResponseRedirect(reverse('add_findings', args=(test.id,))) else: - form_error = True - add_error_message_to_response(_('The form has errors, please correct them below.')) - add_field_errors_to_response(jform) - add_field_errors_to_response(form) - - else: - if use_jira: - jform = JIRAFindingForm(push_all=jira_helper.is_push_all_issues(test), prefix='jiraform', - jira_project=jira_helper.get_jira_project(test), finding_form=form) - - if get_system_setting('enable_github'): - if GITHUB_PKey.objects.filter(product=test.engagement.product).count() != 0: - gform = GITHUBFindingForm(enabled=push_all_jira_issues, prefix='githubform') - else: - gform = None - - product_tab = Product_Tab(prod, title=_("Add Finding"), tab="engagements") - product_tab.setEngagement(eng) - return render(request, 'dojo/ad_hoc_findings.html', - {'form': form, - 'product_tab': product_tab, - 'temp': False, - 'tid': test.id, - 'pid': pid, - 'form_error': form_error, - 'jform': jform, - 'gform': gform, - }) + context["form_error"] = True + # Render the form + return render(request, self.get_template(), context) @user_is_authorized(Product, Permissions.Product_View, 'pid') diff --git a/dojo/reports/views.py b/dojo/reports/views.py index 06aef9ce627..a0b78e1d44a 100644 --- a/dojo/reports/views.py +++ b/dojo/reports/views.py @@ -28,7 +28,7 @@ from dojo.authorization.roles_permissions import Permissions from dojo.authorization.authorization import user_has_permission_or_403 from dojo.finding.queries import get_authorized_findings -from dojo.finding.views import get_filtered_findings +from dojo.finding.views import BaseListFindings logger = logging.getLogger(__name__) @@ -807,7 +807,12 @@ def get_findings(request): user_has_permission_or_403(request.user, obj, Permissions.Test_View) request.GET = QueryDict(query) - findings = get_filtered_findings(request, pid, eid, tid, filter_name).qs + list_findings = BaseListFindings( + filter_name=filter_name, + product_id=pid, + engagement_id=eid, + test_id=tid) + findings = list_findings.get_fully_filtered_findings(request).qs return findings, obj diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 09861a6af7a..82d4303a941 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1237,7 +1237,7 @@ def saml2_attrib_map_format(dict): 'JFrog Xray Scan': ['title', 'description', 'component_name', 'component_version'], 'CycloneDX Scan': ['vuln_id_from_tool', 'component_name', 'component_version'], 'SSLyze Scan (JSON)': ['title', 'description'], - 'Harbor Vulnerability Scan': ['title'], + 'Harbor Vulnerability Scan': ['title', 'mitigation'], 'Rusty Hog Scan': ['file_path', 'payload'], 'StackHawk HawkScan': ['vuln_id_from_tool', 'component_name', 'component_version'], 'Hydra Scan': ['title', 'description'], @@ -1319,7 +1319,7 @@ def saml2_attrib_map_format(dict): # List of fields that are known to be usable in hash_code computation) # 'endpoints' is a pseudo field that uses the endpoints (for dynamic scanners) # 'unique_id_from_tool' is often not needed here as it can be used directly in the dedupe algorithm, but it's also possible 
to use it for hashing -HASHCODE_ALLOWED_FIELDS = ['title', 'cwe', 'vulnerability_ids', 'line', 'file_path', 'payload', 'component_name', 'component_version', 'description', 'endpoints', 'unique_id_from_tool', 'severity', 'vuln_id_from_tool'] +HASHCODE_ALLOWED_FIELDS = ['title', 'cwe', 'vulnerability_ids', 'line', 'file_path', 'payload', 'component_name', 'component_version', 'description', 'endpoints', 'unique_id_from_tool', 'severity', 'vuln_id_from_tool', 'mitigation'] # Adding fields to the hash_code calculation regardless of the previous settings HASH_CODE_FIELDS_ALWAYS = ['service'] diff --git a/dojo/templates/dojo/ad_hoc_findings.html b/dojo/templates/dojo/ad_hoc_findings.html index 6caceb143a6..1a4bc78d4e8 100644 --- a/dojo/templates/dojo/ad_hoc_findings.html +++ b/dojo/templates/dojo/ad_hoc_findings.html @@ -24,6 +24,8 @@

Add Findings to a Test

     {% csrf_token %}
     {% include "dojo/form_fields.html" with form=form %}
+    {% block additional_forms %}
+    {% endblock additional_forms %}
     {% if jform %}

JIRA


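The dojo/notifications refactor earlier in this diff replaces three function-based views with a single SystemNotificationsView whose subclasses override only small hooks (get_notifications, get_scope, set_breadcrumbs, check_user_permissions). A minimal sketch of how one more scope could be wired onto the same hooks is below; the AuditNotificationsView class, its URL, and its scope string are illustrative only and not part of this change, and the add_breadcrumb import path is assumed.

```python
# Hypothetical sketch only (not part of this PR): adding another notification
# scope on top of the hooks exposed by SystemNotificationsView.
from django.urls import re_path
from django.utils.translation import gettext as _

from dojo.notifications.views import SystemNotificationsView
from dojo.utils import add_breadcrumb  # assumed to be the existing import path


class AuditNotificationsView(SystemNotificationsView):
    # get_notifications() and check_user_permissions() are inherited unchanged,
    # so this scope reuses the system-wide Notifications row and stays
    # superuser-only; only the scope label and the breadcrumb differ.
    def get_scope(self):
        return "audit"

    def set_breadcrumbs(self, request):
        add_breadcrumb(title=_("Audit notification settings"), top_level=False, request=request)
        return request


urlpatterns = [
    re_path(r'^notifications/audit$', AuditNotificationsView.as_view(), name='audit_notifications'),
]
```

Keeping get() and post() solely on the base class means each subclass stays a thin configuration layer, which is the main maintainability gain of the class-based rewrite.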
diff --git a/dojo/templates/dojo/add_findings.html b/dojo/templates/dojo/add_findings.html index 966bab5590d..76fb340250e 100644 --- a/dojo/templates/dojo/add_findings.html +++ b/dojo/templates/dojo/add_findings.html @@ -57,6 +57,8 @@

     {% csrf_token %}
     {% include "dojo/form_fields.html" with form=form %}
+    {% block additional_forms %}
+    {% endblock additional_forms %}
     {% if jform %}

JIRA


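A pattern repeated throughout the new dojo/product/views.py code above (get_finding_form, get_jira_form, get_github_form, and the notifications get_form) is to bind request.POST positionally only on POST and keep everything else in kwargs. A stripped-down illustration of that pattern, using a hypothetical ExampleForm rather than any DefectDojo form, is:

```python
# Minimal illustration of the bound/unbound form construction used in the
# refactored views; ExampleForm and build_form are hypothetical names.
from django import forms
from django.http import HttpRequest


class ExampleForm(forms.Form):
    title = forms.CharField(max_length=100)


def build_form(request: HttpRequest) -> ExampleForm:
    # Bind the submitted data only on POST so GET renders an unbound form
    # and POST validates against request.POST.
    args = [request.POST] if request.method == "POST" else []
    kwargs = {"initial": {"title": "draft"}}
    if request.method == "POST":
        # Mirror get_finding_form in this diff: drop the initial data once
        # real data is bound.
        kwargs.pop("initial")
    return ExampleForm(*args, **kwargs)
```

The same construction lets one method serve both the unbound GET render and the bound POST validation without duplicating form setup.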
diff --git a/dojo/templates/dojo/add_group.html b/dojo/templates/dojo/add_group.html index 5e236adc313..b52a1740be5 100644 --- a/dojo/templates/dojo/add_group.html +++ b/dojo/templates/dojo/add_group.html @@ -17,12 +17,16 @@ {% block content %} {% csrf_token %}
-    Default Information
-    {% include "dojo/form_fields.html" with form=form %}
+    {% block group_form %}
+        Default Information
+        {% include "dojo/form_fields.html" with form=form %}
+    {% endblock group_form %}
-    Global Role
-    {% include "dojo/form_fields.html" with form=global_role_form %}
+    {% block global_role_form %}
+        Global Role
+        {% include "dojo/form_fields.html" with form=global_role_form %}
+    {% endblock global_role_form %}
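For the dojo/reports/views.py hunk earlier in this diff, report generation now goes through the new BaseListFindings class instead of the removed get_filtered_findings helper. A condensed sketch of that call path is below; the ids and filter name are placeholders, and only the constructor keywords and the get_fully_filtered_findings(...).qs chain are taken from the change itself.

```python
# Condensed sketch of the new report call path; argument values are placeholders.
from dojo.finding.views import BaseListFindings


def findings_for_report(request, pid=None, eid=None, tid=None, filter_name="Open"):
    list_findings = BaseListFindings(
        filter_name=filter_name,
        product_id=pid,
        engagement_id=eid,
        test_id=tid,
    )
    # .qs yields the filtered queryset, matching the previous
    # get_filtered_findings(request, ...).qs behaviour.
    return list_findings.get_fully_filtered_findings(request).qs
```

Routing reports through the same class as the findings list keeps the two filter implementations from drifting apart.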
diff --git a/dojo/templates/dojo/edit_finding.html b/dojo/templates/dojo/edit_finding.html index 4cc29a4f3c6..dc52e0fd2f0 100644 --- a/dojo/templates/dojo/edit_finding.html +++ b/dojo/templates/dojo/edit_finding.html @@ -46,6 +46,8 @@

GitHub

     {% endif %}
     {% include "dojo/form_fields.html" with form=form %}
+    {% block additional_forms %}
+    {% endblock additional_forms %}
     {% if finding.duplicate_finding %}
         [original:
diff --git a/dojo/templates/dojo/findings_list_snippet.html b/dojo/templates/dojo/findings_list_snippet.html
index 755579ddad7..57f2d96d567 100644
--- a/dojo/templates/dojo/findings_list_snippet.html
+++ b/dojo/templates/dojo/findings_list_snippet.html
@@ -263,419 +263,423 @@

- {% if not product_tab or product_tab and product_tab.product|has_object_permission:"Finding_Edit" %} - + {% endif %} + + - {% endif %} - - - - - - - - {% if system_settings.enable_finding_sla %} + {% dojo_sort request 'Name' 'title' %} + - {% endif %} - - - - {% if system_settings.enable_jira %} - {% if jira_project and product_tab or not product_tab %} + + + + {% if system_settings.enable_finding_sla %} + {% endif %} + + + + {% if system_settings.enable_jira %} + {% if jira_project and product_tab or not product_tab %} + + + + {% endif %} + {% endif %} + {% if 'is_finding_groups_enabled'|system_setting_enabled %} - {% endif %} - {% endif %} - {% if 'is_finding_groups_enabled'|system_setting_enabled %} - {% endif %} - {% if show_product_column and product_tab is None %} - - {% endif %} - - + {% endblock header %} {% for finding in findings %} - {% if not product_tab or product_tab and product_tab.product|has_object_permission:"Finding_Edit" %} - - {% endif %} - + {% endif %} + - - + - - + + + - + + {% if system_settings.enable_finding_sla %} + {% endif %} - - - {% if system_settings.enable_finding_sla %} - {% endif %} - + + {% if system_settings.enable_jira %} + {% if jira_project and product_tab or not product_tab %} + + + + {% endif %} {% endif %} - - - - {% if system_settings.enable_jira %} - {% if jira_project and product_tab or not product_tab %} - - - {% endif %} - {% endif %} - {% if 'is_finding_groups_enabled'|system_setting_enabled %} - - {% endif %} - {% if show_product_column and product_tab is None %} - {% endif %} - - + + {% endblock body %} {% endfor %} @@ -698,37 +702,8 @@

{% block postscript %} + -{% endblock %} +{% endblock postscript %} diff --git a/dojo/templates/dojo/view_finding.html b/dojo/templates/dojo/view_finding.html index eac426e57c1..8feb62214a5 100755 --- a/dojo/templates/dojo/view_finding.html +++ b/dojo/templates/dojo/view_finding.html @@ -42,6 +42,9 @@

         {{ finding.date | naturalday }}
     {% endif %}
+    {% if latest_test_import_finding_action %}
+        , Last Mentioned in (Re)Import: {{ latest_test_import_finding_action.created | naturalday }} as {{ latest_test_import_finding_action.get_action_display }}
+    {% endif %}

- - - {% if system_settings.enable_finding_sla %} - - {% endif %} - {% if finding.scanner_confidence %} - - {% endif %} - - {% if finding.risk_acceptance_set.all %} - - {% endif %} - {% if finding.duplicate_finding %} - - {% endif %} - {% if duplicate_cluster and not finding.duplicate %} - - {% elif duplicate_cluster and finding.duplicate %} - - {% endif %} - - - - {% if finding.publish_date %} - - {% endif %} - {% if finding.planned_remediation_date %} - - {% endif%} - {% if finding.planned_remediation_version %} - - {% endif %} - - {% if finding.mitigated %} - - - {% endif %} - - - + {% block header_head %} + + + {% if system_settings.enable_finding_sla %} + + {% endif %} + {% if finding.scanner_confidence %} + + {% endif %} + + {% if finding.risk_acceptance_set.all %} + + {% endif %} + {% if finding.duplicate_finding %} + + {% endif %} + {% if duplicate_cluster and not finding.duplicate %} + + {% elif duplicate_cluster and finding.duplicate %} + + {% endif %} + + + + {% if finding.publish_date %} + + {% endif %} + {% if finding.planned_remediation_date %} + + {% endif%} + {% if finding.planned_remediation_version %} + + {% endif %} + + {% if finding.mitigated %} + + + {% endif %} + + + + {% endblock header_head %} - - + + {% if system_settings.enable_finding_sla %} + + {% endif %} + {% if finding.scanner_confidence %} + + {% endif %} + - {% if system_settings.enable_finding_sla %} - - {% endif %} - {% if finding.scanner_confidence %} - - {% endif %} - - {% if finding.risk_acceptance_set.all %} - - {% endif %} - {% if finding.duplicate_finding %} + {% if finding.risk_acceptance_set.all %} + + {% endif %} + {% if finding.duplicate_finding %} + + {% endif %} + {% if duplicate_cluster %} - {% endif %} - {% if duplicate_cluster %} - - {% endif %} - - - - {% if finding.publish_date %} - - {% endif %} - {% if finding.planned_remediation_date %} - - {% endif %} - {% if finding.planned_remediation_version %} - - {% endif %} - - {% if finding.mitigated %} - - - {% endif %} - + + {% if finding.publish_date %} + {% endif %} - {% endif %} - - - {% endwith %} + {% if finding.planned_remediation_date %} + + {% endif %} + {% if finding.planned_remediation_version %} + + {% endif %} + + {% if finding.mitigated %} + + + {% endif %} + + + + {% endwith %} + {% endblock header_body %}
- + {% block header %} + {% if not product_tab or product_tab and product_tab.product|has_object_permission:"Finding_Edit" %} + + + + {% trans "Severity" %} - {% trans "Severity" %} - - {% comment %} The display field is translated in the function. No need to translate here as well{% endcomment %} - {% dojo_sort request 'Name' 'title' %} - - {% trans "CWE" %} - - {% trans "Vulnerability Id" %} - - {% if filter_name == 'Closed' %} - {% comment %} The display field is translated in the function. No need to translate here as well{% endcomment %} - {% dojo_sort request 'Closed Date' 'mitigated' %} - {% else %} + {% comment %} The display field is translated in the function. No need to translate here as well{% endcomment %} - {% dojo_sort request 'Date' 'date' %} - {% endif %} - - {% trans "Age" %} - - {% trans "SLA" %} + {% trans "CWE" %} - {% trans "Reporter" %} - - {% trans "Found By" %} - - {% trans "Status" %} - + {% trans "Vulnerability Id" %} + + {% if filter_name == 'Closed' %} + {% comment %} The display field is translated in the function. No need to translate here as well{% endcomment %} + {% dojo_sort request 'Closed Date' 'mitigated' %} + {% else %} + {% comment %} The display field is translated in the function. No need to translate here as well{% endcomment %} + {% dojo_sort request 'Date' 'date' %} + {% endif %} + + {% trans "Age" %} + - {% trans "Jira" %} + {% trans "SLA" %} + {% trans "Reporter" %} + + {% trans "Found By" %} + + {% trans "Status" %} + + {% trans "Jira" %} + + {% trans "JIRA Age" %} + + {% trans "JIRA Change" %} + - {% trans "JIRA Age" %} + {% trans "Group" %} - {% trans "JIRA Change" %} + {% endif %} + {% if show_product_column and product_tab is None %} + + {% comment %} The display field is translated in the function. No need to translate here as well{% endcomment %} + {% dojo_sort request 'Product' 'test__engagement__product__name' %} - {% trans "Group" %} + {% trans "Service" %} - {% comment %} The display field is translated in the function. No need to translate here as well{% endcomment %} - {% dojo_sort request 'Product' 'test__engagement__product__name' %} + + {% trans "Planned Remediation" %} - {% trans "Service" %} - - {% trans "Planned Remediation" %} -
-
- -
-
-
-
+
+ +
+
+
+
- -
- - {{ finding.severity_display }} - - - {% if finding.title %} - {{ finding.title|truncatechars_html:60 }} - {% else %} - {{ finding.id }} - {% endif %} - {% if finding.file_path %} - - {% endif %} - {% if finding.endpoints.all %} - + + {{ finding.severity_display }} + + + {% if finding.title %} + {{ finding.title|truncatechars_html:60 }} + {% else %} + {{ finding.id }} + {% endif %} + {% if finding.file_path %} + + {% endif %} + {% if finding.endpoints.all %} + - {% endif %} - {% if finding.component_name %} - - {% endif %} - {% if finding.notes.all %} - - - ({{ finding.notes.count }}) - - {% endif %} - {% include "dojo/snippets/tags.html" with tags=finding.tags.all %} - - {% if finding.cwe > 0 %} - - {{ finding.cwe|default:"" }} - - {% endif %} - - {% with finding|first_vulnerability_id as first_vulnerability_id %} - {% if first_vulnerability_id %} - {% if first_vulnerability_id|has_vulnerability_url %} - - {{ first_vulnerability_id }} + {% else %} + ✕ {{ endpoint_status.endpoint }} +
+ {% endif %} + {% endfor %} + " data-placement="right" data-container="body" data-original-title="Endpoints ({{ finding.active_endpoint_count }} Active, {{ finding.mitigated_endpoint_count }} Mitigated)" title=""> + {% endif %} + {% if finding.component_name %} + + {% endif %} + {% if finding.notes.all %} + +
+ ({{ finding.notes.count }}) + + {% endif %} + {% include "dojo/snippets/tags.html" with tags=finding.tags.all %} +
+ {% if finding.cwe > 0 %} + + {{ finding.cwe|default:"" }} - {% else %} - {{ first_vulnerability_id }} + {% endif %} + + {% with finding|first_vulnerability_id as first_vulnerability_id %} + {% if first_vulnerability_id %} + {% if first_vulnerability_id|has_vulnerability_url %} + + {{ first_vulnerability_id }} + + {% else %} + {{ first_vulnerability_id }} + {% endif %} {% endif %} + {% endwith %} + + {% if filter_name == 'Closed' %} + {{ finding.mitigated|date }} + {% else %} + {{ finding.date }} {% endif %} - {% endwith %} - - {% if filter_name == 'Closed' %} - {{ finding.mitigated|date }} - {% else %} - {{ finding.date }} + + {{ finding.age }} + + {{ finding|finding_sla }} + - {{ finding.age }} - - {{ finding|finding_sla }} + {% if finding.reporter.get_full_name and finding.reporter.get_full_name.strip %} + {{ finding.reporter.get_full_name }} + {% else %} + {{ finding.reporter }} + {% endif %} - {% if finding.reporter.get_full_name and finding.reporter.get_full_name.strip %} - {{ finding.reporter.get_full_name }} - {% else %} - {{ finding.reporter }} + + {{ finding.found_by.all|join:", " }} + + {{ finding|finding_display_status|safe }} {{ finding|import_history }} + + {% if finding.has_jira_group_issue %} + {{ finding.finding_group | jira_key }} + {% elif finding.has_jira_issue %} + {{ finding | jira_key }} + {% endif %} + + {% if finding.has_jira_group_issue %} + {{ finding.finding_group | jira_creation | timesince }} + {% else %} + {{ finding | jira_creation | timesince }} + {% endif %} + + {% if finding.has_jira_group_issue %} + {{ finding.finding_group | jira_change | timesince }} + {% else %} + {{ finding | jira_change | timesince }} + {% endif %} + - {{ finding.found_by.all|join:", " }} - - {{ finding|finding_display_status|safe }} {{ finding|import_history }} - - {% if finding.has_jira_group_issue %} - {{ finding.finding_group | jira_key }} - {% elif finding.has_jira_issue %} - {{ finding | jira_key }} - {% endif %} - - {% if finding.has_jira_group_issue %} - {{ finding.finding_group | jira_creation | timesince }} - {% else %} - {{ finding | jira_creation | timesince }} + {% if 'is_finding_groups_enabled'|system_setting_enabled %} + + {% if finding.has_finding_group %} + {{ finding.finding_group.name }} {% endif %} - {% if finding.has_jira_group_issue %} - {{ finding.finding_group | jira_change | timesince }} - {% else %} - {{ finding | jira_change | timesince }} - {% endif %} + {% endif %} + {% if show_product_column and product_tab is None %} + + {{ finding.test.engagement.product }} - {% if finding.has_finding_group %} - {{ finding.finding_group.name }} - {% endif %} - - {{ finding.test.engagement.product }} + {% if finding.service %}{{ finding.service }}{% endif %} - {% if finding.service %}{{ finding.service }}{% endif %} - - {% if finding.planned_remediation_date %}{{ finding.planned_remediation_date }}{% endif %} - + {% if finding.planned_remediation_date %}{{ finding.planned_remediation_date }}{% endif %} +
IDSeveritySLAScanner ConfidenceStatusRisk AcceptanceOriginalDuplicatesDuplicate ClusterTypeDate discoveredAgeVuln Publish datePlanned Remediation{% trans "Planned Remediation version" %}ReporterDate MitigatedMitigated ByCWEVulnerability IdFound byIDSeveritySLAScanner ConfidenceStatusRisk AcceptanceOriginalDuplicatesDuplicate ClusterTypeDate discoveredAgeVuln Publish datePlanned Remediation{% trans "Planned Remediation version" %}ReporterDate MitigatedMitigated ByCWEVulnerability IdFound by
{{ finding.id }} - - {% if finding.severity %} - {% if finding.cvssv3 %} - - {% endif %} - {{ finding.severity_display }} - {% if finding.cvssv3_score %} - ({{ finding.cvssv3_score }}) + {% block header_body %} + {{ finding.id }} + + {% if finding.severity %} + {% if finding.cvssv3 %} + + {% endif %} + {{ finding.severity_display }} + {% if finding.cvssv3_score %} + ({{ finding.cvssv3_score }}) + {% endif %} + {% if finding.cvssv3 %} + + {% endif %} + {% else %} + Unknown {% endif %} - {% if finding.cvssv3 %} - + + + {{ finding|finding_sla }} + {{finding.get_scanner_confidence_text}} + {% comment %} + {% if finding.duplicate %} + {% include "dojo/finding_related_actions.html" with similar_finding=finding finding_context=finding intro=finding|finding_display_status|safe %} + {% else %} + {{ finding|finding_display_status|safe }} {% endif %} - {% else %} - Unknown - {% endif %} - - - {{ finding|finding_sla }} - {{finding.get_scanner_confidence_text}} - {% comment %} - {% if finding.duplicate %} - {% include "dojo/finding_related_actions.html" with similar_finding=finding finding_context=finding intro=finding|finding_display_status|safe %} - {% else %} + {% endcomment %} {{ finding|finding_display_status|safe }} - {% endif %} - {% endcomment %} - {{ finding|finding_display_status|safe }} -  {{ finding|import_history }} - - {% for ra in finding.risk_acceptance_set.all|slice:":5" %} - - {% endfor %} +  {{ finding|import_history }} + {% for ra in finding.risk_acceptance_set.all|slice:":5" %} + + {% endfor %} + + + -
- -
-
- {% if finding.static_finding and finding.dynamic_finding > 0 %} - Static/Dynamic - {% elif finding.static_finding > 0 %} - Static - {% else %} - Dynamic {% endif %} - {{ finding.date }}{{ finding.age }} days{{ finding.publish_date }}{{ finding.planned_remediation_date }}{{ finding.planned_remediation_version }}{{ finding.reporter }}{{ finding.mitigated }}{{ finding.mitigated_by }} - {% if finding.cwe > 0 %} - - {{ finding.cwe }} - - {% endif %} + {% if finding.static_finding and finding.dynamic_finding > 0 %} + Static/Dynamic + {% elif finding.static_finding > 0 %} + Static + {% else %} + Dynamic + {% endif %} - {% with finding|first_vulnerability_id as first_vulnerability_id %} - {% if first_vulnerability_id %} - {% if first_vulnerability_id|has_vulnerability_url %} - - {{ first_vulnerability_id }} - - {% else %} - {{ first_vulnerability_id }} + {{ finding.date }}{{ finding.age }} days{{ finding.publish_date }} {% for scanner in found_by %} - {{ scanner }} - {% endfor %}{{ finding.planned_remediation_date }}{{ finding.planned_remediation_version }}{{ finding.reporter }}{{ finding.mitigated }}{{ finding.mitigated_by }} + {% if finding.cwe > 0 %} + + {{ finding.cwe }} + + {% endif %} + + {% with finding|first_vulnerability_id as first_vulnerability_id %} + {% if first_vulnerability_id %} + {% if first_vulnerability_id|has_vulnerability_url %} + + {{ first_vulnerability_id }} + + {% else %} + {{ first_vulnerability_id }} + {% endif %} + {% endif %} + {% for scanner in found_by %} + {{ scanner }} + {% endfor %}

@@ -771,6 +778,107 @@

Similar Findings ({{ similar_findings.paginator.count }}

+ + {% if 'TRACK_IMPORT_HISTORY'|setting_enabled and latest_test_import_finding_action %} +
+
+
+

+ {% trans "Import History" %} ({{ test_import_finding_actions_count }}) + + + +

+
+
+
+    {% include "dojo/filter_snippet.html" with form=test_import_filter.form %}
+    {% include "dojo/filter_snippet.html" with form=test_import_finding_action_filter.form %}
+
+ {% if paged_test_import_finding_actions %} + + + + + + + + + + + + + + + {% for test_import_finding_action in paged_test_import_finding_actions %} + + + + + + + + + + + + {% endfor %} + +
{% trans "Action" %}{% trans "Date/Time" %}{% trans "Import Type" %}{% trans "Branch/Tag" %}{% trans "Build ID" %}{% trans "Commit" %}{% trans "Version" %}{% trans "Endpoint" %}
+ {{ test_import_finding_action.get_action_display }} + + + {{ test_import_finding_action.test_import.created|date:"DATETIME_FORMAT" }} + + {{ test_import_finding_action.test_import|import_settings_tag }} + + {{ test_import_finding_action.test_import.type }} + + {{ test_import_finding_action.test_import.branch_tag|default_if_none:"" }} + + {{ test_import_finding_action.test_import.build_id|default_if_none:"" }} + + {{ test_import_finding_action.test_import.commit_hash|default_if_none:"" }} + + {{ test_import_finding_action.test_import.version|default_if_none:"" }} + + {{ test_import_finding_action.test_import.import_settings.endpoint|default_if_none:"" }} +
+ {% else %} +
+

+ {% trans "No import history found." %} +

+
+ {% endif %} +
+ {% include "dojo/paging_snippet.html" with page=paged_test_import_finding_actions prefix='test_import_finding_actions' page_size=True %} +
+
+
+ {% endif %} + + {% include "dojo/snippets/endpoints.html" with finding=finding destination="UI" %}
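The view_finding.html additions above read latest_test_import_finding_action and paged_test_import_finding_actions from the template context. One plausible way the view side could supply the "Last Mentioned in (Re)Import" value is sketched here; the queryset, and the model and field names it relies on, are inferred from the template usage above rather than taken from this PR.

```python
# Assumption-heavy sketch (not from this PR): derive the value rendered as
# "Last Mentioned in (Re)Import" from the finding's import history.
from dojo.models import Test_Import_Finding_Action


def get_latest_test_import_finding_action(finding):
    # Model and field names are inferred from the template fields used above.
    return (
        Test_Import_Finding_Action.objects
        .filter(finding=finding)
        .select_related("test_import")
        .order_by("-created")
        .first()
    )
```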
diff --git a/dojo/templates/dojo/view_group.html b/dojo/templates/dojo/view_group.html index 32f5425cf02..afa8b1ccc59 100644 --- a/dojo/templates/dojo/view_group.html +++ b/dojo/templates/dojo/view_group.html @@ -271,28 +271,30 @@

- - - - - - - - - - - - - - - - + {% block metadata %} + + + + + + + + + + + + + + + + + {% endblock metadata %}
Number of Users - {{ group.users.all|length }} -
Number of Product Types - {{ product_types|length }} -
Number of Products - {{ products|length }} -
Global role{% if group.global_role.role %} {{ group.global_role.role }} {% endif %}
Number of Users + {{ group.users.all|length }} +
Number of Product Types + {{ product_types|length }} +
Number of Products + {{ products|length }} +
Global role{% if group.global_role.role %} {{ group.global_role.role }} {% endif %}

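Tying back to the dojo/settings/settings.dist.py hunk above: adding 'mitigation' to HASHCODE_ALLOWED_FIELDS and to the Harbor Vulnerability Scan entry means two findings that differ only in their mitigation text can now hash differently and therefore stop deduplicating against each other. A toy, self-contained illustration of that effect (deliberately not DefectDojo's real hashing code) is:

```python
# Toy illustration only - not DefectDojo's implementation - of how adding a
# field to the hash-code inputs changes which findings are treated as duplicates.
import hashlib


def hash_code(finding: dict, fields: list[str]) -> str:
    material = "|".join(str(finding.get(field, "")) for field in fields)
    return hashlib.sha256(material.encode("utf-8")).hexdigest()


a = {"title": "Outdated base image", "mitigation": "Upgrade the base image to 3.18"}
b = {"title": "Outdated base image", "mitigation": "Upgrade the base image to 3.19"}

# Hashing on title alone (the old Harbor configuration): the findings collide.
assert hash_code(a, ["title"]) == hash_code(b, ["title"])

# Hashing on title plus mitigation (the new configuration): they stay distinct.
assert hash_code(a, ["title", "mitigation"]) != hash_code(b, ["title", "mitigation"])
```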
diff --git a/dojo/templates/dojo/view_test.html b/dojo/templates/dojo/view_test.html index 832f1ed6612..7e4ed933585 100644 --- a/dojo/templates/dojo/view_test.html +++ b/dojo/templates/dojo/view_test.html @@ -858,397 +858,401 @@

class="table-striped tablesorter-bootstrap table table-condensed table-hover"> - {% if test|has_object_permission:"Test_Edit" or test|has_object_permission:"Test_Delete" %} - - + {% block header %} + {% if test|has_object_permission:"Test_Edit" or test|has_object_permission:"Test_Delete" %} + + + + {% endif %} + + + {% trans "Severity" %} - {% endif %} - - - {% trans "Severity" %} - - - {% trans "Name" %} - - - {% trans "CWE" %} - - - {% trans "Vulnerability Id" %} - - - {% trans "Date" %} - - - {% trans "Age" %} - - {% if system_settings.enable_finding_sla %} - {% trans "SLA" %} + {% trans "Name" %} - {% endif %} - - {% trans "Reporter" %} - - - {% trans "Status" %} - - {% if system_settings.enable_jira %} - {% if jira_project and product_tab or not product_tab %} - - {% trans "Jira" %} - + + {% trans "CWE" %} + + + {% trans "Vulnerability Id" %} + + + {% trans "Date" %} + + + {% trans "Age" %} + + {% if system_settings.enable_finding_sla %} - {% trans "Jira Age" %} + {% trans "SLA" %} + {% endif %} + + {% trans "Reporter" %} + + + {% trans "Status" %} + + {% if system_settings.enable_jira %} + {% if jira_project and product_tab or not product_tab %} + + {% trans "Jira" %} + + + {% trans "Jira Age" %} + + + {% trans "Jira Change" %} + + {% endif %} + {% endif %} + {% if 'is_finding_groups_enabled'|system_setting_enabled %} - {% trans "Jira Change" %} + {% trans "Group" %} {% endif %} - {% endif %} - {% if 'is_finding_groups_enabled'|system_setting_enabled %} - {% trans "Group" %} + {% trans "Planned Remediation" %} - {% endif %} - - {% trans "Planned Remediation" %} - + {% endblock header %} {% for finding in findings %} - {% if test|has_object_permission:"Test_Edit" or test|has_object_permission:"Test_Delete" %} - -
- -
- - {% endif %} - -
-
- - - - - {{ finding.severity_display }} - - - - {% if finding.title %} - {{ finding.title|truncatechars_html:60 }} - {% else %} - {{ finding.id }} - {% endif %} - {% if finding.file_path %} - - {% endif %} - {% if finding.endpoints.all %} - + + {{ finding.severity_display }} + + + + {% if finding.title %} + {{ finding.title|truncatechars_html:60 }} + {% else %} + {{ finding.id }} + {% endif %} + {% if finding.file_path %} + + {% endif %} + {% if finding.endpoints.all %} + + {% endif %} + {% if finding.component_name %} + + {% endif %} + {% if finding.notes.all %} + + + ({{ finding.notes.count }}) + + {% endif %} + {% include "dojo/snippets/tags.html" with tags=finding.tags.all %} + + + {% if finding.cwe > 0 %} + + {{ finding.cwe }} + + {% endif %} + + + {% with finding|first_vulnerability_id as first_vulnerability_id %} + {% if first_vulnerability_id %} + {% if first_vulnerability_id|has_vulnerability_url %} + + {{ first_vulnerability_id|default:"" }} + + {% else %} + {{ first_vulnerability_id }} + {% endif %} {% endif %} - {% endfor %} - " data-placement="right" data-container="body" data-original-title="Endpoints ({{ finding.active_endpoint_count }} Active, {{ finding.mitigated_endpoint_count }} Mitigated)" title=""> - {% endif %} - {% if finding.component_name %} - - {% endif %} - {% if finding.notes.all %} - - - ({{ finding.notes.count }}) - - {% endif %} - {% include "dojo/snippets/tags.html" with tags=finding.tags.all %} - - - {% if finding.cwe > 0 %} - - {{ finding.cwe }} - + {% endwith %} + + + {{ finding.date }} + + + {{ finding.age }} + + {% if system_settings.enable_finding_sla %} + + {{ finding|finding_sla }} + {% endif %} - - - {% with finding|first_vulnerability_id as first_vulnerability_id %} - {% if first_vulnerability_id %} - {% if first_vulnerability_id|has_vulnerability_url %} - - {{ first_vulnerability_id|default:"" }} - - {% else %} - {{ first_vulnerability_id }} - {% endif %} - {% endif %} - {% endwith %} - - - {{ finding.date }} - - - {{ finding.age }} - - {% if system_settings.enable_finding_sla %} - {{ finding|finding_sla }} + {% if finding.reporter.get_full_name and finding.reporter.get_full_name.strip %} + {{ finding.reporter.get_full_name }} + {% else %} + {{ finding.reporter }} + {% endif %} - {% endif %} - - {% if finding.reporter.get_full_name and finding.reporter.get_full_name.strip %} - {{ finding.reporter.get_full_name }} - {% else %} - {{ finding.reporter }} + + {{ finding|finding_display_status|safe }} {{ finding|import_history }} + + {% if system_settings.enable_jira %} + {% if jira_project and product_tab or not product_tab %} + + {% if finding.has_jira_group_issue %} + {{ finding.finding_group | jira_key }} + {% elif finding.has_jira_issue %} + {{ finding | jira_key }} + {% endif %} + + + {% if finding.has_jira_group_issue %} + {{ finding.finding_group | jira_creation | timesince }} + {% else %} + {{ finding | jira_creation | timesince }} + {% endif %} + + + {% if finding.has_jira_group_issue %} + {{ finding.finding_group | jira_change | timesince }} + {% else %} + {{ finding | jira_change | timesince }} + {% endif %} + + {% endif %} {% endif %} - - - {{ finding|finding_display_status|safe }} {{ finding|import_history }} - - {% if system_settings.enable_jira %} - {% if jira_project and product_tab or not product_tab %} - - {% if finding.has_jira_group_issue %} - {{ finding.finding_group | jira_key }} - {% elif finding.has_jira_issue %} - {{ finding | jira_key }} - {% endif %} - - - {% if finding.has_jira_group_issue %} - {{ finding.finding_group | 
jira_creation | timesince }} - {% else %} - {{ finding | jira_creation | timesince }} - {% endif %} - - - {% if finding.has_jira_group_issue %} - {{ finding.finding_group | jira_change | timesince }} - {% else %} - {{ finding | jira_change | timesince }} + {% if 'is_finding_groups_enabled'|system_setting_enabled %} + + {% if finding.has_finding_group %} + {{ finding.finding_group.name }} {% endif %} {% endif %} - {% endif %} - {% if 'is_finding_groups_enabled'|system_setting_enabled %} - - {% if finding.has_finding_group %} - {{ finding.finding_group.name }} - {% endif %} + + {% if finding.planned_remediation_date %}{{ finding.planned_remediation_date }}{% endif %} - {% endif %} - - {% if finding.planned_remediation_date %}{{ finding.planned_remediation_date }}{% endif %} - + {% endblock body %} {% endfor %} @@ -1555,36 +1559,14 @@

{% trans "ProTip!" %} {% trans "Type" %}s e {% trans "to edit this test. Type" %} a {% trans "to add a finding to this test." %} {% endblock %} + {% block postscript %} {{ block.super }} - +
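Finally, AdHocFindingView.process_forms earlier in this diff coordinates the finding, JIRA, and GitHub forms by collecting a success flag from each processor and committing only when every flag is true. A framework-free sketch of that flag-aggregation shape, with illustrative names that do not appear in the PR, is:

```python
# Framework-free sketch of the "collect per-form success flags, commit only if
# all succeeded" shape used by AdHocFindingView.process_forms. Illustrative only.
from typing import Callable


def process_forms(processors: list[Callable[[], bool]], commit: Callable[[], None]) -> bool:
    # Run every processor, remembering whether each one succeeded.
    form_success_list = [processor() for processor in processors]
    all_forms_valid = all(form_success_list)
    if all_forms_valid:
        # Persist only once every optional form has validated successfully.
        commit()
    return all_forms_valid


if __name__ == "__main__":
    saved = process_forms(
        processors=[lambda: True, lambda: True, lambda: True],
        commit=lambda: print("saved"),
    )
    print("all forms valid:", saved)
```

In the sketch nothing is persisted until every processor reports success; the PR's process_finding_form saves the finding earlier in its own step, so only the flag-aggregation shape is being illustrated here.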