diff --git a/.github/workflows/build-docker-images-for-testing.yml b/.github/workflows/build-docker-images-for-testing.yml
index 00ffd8aff16..de040266a13 100644
--- a/.github/workflows/build-docker-images-for-testing.yml
+++ b/.github/workflows/build-docker-images-for-testing.yml
@@ -35,7 +35,7 @@ jobs:
- name: Build
id: docker_build
- uses: docker/build-push-action@v5
+ uses: docker/build-push-action@v6
timeout-minutes: 10
with:
context: .
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
index 36c951cafec..78b7e373386 100644
--- a/.github/workflows/integration-tests.yml
+++ b/.github/workflows/integration-tests.yml
@@ -36,7 +36,7 @@ jobs:
"tests/tool_config.py",
"openapi-validatator",
]
- profile: ["mysql-rabbitmq", "postgres-redis"]
+ profile: ["postgres-rabbitmq", "postgres-redis"]
os: [alpine, debian]
fail-fast: false
@@ -59,10 +59,10 @@ jobs:
- name: Set integration-test mode
run: ln -s docker-compose.override.integration_tests.yml docker-compose.override.yml
- # phased startup with MySQL and RabbitMQ so we can use the exit code from integrationtest container
- - name: Start Dojo MySQL + RabbitMQ
- if: matrix.profile == 'mysql-rabbitmq'
- run: docker compose --profile ${{ matrix.profile }} --env-file ./docker/environments/${{ matrix.profile }}.env up --no-deps -d mysql nginx celerybeat celeryworker mailhog uwsgi rabbitmq
+ # phased startup with PostgreSQL and RabbitMQ so we can use the exit code from integrationtest container
+ - name: Start Dojo PostgreSQL + RabbitMQ
+ if: matrix.profile == 'postgres-rabbitmq'
+ run: docker compose --profile ${{ matrix.profile }} --env-file ./docker/environments/${{ matrix.profile }}.env up --no-deps -d postgres nginx celerybeat celeryworker mailhog uwsgi rabbitmq
env:
DJANGO_VERSION: ${{ matrix.os }}
NGINX_VERSION: ${{ matrix.os }}
diff --git a/.github/workflows/release-x-manual-docker-containers.yml b/.github/workflows/release-x-manual-docker-containers.yml
index 87d23a2a4db..6e167143783 100644
--- a/.github/workflows/release-x-manual-docker-containers.yml
+++ b/.github/workflows/release-x-manual-docker-containers.yml
@@ -63,7 +63,7 @@ jobs:
- name: Build and push images with debian
if: ${{ matrix.os == 'debian' }}
- uses: docker/build-push-action@v5
+ uses: docker/build-push-action@v6
env:
REPO_ORG: ${{ env.repoorg }}
docker-image: ${{ matrix.docker-image }}
@@ -77,7 +77,7 @@ jobs:
- name: Build and push images with alpine
if: ${{ matrix.os == 'alpine' }}
- uses: docker/build-push-action@v5
+ uses: docker/build-push-action@v6
env:
REPO_ORG: ${{ env.repoorg }}
docker-image: ${{ matrix.docker-image }}
diff --git a/.github/workflows/rest-framework-tests.yml b/.github/workflows/rest-framework-tests.yml
index 134fbbef319..900e3e5fa40 100644
--- a/.github/workflows/rest-framework-tests.yml
+++ b/.github/workflows/rest-framework-tests.yml
@@ -34,20 +34,20 @@ jobs:
run: docker/setEnv.sh unit_tests_cicd
# phased startup so we can use the exit code from unit test container
- - name: Start MySQL
- run: docker compose --env-file ./docker/environments/mysql-redis.env up -d mysql
+ - name: Start Postgres
+ run: docker compose --env-file ./docker/environments/postgres-redis.env up -d postgres
# no celery or initializer needed for unit tests
- name: Unit tests
timeout-minutes: 10
- run: docker compose --profile mysql-redis --env-file ./docker/environments/mysql-redis.env up --no-deps --exit-code-from uwsgi uwsgi
+ run: docker compose --profile postgres-redis --env-file ./docker/environments/postgres-redis.env up --no-deps --exit-code-from uwsgi uwsgi
env:
DJANGO_VERSION: ${{ matrix.os }}
- name: Logs
if: failure()
- run: docker compose --profile mysql-redis --env-file ./docker/environments/mysql-redis.env logs --tail="2500" uwsgi
+ run: docker compose --profile postgres-redis --env-file ./docker/environments/postgres-redis.env logs --tail="2500" uwsgi
- name: Shutdown
if: always()
- run: docker compose --profile mysql-redis --env-file ./docker/environments/mysql-redis.env down
+ run: docker compose --profile postgres-redis --env-file ./docker/environments/postgres-redis.env down
diff --git a/Dockerfile.nginx-alpine b/Dockerfile.nginx-alpine
index c5e01860a49..b9a55ac4158 100644
--- a/Dockerfile.nginx-alpine
+++ b/Dockerfile.nginx-alpine
@@ -140,7 +140,7 @@ COPY manage.py ./
COPY dojo/ ./dojo/
RUN env DD_SECRET_KEY='.' python3 manage.py collectstatic --noinput && true
-FROM nginx:1.27.0-alpine@sha256:69f8c2c72671490607f52122be2af27d4fc09657ff57e42045801aa93d2090f7
+FROM nginx:1.27.0-alpine@sha256:a45ee5d042aaa9e81e013f97ae40c3dda26fbe98f22b6251acdf28e579560d55
ARG uid=1001
ARG appuser=defectdojo
COPY --from=collectstatic /app/static/ /usr/share/nginx/html/static/
diff --git a/Dockerfile.nginx-debian b/Dockerfile.nginx-debian
index 14edac5c529..a1fd76d05fe 100644
--- a/Dockerfile.nginx-debian
+++ b/Dockerfile.nginx-debian
@@ -75,7 +75,7 @@ COPY dojo/ ./dojo/
RUN env DD_SECRET_KEY='.' python3 manage.py collectstatic --noinput && true
-FROM nginx:1.27.0-alpine@sha256:69f8c2c72671490607f52122be2af27d4fc09657ff57e42045801aa93d2090f7
+FROM nginx:1.27.0-alpine@sha256:a45ee5d042aaa9e81e013f97ae40c3dda26fbe98f22b6251acdf28e579560d55
ARG uid=1001
ARG appuser=defectdojo
COPY --from=collectstatic /app/static/ /usr/share/nginx/html/static/
diff --git a/components/package.json b/components/package.json
index 688f4f5d52f..ab3201e6a41 100644
--- a/components/package.json
+++ b/components/package.json
@@ -1,6 +1,6 @@
{
"name": "defectdojo",
- "version": "2.36.0-dev",
+ "version": "2.37.0-dev",
"license" : "BSD-3-Clause",
"private": true,
"dependencies": {
diff --git a/docker-compose.yml b/docker-compose.yml
index d71da59c498..36dec075328 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -141,7 +141,7 @@ services:
volumes:
- defectdojo_data:/var/lib/mysql
postgres:
- image: postgres:16.3-alpine@sha256:e89da2c083a5405943408b6807cd1fd25dc9010c1294e30611b841778bedc653
+ image: postgres:16.3-alpine@sha256:de3d7b6e4b5b3fe899e997579d6dfe95a99539d154abe03f0b6839133ed05065
profiles:
- postgres-rabbitmq
- postgres-redis
@@ -159,7 +159,7 @@ services:
volumes:
- defectdojo_rabbitmq:/var/lib/rabbitmq
redis:
- image: redis:7.2.5-alpine@sha256:0389bb8416d7c6ed065c25745179bf5d358e5d9472dd30a687ab36ffbb650262
+ image: redis:7.2.5-alpine@sha256:de14eedfbd1fc871d0f5aa1773fd80743930e45354d035b6f3b551e7ffa44df8
profiles:
- mysql-redis
- postgres-redis
diff --git a/docker/entrypoint-unit-tests-devDocker.sh b/docker/entrypoint-unit-tests-devDocker.sh
index c590974b1b4..6872d8668fc 100755
--- a/docker/entrypoint-unit-tests-devDocker.sh
+++ b/docker/entrypoint-unit-tests-devDocker.sh
@@ -53,7 +53,7 @@ EOF
echo "Unit Tests"
echo "------------------------------------------------------------"
-python3 manage.py test unittests -v 3 --keepdb --no-input
+python3 manage.py test unittests -v 3 --keepdb --no-input --shuffle
# you can select a single file to "test" unit tests
# python3 manage.py test unittests.tools.test_npm_audit_scan_parser.TestNpmAuditParser --keepdb -v 3
diff --git a/docker/entrypoint-unit-tests.sh b/docker/entrypoint-unit-tests.sh
index 6c45ce489d6..a356283c377 100755
--- a/docker/entrypoint-unit-tests.sh
+++ b/docker/entrypoint-unit-tests.sh
@@ -79,4 +79,4 @@ python3 manage.py migrate
echo "Unit Tests"
echo "------------------------------------------------------------"
-python3 manage.py test unittests -v 3 --keepdb --no-input
+python3 manage.py test unittests -v 3 --keepdb --no-input --shuffle
diff --git a/docker/install_chrome_dependencies.py b/docker/install_chrome_dependencies.py
index 5f4f714a430..2bf949c86ca 100644
--- a/docker/install_chrome_dependencies.py
+++ b/docker/install_chrome_dependencies.py
@@ -1,3 +1,4 @@
+# noqa: INP001
"""
This solution is largely based on the Playwright's browser dependencies script at
https://github.com/microsoft/playwright/blob/main/utils/linux-browser-dependencies/inside_docker/list_dependencies.js
diff --git a/docs/content/en/getting_started/upgrading/2.36.md b/docs/content/en/getting_started/upgrading/2.36.md
new file mode 100644
index 00000000000..260c86960de
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.36.md
@@ -0,0 +1,7 @@
+---
+title: 'Upgrading to DefectDojo Version 2.36.x'
+toc_hide: true
+weight: -20240603
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.36.x. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.36.0) for the contents of the release.
diff --git a/docs/content/en/integrations/parsers/file/deepfence_threatmapper.md b/docs/content/en/integrations/parsers/file/deepfence_threatmapper.md
new file mode 100644
index 00000000000..84044fb72b4
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/deepfence_threatmapper.md
@@ -0,0 +1,8 @@
+---
+title: "Deepfence Threatmapper"
+toc_hide: true
+---
+Import compliance, malware, secret, and vulnerability reports from [Deepfence Threatmapper](https://github.com/deepfence/ThreatMapper) in XLSX file format.
+
+### Sample Scan Data
+Sample Threatmapper scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/deepfence_threatmapper). Both .xlsx and .csv files are listed there; they contain the same content, but the .csv files are only provided so the data can be read in the browser, while the parser supports only .xlsx.
\ No newline at end of file
diff --git a/dojo/__init__.py b/dojo/__init__.py
index 423f4050e5c..707177ee3ee 100644
--- a/dojo/__init__.py
+++ b/dojo/__init__.py
@@ -4,6 +4,6 @@
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa: F401
-__version__ = '2.36.0-dev'
+__version__ = '2.37.0-dev'
__url__ = 'https://github.com/DefectDojo/django-DefectDojo'
__docs__ = 'https://documentation.defectdojo.com'
diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py
index fe8fe2d60b7..4de5d536d07 100644
--- a/dojo/api_v2/serializers.py
+++ b/dojo/api_v2/serializers.py
@@ -559,6 +559,8 @@ def validate(self, data):
class UserContactInfoSerializer(serializers.ModelSerializer):
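+    # Expose the related Django user as a nested, read-only object alongside the contact info fields.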
+ user_profile = UserSerializer(many=False, source="user", read_only=True)
+
class Meta:
model = UserContactInfo
fields = "__all__"
diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py
index cbe2c9cb58e..d0fe775b070 100644
--- a/dojo/api_v2/views.py
+++ b/dojo/api_v2/views.py
@@ -2967,7 +2967,7 @@ def report_generate(request, obj, options):
report_name = "Finding"
else:
- raise Http404()
+ raise Http404
result = {
"product_type": product_type,
diff --git a/dojo/authorization/authorization.py b/dojo/authorization/authorization.py
index 28885137156..8538101cf52 100644
--- a/dojo/authorization/authorization.py
+++ b/dojo/authorization/authorization.py
@@ -243,17 +243,17 @@ def user_has_global_permission(user, permission):
def user_has_configuration_permission_or_403(user, permission):
if not user_has_configuration_permission(user, permission):
- raise PermissionDenied()
+ raise PermissionDenied
def user_has_permission_or_403(user, obj, permission):
if not user_has_permission(user, obj, permission):
- raise PermissionDenied()
+ raise PermissionDenied
def user_has_global_permission_or_403(user, permission):
if not user_has_global_permission(user, permission):
- raise PermissionDenied()
+ raise PermissionDenied
def get_roles_for_permission(permission):
diff --git a/dojo/decorators.py b/dojo/decorators.py
index 664989f8ffc..c919a2995bc 100644
--- a/dojo/decorators.py
+++ b/dojo/decorators.py
@@ -182,7 +182,7 @@ def _wrapped(request, *args, **kw):
dojo_user = Dojo_User.objects.filter(username=username).first()
if dojo_user:
Dojo_User.enable_force_password_reset(dojo_user)
- raise Ratelimited()
+ raise Ratelimited
return fn(request, *args, **kw)
return _wrapped
return decorator
diff --git a/dojo/engagement/views.py b/dojo/engagement/views.py
index b2fc5bff906..f0c542e2d96 100644
--- a/dojo/engagement/views.py
+++ b/dojo/engagement/views.py
@@ -114,7 +114,7 @@
def engagement_calendar(request):
if not get_system_setting('enable_calendar'):
- raise Resolver404()
+ raise Resolver404
if 'lead' not in request.GET or '0' in request.GET.getlist('lead'):
engagements = get_authorized_engagements(Permissions.Engagement_View)
@@ -1205,7 +1205,7 @@ def add_risk_acceptance(request, eid, fid=None):
finding = get_object_or_404(Finding, id=fid)
if not eng.product.enable_full_risk_acceptance:
- raise PermissionDenied()
+ raise PermissionDenied
if request.method == 'POST':
form = RiskAcceptanceForm(request.POST, request.FILES)
@@ -1283,7 +1283,7 @@ def view_edit_risk_acceptance(request, eid, raid, edit_mode=False):
eng = get_object_or_404(Engagement, pk=eid)
if edit_mode and not eng.product.enable_full_risk_acceptance:
- raise PermissionDenied()
+ raise PermissionDenied
risk_acceptance_form = None
errors = False
@@ -1455,7 +1455,7 @@ def reinstate_risk_acceptance(request, eid, raid):
eng = get_object_or_404(Engagement, pk=eid)
if not eng.product.enable_full_risk_acceptance:
- raise PermissionDenied()
+ raise PermissionDenied
ra_helper.reinstate(risk_acceptance, risk_acceptance.expiration_date)
diff --git a/dojo/finding/views.py b/dojo/finding/views.py
index d54baafb40c..f7624c996ce 100644
--- a/dojo/finding/views.py
+++ b/dojo/finding/views.py
@@ -1212,7 +1212,7 @@ def post(self, request: HttpRequest, finding_id):
# Handle the case of a successful form
if success:
return redirect_to_return_url_or_else(request, reverse("view_test", args=(finding.test.id,)))
- raise PermissionDenied()
+ raise PermissionDenied
@user_is_authorized(Finding, Permissions.Finding_Edit, "fid")
@@ -1500,7 +1500,7 @@ def apply_template_cwe(request, fid):
extra_tags="alert-danger",
)
else:
- raise PermissionDenied()
+ raise PermissionDenied
@user_is_authorized(Finding, Permissions.Finding_Edit, "fid")
@@ -1614,7 +1614,7 @@ def simple_risk_accept(request, fid):
finding = get_object_or_404(Finding, id=fid)
if not finding.test.engagement.product.enable_simple_risk_acceptance:
- raise PermissionDenied()
+ raise PermissionDenied
ra_helper.simple_risk_accept(finding)
@@ -1741,7 +1741,7 @@ def clear_finding_review(request, fid):
# the review or one of the users requested to provide the review, then
# do not allow the user to clear the review.
if user != finding.review_requested_by and user not in finding.reviewers.all():
- raise PermissionDenied()
+ raise PermissionDenied
# in order to clear a review for a finding, we need to capture why and how it was reviewed
# we can do this with a Note
@@ -2058,7 +2058,7 @@ def delete_stub_finding(request, fid):
extra_tags="alert-danger",
)
else:
- raise PermissionDenied()
+ raise PermissionDenied
@user_is_authorized(Stub_Finding, Permissions.Finding_Edit, "fid")
@@ -2442,7 +2442,7 @@ def delete_template(request, tid):
extra_tags="alert-danger",
)
else:
- raise PermissionDenied()
+ raise PermissionDenied
def download_finding_pic(request, token):
diff --git a/dojo/importers/__init__.py b/dojo/importers/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/dojo/metrics/views.py b/dojo/metrics/views.py
index 6428fb453dd..718b21cd019 100644
--- a/dojo/metrics/views.py
+++ b/dojo/metrics/views.py
@@ -583,7 +583,7 @@ def view_engineer(request, eid):
user = get_object_or_404(Dojo_User, pk=eid)
if not (request.user.is_superuser
or request.user.username == user.username):
- raise PermissionDenied()
+ raise PermissionDenied
now = timezone.now()
findings = Finding.objects.filter(reporter=user, verified=True)
diff --git a/dojo/models.py b/dojo/models.py
index 415bc6b4567..5de06d42743 100644
--- a/dojo/models.py
+++ b/dojo/models.py
@@ -3614,22 +3614,22 @@ class Risk_Acceptance(models.Model):
TREATMENT_FIX = 'F'
TREATMENT_TRANSFER = 'T'
- TREATMENT_CHOICES = [
- (TREATMENT_ACCEPT, 'Accept (The risk is acknowledged, yet remains)'),
- (TREATMENT_AVOID, 'Avoid (Do not engage with whatever creates the risk)'),
- (TREATMENT_MITIGATE, 'Mitigate (The risk still exists, yet compensating controls make it less of a threat)'),
- (TREATMENT_FIX, 'Fix (The risk is eradicated)'),
- (TREATMENT_TRANSFER, 'Transfer (The risk is transferred to a 3rd party)'),
- ]
-
TREATMENT_TRANSLATIONS = {
- 'A': 'Accept (The risk is acknowledged, yet remains)',
- 'V': 'Avoid (Do not engage with whatever creates the risk)',
- 'M': 'Mitigate (The risk still exists, yet compensating controls make it less of a threat)',
- 'F': 'Fix (The risk is eradicated)',
- 'T': 'Transfer (The risk is transferred to a 3rd party)',
+ TREATMENT_ACCEPT: _('Accept (The risk is acknowledged, yet remains)'),
+ TREATMENT_AVOID: _('Avoid (Do not engage with whatever creates the risk)'),
+ TREATMENT_MITIGATE: _('Mitigate (The risk still exists, yet compensating controls make it less of a threat)'),
+ TREATMENT_FIX: _('Fix (The risk is eradicated)'),
+ TREATMENT_TRANSFER: _('Transfer (The risk is transferred to a 3rd party)'),
}
+ TREATMENT_CHOICES = [
+ (TREATMENT_ACCEPT, TREATMENT_TRANSLATIONS[TREATMENT_ACCEPT]),
+ (TREATMENT_AVOID, TREATMENT_TRANSLATIONS[TREATMENT_AVOID]),
+ (TREATMENT_MITIGATE, TREATMENT_TRANSLATIONS[TREATMENT_MITIGATE]),
+ (TREATMENT_FIX, TREATMENT_TRANSLATIONS[TREATMENT_FIX]),
+ (TREATMENT_TRANSFER, TREATMENT_TRANSLATIONS[TREATMENT_TRANSFER]),
+ ]
+
name = models.CharField(max_length=300, null=False, blank=False, help_text=_("Descriptive name which in the future may also be used to group risk acceptances together across engagements and products"))
accepted_findings = models.ManyToManyField(Finding)
diff --git a/dojo/notifications/views.py b/dojo/notifications/views.py
index 10616dd1b11..f20e45224fe 100644
--- a/dojo/notifications/views.py
+++ b/dojo/notifications/views.py
@@ -25,7 +25,7 @@ def get_notifications(self, request: HttpRequest):
def check_user_permissions(self, request: HttpRequest):
if not request.user.is_superuser:
- raise PermissionDenied()
+ raise PermissionDenied
def get_form(self, request: HttpRequest, notifications: Notifications):
# Set up the args for the form
diff --git a/dojo/product/views.py b/dojo/product/views.py
index 580bb2c6442..9a70751ae1a 100644
--- a/dojo/product/views.py
+++ b/dojo/product/views.py
@@ -839,7 +839,7 @@ def import_scan_results_prod(request, pid=None):
def new_product(request, ptid=None):
if get_authorized_product_types(Permissions.Product_Type_Add_Product).count() == 0:
- raise PermissionDenied()
+ raise PermissionDenied
jira_project_form = None
error = False
@@ -1822,7 +1822,7 @@ def edit_api_scan_configuration(request, pid, pascid):
if product_api_scan_configuration.product.pk != int(
pid): # user is trying to edit Tool Configuration from another product (trying to by-pass auth)
- raise Http404()
+ raise Http404
if request.method == 'POST':
form = Product_API_Scan_ConfigurationForm(request.POST, instance=product_api_scan_configuration)
@@ -1868,7 +1868,7 @@ def delete_api_scan_configuration(request, pid, pascid):
if product_api_scan_configuration.product.pk != int(
pid): # user is trying to delete Tool Configuration from another product (trying to by-pass auth)
- raise Http404()
+ raise Http404
if request.method == 'POST':
form = Product_API_Scan_ConfigurationForm(request.POST)
diff --git a/dojo/reports/views.py b/dojo/reports/views.py
index 113b70dbd18..b815c81eca1 100644
--- a/dojo/reports/views.py
+++ b/dojo/reports/views.py
@@ -123,7 +123,7 @@ def post(self, request: HttpRequest) -> HttpResponse:
self._set_state(request)
return render(request, self.get_template(), self.get_context())
else:
- raise PermissionDenied()
+ raise PermissionDenied
def _set_state(self, request: HttpRequest):
self.request = request
@@ -157,7 +157,7 @@ def get_template(self):
elif self.report_format == 'HTML':
return 'dojo/custom_html_report.html'
else:
- raise PermissionDenied()
+ raise PermissionDenied
def get_context(self):
return {
@@ -369,7 +369,7 @@ def product_endpoint_report(request, pid):
'title': 'Generate Report',
})
else:
- raise Http404()
+ raise Http404
product_tab = Product_Tab(product, "Product Endpoint Report", tab="endpoints")
return render(request,
@@ -609,7 +609,7 @@ def generate_report(request, obj, host_view=False):
'host': report_url_resolver(request),
'user_id': request.user.id}
else:
- raise Http404()
+ raise Http404
report_form = ReportOptionsForm()
@@ -665,7 +665,7 @@ def generate_report(request, obj, host_view=False):
})
else:
- raise Http404()
+ raise Http404
paged_findings = get_page_items(request, findings.qs.distinct().order_by('numerical_severity'), 25)
product_tab = None
diff --git a/dojo/risk_acceptance/helper.py b/dojo/risk_acceptance/helper.py
index 0159517ebfe..9ceedfaab47 100644
--- a/dojo/risk_acceptance/helper.py
+++ b/dojo/risk_acceptance/helper.py
@@ -272,7 +272,7 @@ def prefetch_for_expiration(risk_acceptances):
def simple_risk_accept(finding, perform_save=True):
if not finding.test.engagement.product.enable_simple_risk_acceptance:
- raise PermissionDenied()
+ raise PermissionDenied
logger.debug('accepting finding %i:%s', finding.id, finding)
finding.risk_accepted = True
diff --git a/dojo/settings/.settings.dist.py.sha256sum b/dojo/settings/.settings.dist.py.sha256sum
index 4de58bdb1b8..4885a819303 100644
--- a/dojo/settings/.settings.dist.py.sha256sum
+++ b/dojo/settings/.settings.dist.py.sha256sum
@@ -1 +1 @@
-e9aab91c011f6aa1933791c57e7c37b165e5369606c459f772c4269c56212b53
+7b3bb14160f3ffce537d75895ee18cb0a561232fa964bae88b4861f7d289b176
diff --git a/dojo/settings/attribute-maps/__init__.py b/dojo/settings/attribute-maps/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py
index 4362b844053..e207309417c 100644
--- a/dojo/settings/settings.dist.py
+++ b/dojo/settings/settings.dist.py
@@ -1265,9 +1265,11 @@ def saml2_attrib_map_format(dict):
'MobSF Scan': ['title', 'description', 'severity'],
'OSV Scan': ['title', 'description', 'severity'],
'Snyk Code Scan': ['vuln_id_from_tool', 'file_path'],
+ 'Deepfence Threatmapper Report': ['title', 'description', 'severity'],
'Bearer CLI': ['title', 'severity'],
'Nancy Scan': ['title', 'vuln_id_from_tool'],
- 'Wiz Scan': ['title', 'description', 'severity']
+ 'Wiz Scan': ['title', 'description', 'severity'],
+ 'Kubescape JSON Importer': ['title', 'component_name']
}
# Override the hardcoded settings here via the env var
@@ -1485,6 +1487,8 @@ def saml2_attrib_map_format(dict):
'Nosey Parker Scan': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE,
'Bearer CLI': DEDUPE_ALGO_HASH_CODE,
'Wiz Scan': DEDUPE_ALGO_HASH_CODE,
+ 'Deepfence Threatmapper Report': DEDUPE_ALGO_HASH_CODE,
+ 'Kubescape JSON Importer': DEDUPE_ALGO_HASH_CODE
}
# Override the hardcoded settings here via the env var
diff --git a/dojo/survey/views.py b/dojo/survey/views.py
index 3dc704fe6e9..091d68492e1 100644
--- a/dojo/survey/views.py
+++ b/dojo/survey/views.py
@@ -377,7 +377,7 @@ def edit_questionnaire_questions(request, sid):
survey = get_object_or_404(Engagement_Survey, id=sid)
if not user_has_configuration_permission(request.user, 'dojo.add_engagement_survey') and \
not user_has_configuration_permission(request.user, 'dojo.change_engagement_survey'):
- raise PermissionDenied()
+ raise PermissionDenied
answered_surveys = Answered_Survey.objects.filter(survey=survey)
reverted = False
@@ -548,7 +548,7 @@ def edit_question(request, qid):
elif type == 'dojo | choice question':
form = EditChoiceQuestionForm(instance=question)
else:
- raise Http404()
+ raise Http404
if request.method == 'POST':
if type == 'dojo | text question':
@@ -556,7 +556,7 @@ def edit_question(request, qid):
elif type == 'dojo | choice question':
form = EditChoiceQuestionForm(request.POST, instance=question)
else:
- raise Http404()
+ raise Http404
if form.is_valid():
form.save()
@@ -759,7 +759,7 @@ def answer_empty_survey(request, esid):
'You must be logged in to answer questionnaire. Otherwise, enable anonymous response in system settings.',
extra_tags='alert-danger')
# will render 403
- raise PermissionDenied()
+ raise PermissionDenied
questions = [
q.get_form()(
diff --git a/dojo/templates/notifications/mail/forgot_password.tpl b/dojo/templates/login/forgot_password.tpl
similarity index 100%
rename from dojo/templates/notifications/mail/forgot_password.tpl
rename to dojo/templates/login/forgot_password.tpl
diff --git a/dojo/templates/dojo/forgot_username.html b/dojo/templates/login/forgot_username.html
similarity index 100%
rename from dojo/templates/dojo/forgot_username.html
rename to dojo/templates/login/forgot_username.html
diff --git a/dojo/templates/notifications/mail/forgot_username.tpl b/dojo/templates/login/forgot_username.tpl
similarity index 100%
rename from dojo/templates/notifications/mail/forgot_username.tpl
rename to dojo/templates/login/forgot_username.tpl
diff --git a/dojo/templates/dojo/forgot_username_done.html b/dojo/templates/login/forgot_username_done.html
similarity index 100%
rename from dojo/templates/dojo/forgot_username_done.html
rename to dojo/templates/login/forgot_username_done.html
diff --git a/dojo/templates/dojo/forgot_username_subject.html b/dojo/templates/login/forgot_username_subject.html
similarity index 100%
rename from dojo/templates/dojo/forgot_username_subject.html
rename to dojo/templates/login/forgot_username_subject.html
diff --git a/dojo/templates/dojo/password_reset.html b/dojo/templates/login/password_reset.html
similarity index 100%
rename from dojo/templates/dojo/password_reset.html
rename to dojo/templates/login/password_reset.html
diff --git a/dojo/templates/dojo/password_reset_complete.html b/dojo/templates/login/password_reset_complete.html
similarity index 100%
rename from dojo/templates/dojo/password_reset_complete.html
rename to dojo/templates/login/password_reset_complete.html
diff --git a/dojo/templates/dojo/password_reset_confirm.html b/dojo/templates/login/password_reset_confirm.html
similarity index 100%
rename from dojo/templates/dojo/password_reset_confirm.html
rename to dojo/templates/login/password_reset_confirm.html
diff --git a/dojo/templates/dojo/password_reset_done.html b/dojo/templates/login/password_reset_done.html
similarity index 100%
rename from dojo/templates/dojo/password_reset_done.html
rename to dojo/templates/login/password_reset_done.html
diff --git a/dojo/templates/notifications/mail/scan_added.tpl b/dojo/templates/notifications/mail/scan_added.tpl
index ce09eecd4ca..513b24818db 100644
--- a/dojo/templates/notifications/mail/scan_added.tpl
+++ b/dojo/templates/notifications/mail/scan_added.tpl
@@ -16,40 +16,48 @@
{% blocktranslate %}{{ finding_count }} findings have been updated for while a scan was uploaded{% endblocktranslate %}:
{{product}} / {{ engagement.name }} / {{ test }}
- {% blocktranslate %}New findings{% endblocktranslate %}:
+
+ {% blocktranslate %}New findings{% endblocktranslate %} ({{ findings_new | length }})
{% for finding in findings_new %}
{% url 'view_finding' finding.id as finding_url %}
{{ finding.title }} ({{ finding.severity }})
{% empty %}
{% trans "None" %}
{% endfor %}
+
- {% blocktranslate %}Reactivated findings{% endblocktranslate %}:
+
+ {% blocktranslate %}Reactivated findings{% endblocktranslate %} ({{ findings_reactivated | length }})
{% for finding in findings_reactivated %}
{% url 'view_finding' finding.id as finding_url %}
{{ finding.title }} ({{ finding.severity }})
{% empty %}
{% trans "None" %}
{% endfor %}
+
- {% blocktranslate %}Closed findings{% endblocktranslate %}:
+
+ {% blocktranslate %}Closed findings{% endblocktranslate %} ({{ findings_mitigated | length }})
{% for finding in findings_mitigated %}
{% url 'view_finding' finding.id as finding_url %}
{{ finding.title }} ({{ finding.severity }})
{% empty %}
{% trans "None" %}
{% endfor %}
+
- {% blocktranslate %}Untouched findings{% endblocktranslate %}:
+
+ {% blocktranslate %}Untouched findings{% endblocktranslate %} ({{ findings_untouched | length }})
{% for finding in findings_untouched %}
{% url 'view_finding' finding.id as finding_url %}
{{ finding.title }} ({{ finding.severity }})
{% empty %}
{% trans "None" %}
{% endfor %}
+
{% trans "Kind regards" %},
diff --git a/dojo/test/views.py b/dojo/test/views.py
index bcb38514cd8..d15d518863d 100644
--- a/dojo/test/views.py
+++ b/dojo/test/views.py
@@ -402,7 +402,7 @@ def copy_test(request, tid):
def test_calendar(request):
if not get_system_setting('enable_calendar'):
- raise Resolver404()
+ raise Resolver404
if 'lead' not in request.GET or '0' in request.GET.getlist('lead'):
tests = get_authorized_tests(Permissions.Test_View)
diff --git a/dojo/tools/deepfence_threatmapper/__init__.py b/dojo/tools/deepfence_threatmapper/__init__.py
new file mode 100644
index 00000000000..3ad798a42b3
--- /dev/null
+++ b/dojo/tools/deepfence_threatmapper/__init__.py
@@ -0,0 +1 @@
+__author__ = "manuel-sommer"
diff --git a/dojo/tools/deepfence_threatmapper/compliance.py b/dojo/tools/deepfence_threatmapper/compliance.py
new file mode 100644
index 00000000000..5cd4f5b6340
--- /dev/null
+++ b/dojo/tools/deepfence_threatmapper/compliance.py
@@ -0,0 +1,54 @@
+from dojo.models import Finding
+
+
+class DeepfenceThreatmapperCompliance:
+ def get_findings(self, row, headers, test):
+ description = ""
+ compliance_check_type = row[headers["compliance_check_type"]]
+ count = row[headers["count"]]
+ doc_id = row[headers["doc_id"]]
+ host_name = row[headers["host_name"]]
+ cloud_account_id = row[headers["cloud_account_id"]]
+ masked = row[headers["masked"]]
+ node_id = row[headers["node_id"]]
+ node_name = row[headers["node_name"]]
+ node_type = row[headers["node_type"]]
+ status = row[headers["status"]]
+ test_category = row[headers["test_category"]]
+ test_desc = row[headers["test_desc"]]
+ test_info = row[headers["test_info"]]
+ test_number = row[headers["test_number"]]
+ description += "**compliance_check_type:** " + str(compliance_check_type) + "\n"
+ description += "**host_name:** " + str(host_name) + "\n"
+ description += "**cloud_account_id:** " + str(cloud_account_id) + "\n"
+ description += "**masked:** " + str(masked) + "\n"
+ description += "**node_id:** " + str(node_id) + "\n"
+ description += "**node_name:** " + str(node_name) + "\n"
+ description += "**node_type:** " + str(node_type) + "\n"
+ description += "**status:** " + str(status) + "\n"
+ description += "**test_category:** " + str(test_category) + "\n"
+ description += "**test_desc:** " + str(test_desc) + "\n"
+ description += "**test_info:** " + str(test_info) + "\n"
+ description += "**test_number:** " + str(test_number) + "\n"
+ description += "**count:** " + str(count) + "\n"
+ description += "**doc_id:** " + str(doc_id) + "\n"
+ finding = Finding(
+ title="Threatmapper_Compliance_Report-" + test_number,
+ description=description,
+ severity=self.compliance_severity(status),
+ static_finding=False,
+ dynamic_finding=True,
+ test=test,
+ )
+ return finding
+
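+    # Map Threatmapper compliance statuses onto DefectDojo severities: only "warn" is raised to Medium; pass, info and anything unknown stay Info.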
+ def compliance_severity(self, input):
+ if input == "pass":
+ output = "Info"
+ elif input == "info":
+ output = "Info"
+ elif input == "warn":
+ output = "Medium"
+ else:
+ output = "Info"
+ return output
diff --git a/dojo/tools/deepfence_threatmapper/malware.py b/dojo/tools/deepfence_threatmapper/malware.py
new file mode 100644
index 00000000000..f1931e42623
--- /dev/null
+++ b/dojo/tools/deepfence_threatmapper/malware.py
@@ -0,0 +1,39 @@
+from dojo.models import Finding
+
+
+class DeepfenceThreatmapperMalware:
+ def get_findings(self, row, headers, test):
+ description = ""
+ Rule_Name = row[headers["Rule Name"]]
+ Class = row[headers["Class"]]
+ File_Name = row[headers["File Name"]]
+ Summary = row[headers["Summary"]]
+ Severity = row[headers["Severity"]]
+ Node_Name = row[headers["Node Name"]]
+ NodeType = row[headers["NodeType"]]
+ Container_Name = row[headers["Container Name"]]
+ Kubernetes_Cluster_Name = row[headers["Kubernetes Cluster Name"]]
+ description += "**Summary:** " + str(Summary) + "\n"
+ description += "**Rule Name:** " + str(Rule_Name) + "\n"
+ description += "**Class:** " + str(Class) + "\n"
+ description += "**File Name:** " + str(File_Name) + "\n"
+ description += "**Node Name:** " + str(Node_Name) + "\n"
+ description += "**NodeType:** " + str(NodeType) + "\n"
+ description += "**Container Name:** " + str(Container_Name) + "\n"
+ description += "**Kubernetes Cluster Name:** " + str(Kubernetes_Cluster_Name) + "\n"
+ finding = Finding(
+ title=Rule_Name,
+ description=description,
+ file_path=File_Name,
+ severity=self.severity(Severity),
+ static_finding=False,
+ dynamic_finding=True,
+ test=test,
+ )
+ return finding
+
+ def severity(self, input):
+ if input is None:
+ return "Info"
+ else:
+ return input.capitalize()
diff --git a/dojo/tools/deepfence_threatmapper/parser.py b/dojo/tools/deepfence_threatmapper/parser.py
new file mode 100644
index 00000000000..3f5fd2a5a18
--- /dev/null
+++ b/dojo/tools/deepfence_threatmapper/parser.py
@@ -0,0 +1,40 @@
+from openpyxl import load_workbook
+
+from dojo.tools.deepfence_threatmapper.compliance import DeepfenceThreatmapperCompliance
+from dojo.tools.deepfence_threatmapper.malware import DeepfenceThreatmapperMalware
+from dojo.tools.deepfence_threatmapper.secret import DeepfenceThreatmapperSecret
+from dojo.tools.deepfence_threatmapper.vulnerability import DeepfenceThreatmapperVulnerability
+
+
+class DeepfenceThreatmapperParser:
+ def get_scan_types(self):
+ return ["Deepfence Threatmapper Report"]
+
+ def get_label_for_scan_types(self, scan_type):
+ return scan_type
+
+ def get_description_for_scan_types(self, scan_type):
+ return "Deepfence Threatmapper report in XLSX format."
+
+ def get_findings(self, filename, test):
+ workbook = load_workbook(filename)
+ worksheet = workbook.active
+ findings = []
+ headers = {}
+ first = True
+ for row in worksheet.iter_rows(min_row=1, values_only=True):
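+            # The first row supplies the column headers; each following row is dispatched to the sub-parser whose expected headers are present (malware, secret, vulnerability or compliance).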
+ if first:
+ first = False
+ for i in range(len(row)):
+ headers[row[i]] = i
+ elif headers.get("Rule Name") is not None and headers.get("Class") is not None:
+ findings.append(DeepfenceThreatmapperMalware().get_findings(row, headers, test))
+ elif headers.get("Filename") is not None and headers.get("Content") is not None:
+ value = DeepfenceThreatmapperSecret().get_findings(row, headers, test)
+ if value is not None:
+ findings.append(value)
+ elif headers.get("@timestamp") is not None and headers.get("cve_attack_vector") is not None:
+ findings.append(DeepfenceThreatmapperVulnerability().get_findings(row, headers, test))
+ elif headers.get("@timestamp") is not None and headers.get("compliance_check_type") is not None:
+ findings.append(DeepfenceThreatmapperCompliance().get_findings(row, headers, test))
+ return findings
diff --git a/dojo/tools/deepfence_threatmapper/secret.py b/dojo/tools/deepfence_threatmapper/secret.py
new file mode 100644
index 00000000000..fd102be834a
--- /dev/null
+++ b/dojo/tools/deepfence_threatmapper/secret.py
@@ -0,0 +1,42 @@
+from dojo.models import Finding
+
+
+class DeepfenceThreatmapperSecret:
+ def get_findings(self, row, headers, test):
+ description = ""
+ Filename = row[headers["Filename"]]
+ Content = row[headers["Content"]]
+ Name = row[headers["Name"]]
+ Rule = row[headers["Rule"]]
+ Severity = row[headers["Severity"]]
+ Node_Name = row[headers["Node Name"]]
+ Container_Name = row[headers["Container Name"]]
+ Kubernetes_Cluster_Name = row[headers["Kubernetes Cluster Name"]]
+ Signature = row[headers["Signature"]]
+ description += "**Filename:** " + str(Filename) + "\n"
+ description += "**Name:** " + str(Name) + "\n"
+ description += "**Rule:** " + str(Rule) + "\n"
+ description += "**Node Name:** " + str(Node_Name) + "\n"
+ description += "**Container Name:** " + str(Container_Name) + "\n"
+ description += "**Kubernetes Cluster Name:** " + str(Kubernetes_Cluster_Name) + "\n"
+ description += "**Content:** " + str(Content) + "\n"
+ description += "**Signature:** " + str(Signature) + "\n"
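+        # Only rows carrying both a name and a severity become findings; otherwise None is returned and the caller skips the row.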
+ if Name is not None and Severity is not None:
+ finding = Finding(
+ title=str(Name),
+ description=description,
+ file_path=Filename,
+ severity=self.severity(Severity),
+ static_finding=False,
+ dynamic_finding=True,
+ test=test,
+ )
+ else:
+ finding = None
+ return finding
+
+ def severity(self, input):
+ if input is None:
+ return "Info"
+ else:
+ return input.capitalize()
diff --git a/dojo/tools/deepfence_threatmapper/vulnerability.py b/dojo/tools/deepfence_threatmapper/vulnerability.py
new file mode 100644
index 00000000000..61c1e505cdc
--- /dev/null
+++ b/dojo/tools/deepfence_threatmapper/vulnerability.py
@@ -0,0 +1,50 @@
+from dojo.models import Finding
+
+
+class DeepfenceThreatmapperVulnerability:
+ def get_findings(self, row, headers, test):
+ description = ""
+ cve_attack_vector = row[headers["cve_attack_vector"]]
+ cve_caused_by_package = row[headers["cve_caused_by_package"]]
+ cve_container_image = row[headers["cve_container_image"]]
+ cve_container_image_id = row[headers["cve_container_image_id"]]
+ cve_description = row[headers["cve_description"]]
+ cve_fixed_in = row[headers["cve_fixed_in"]]
+ cve_id = row[headers["cve_id"]]
+ cve_link = row[headers["cve_link"]]
+ cve_severity = row[headers["cve_severity"]]
+ cve_overall_score = row[headers["cve_overall_score"]]
+ cve_type = row[headers["cve_type"]]
+ host_name = row[headers["host_name"]]
+ cloud_account_id = row[headers["cloud_account_id"]]
+ masked = row[headers["masked"]]
+ description += "**cve_attack_vector:** " + str(cve_attack_vector) + "\n"
+ description += "**cve_caused_by_package:** " + str(cve_caused_by_package) + "\n"
+ description += "**cve_container_image:** " + str(cve_container_image) + "\n"
+ description += "**cve_container_image_id:** " + str(cve_container_image_id) + "\n"
+ description += "**cve_description:** " + str(cve_description) + "\n"
+ description += "**cve_severity:** " + str(cve_severity) + "\n"
+ description += "**cve_overall_score:** " + str(cve_overall_score) + "\n"
+ description += "**cve_type:** " + str(cve_type) + "\n"
+ description += "**host_name:** " + str(host_name) + "\n"
+ description += "**cloud_account_id:** " + str(cloud_account_id) + "\n"
+ description += "**masked:** " + str(masked) + "\n"
+ finding = Finding(
+ title="Threatmapper_Vuln_Report-" + cve_id,
+ description=description,
+ component_name=cve_caused_by_package,
+ severity=self.severity(cve_severity),
+ static_finding=False,
+ dynamic_finding=True,
+ mitigation=cve_fixed_in,
+ references=cve_link,
+ cve=cve_id,
+ test=test,
+ )
+ return finding
+
+ def severity(self, input):
+ if input is None:
+ return "Info"
+ else:
+ return input.capitalize()
diff --git a/dojo/tools/kubescape/parser.py b/dojo/tools/kubescape/parser.py
index a5797e8402c..be9cd6d741e 100644
--- a/dojo/tools/kubescape/parser.py
+++ b/dojo/tools/kubescape/parser.py
@@ -1,4 +1,5 @@
import json
+import textwrap
from dojo.models import Finding
@@ -13,13 +14,36 @@ def get_label_for_scan_types(self, scan_type):
def get_description_for_scan_types(self, scan_type):
return "Import result of Kubescape JSON output."
+ def find_control_summary_by_id(self, data, control_id):
+        # Browse summaryDetails for a matching control ID and return the first occurrence, if any.
+ try:
+ controls = data.get("summaryDetails", {}).get("controls", {})
+ return controls.get(control_id, None)
+ except ValueError:
+ return None
+
+ @staticmethod
+ def __hyperlink(link: str) -> str:
+ return "[" + link + "](" + link + ")"
+
def severity_mapper(self, input):
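+        # Bucket the control summary's scoreFactor (a 0-10 score) into DefectDojo severities.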
- if input == 1:
+ if input <= 4:
return "Low"
- elif input == 2:
+ elif input <= 7:
return "Medium"
- elif input == 3:
+ elif input <= 9:
return "High"
+ elif input <= 10:
+ return "Critical"
+
+ def parse_resource_id(self, resource_id):
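+        # resourceID is a '/'-separated path; take its last two segments as the resource type and name, or (None, None) if it cannot be split.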
+ try:
+ parts = resource_id.split("/")
+ resource_type = parts[-2]
+ resource_name = parts[-1]
+ return resource_type, resource_name
+ except IndexError:
+ return None, None
def get_findings(self, filename, test):
findings = []
@@ -29,27 +53,70 @@ def get_findings(self, filename, test):
data = {}
for resource in data["resources"]:
resourceid = resource["resourceID"]
+ resource_type, resource_name = self.parse_resource_id(resourceid)
results = ([each for each in data["results"] if each.get('resourceID') == resourceid])
controls = results[0].get("controls", [])
- try:
- prioritizedResource = results[0]["prioritizedResource"]["severity"]
- except KeyError:
- prioritizedResource = "Info"
+
for control in controls:
- controlID = control['controlID']
- description = control["name"] + "\n\n"
- description += "**resourceID:** " + resourceid + "\n"
- description += "**resource object:** " + str(resource["object"]) + "\n"
- description += "**controlID:** " + controlID + "\n"
- description += "**Rules:** " + str(control["rules"]) + "\n"
- if self.severity_mapper(prioritizedResource) is None:
- severity = "Info"
- else:
- severity = self.severity_mapper(prioritizedResource)
- find = Finding(title=str(controlID),
- test=test,
- description=description,
- severity=severity,
- static_finding=True)
- findings.append(find)
+ # This condition is true if the result doesn't contain the status for each control (old format)
+ retrocompatibility_condition = 'status' not in control or 'status' not in control['status']
+ if retrocompatibility_condition or control["status"]["status"] == "failed":
+ control_name = control["name"]
+ if resource_type and resource_name and control_name:
+ title = f"{control_name} - {resource_type} {resource_name}"
+ else:
+ title = f"{control_name} - {resourceid}"
+ controlID = control['controlID']
+
+ # Find control details
+ controlSummary = self.find_control_summary_by_id(data, controlID)
+ if controlSummary is None:
+ severity = "Info"
+ mitigation = ""
+ else:
+ severity = self.severity_mapper(controlSummary.get("scoreFactor", 0))
+ # Define mitigation if available
+ if "mitigation" in controlSummary:
+ mitigation = controlSummary["mitigation"]
+ else:
+ mitigation = ""
+
+ armoLink = f"https://hub.armosec.io/docs/{controlID.lower()}"
+                    description = "**Summary:** " + f"The resource '{resourceid}' has failed the control '{control_name}'." + "\n"
+ if controlSummary is not None and "description" in controlSummary:
+ description += "**Description:** " + controlSummary["description"] + "\n"
+
+ # Define category if available
+ if controlSummary is not None and "category" in controlSummary and "subCategory" in controlSummary["category"]:
+ category_name = controlSummary["category"]["name"]
+ category_subname = controlSummary["category"]["subCategory"]["name"]
+ category = f"{category_name} > {category_subname}"
+ description += "**Category:** " + category + "\n"
+ elif controlSummary is not None and "category" in controlSummary and "name" in controlSummary["category"]:
+ category = controlSummary["category"]["name"]
+ description += "**Category:** " + category + "\n"
+
+ description += "View control details here: " + self.__hyperlink(armoLink)
+
+                    steps_to_reproduce = "The following rules have failed:" + "\n"
+ steps_to_reproduce += "\t**Rules:** " + str(json.dumps(control["rules"], indent=4)) + "\n"
+
+ steps_to_reproduce += "Resource object may contain evidence:" + "\n"
+ steps_to_reproduce += "\t**Resource object:** " + str(json.dumps(resource["object"], indent=4))
+
+ references = armoLink
+
+ find = Finding(
+ title=textwrap.shorten(title, 150),
+ test=test,
+ description=description,
+ mitigation=mitigation,
+ steps_to_reproduce=steps_to_reproduce,
+ references=references,
+ severity=severity,
+ component_name=resourceid,
+ static_finding=True,
+ dynamic_finding=False
+ )
+ findings.append(find)
return findings
diff --git a/dojo/tools/nancy/__init__.py b/dojo/tools/nancy/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/dojo/tools/pwn_sast/__init__.py b/dojo/tools/pwn_sast/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/dojo/user/urls.py b/dojo/user/urls.py
index 22fb861e22f..adf3dd80cb7 100644
--- a/dojo/user/urls.py
+++ b/dojo/user/urls.py
@@ -30,26 +30,26 @@
if settings.FORGOT_PASSWORD:
urlpatterns.extend([
re_path(r'^password_reset/$', views.DojoPasswordResetView.as_view(
- template_name='dojo/password_reset.html',
+ template_name='login/password_reset.html',
), name="password_reset"),
re_path(r'^password_reset/done/$', auth_views.PasswordResetDoneView.as_view(
- template_name='dojo/password_reset_done.html',
+ template_name='login/password_reset_done.html',
), name='password_reset_done'),
re_path(r'^reset/(?P[0-9A-Za-z_\-]+)/(?P[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,40})/$', auth_views.PasswordResetConfirmView.as_view(
- template_name='dojo/password_reset_confirm.html',
+ template_name='login/password_reset_confirm.html',
), name='password_reset_confirm'),
re_path(r'^reset/done/$', auth_views.PasswordResetCompleteView.as_view(
- template_name='dojo/password_reset_complete.html',
+ template_name='login/password_reset_complete.html',
), name='password_reset_complete'),
])
if settings.FORGOT_USERNAME:
urlpatterns.extend([
re_path(r'^forgot_username_done/$', auth_views.PasswordResetDoneView.as_view(
- template_name='dojo/forgot_username_done.html',
+ template_name='login/forgot_username_done.html',
), name="forgot_username_done"),
re_path(r'^forgot_username/$', views.DojoForgotUsernameView.as_view(
- template_name='dojo/forgot_username.html',
+ template_name='login/forgot_username.html',
success_url=reverse_lazy("forgot_username_done")
), name="forgot_username"),
])
diff --git a/dojo/user/views.py b/dojo/user/views.py
index c971d932c16..ea60c93fc1b 100644
--- a/dojo/user/views.py
+++ b/dojo/user/views.py
@@ -612,8 +612,8 @@ def send_mail(self, subject_template_name, email_template_name,
from_email = get_system_setting('email_from')
url = hyperlink.parse(settings.SITE_URL)
- subject_template_name = 'dojo/forgot_username_subject.html'
- email_template_name = 'notifications/mail/forgot_username.tpl'
+ subject_template_name = 'login/forgot_username_subject.html'
+ email_template_name = 'login/forgot_username.tpl'
context['site_name'] = url.host
context['protocol'] = url.scheme
context['domain'] = settings.SITE_URL[len(f'{url.scheme}://'):]
@@ -638,7 +638,7 @@ def send_mail(self, subject_template_name, email_template_name,
from_email = get_system_setting('email_from')
url = hyperlink.parse(settings.SITE_URL)
- email_template_name = 'notifications/mail/forgot_password.tpl'
+ email_template_name = 'login/forgot_password.tpl'
context['site_name'] = url.host
context['protocol'] = url.scheme
context['domain'] = settings.SITE_URL[len(f'{url.scheme}://'):]
diff --git a/dojo/views.py b/dojo/views.py
index 1baee23ad8f..09a0dcad73e 100644
--- a/dojo/views.py
+++ b/dojo/views.py
@@ -39,7 +39,7 @@ def action_history(request, cid, oid):
ct = ContentType.objects.get_for_id(cid)
obj = ct.get_object_for_this_type(pk=oid)
except (KeyError, ObjectDoesNotExist):
- raise Http404()
+ raise Http404
product_id = None
active_tab = None
@@ -136,7 +136,7 @@ def manage_files(request, oid, obj_type):
user_has_permission_or_403(request.user, obj, Permissions.Finding_Edit)
obj_vars = ('view_finding', 'finding_set')
else:
- raise Http404()
+ raise Http404
files_formset = ManageFileFormSet(queryset=obj.files.all())
error = False
@@ -194,7 +194,7 @@ def manage_files(request, oid, obj_type):
def protected_serve(request, path, document_root=None, show_indexes=False):
file = FileUpload.objects.get(file=path)
if not file:
- raise Http404()
+ raise Http404
object_set = list(file.engagement_set.all()) + list(file.test_set.all()) + list(file.finding_set.all())
# Should only one item (but not sure what type) in the list, so O(n=1)
for obj in object_set:
@@ -218,7 +218,7 @@ def access_file(request, fid, oid, obj_type, url=False):
obj = get_object_or_404(Finding, pk=oid)
user_has_permission_or_403(request.user, obj, Permissions.Finding_View)
else:
- raise Http404()
+ raise Http404
# If reaching this far, user must have permission to get file
file = get_object_or_404(FileUpload, pk=fid)
redirect_url = f'{settings.MEDIA_ROOT}/{file.file.url.lstrip(settings.MEDIA_URL)}'
diff --git a/dojo/wsgi.py b/dojo/wsgi.py
index 8f6a14863fe..1f79043d491 100644
--- a/dojo/wsgi.py
+++ b/dojo/wsgi.py
@@ -40,17 +40,17 @@ def is_debugger_listening(port):
if os.environ.get("DD_DEBUG") == "True" and not os.getenv("RUN_MAIN") and is_debugger_listening(debugpy_port) != 0:
logger.info(f"DD_DEBUG is set to True, setting remote debugging on port {debugpy_port}")
try:
- import debugpy
+ import debugpy # noqa: T100
# Required, otherwise debugpy will try to use the uwsgi binary as the python interpreter - https://github.com/microsoft/debugpy/issues/262
debugpy.configure({
"python": "python",
"subProcess": True
})
- debugpy.listen(("0.0.0.0", debugpy_port))
+ debugpy.listen(("0.0.0.0", debugpy_port)) # noqa: T100
if os.environ.get("DD_DEBUG_WAIT_FOR_CLIENT") == "True":
logger.info(f"Waiting for the debugging client to connect on port {debugpy_port}")
- debugpy.wait_for_client()
+ debugpy.wait_for_client() # noqa: T100
print("Debugging client connected, resuming execution")
except RuntimeError as e:
if str(e) != "Can't listen for client connections: [Errno 98] Address already in use":
diff --git a/helm/defectdojo/Chart.lock b/helm/defectdojo/Chart.lock
index b485770474c..74468c83ba1 100644
--- a/helm/defectdojo/Chart.lock
+++ b/helm/defectdojo/Chart.lock
@@ -4,15 +4,15 @@ dependencies:
version: 9.19.1
- name: postgresql
repository: https://charts.bitnami.com/bitnami
- version: 15.5.1
+ version: 15.5.9
- name: postgresql-ha
repository: https://charts.bitnami.com/bitnami
version: 9.4.11
- name: rabbitmq
repository: https://charts.bitnami.com/bitnami
- version: 14.3.1
+ version: 14.4.4
- name: redis
repository: https://charts.bitnami.com/bitnami
- version: 19.5.0
-digest: sha256:f7d84de0e09aa04522aca1b64fb2a297ad028c507144046f16da47d6750007dd
-generated: "2024-05-30T16:46:40.020662037Z"
+ version: 19.5.5
+digest: sha256:7ad88ea953ebef3acbd1270eeae206e4e650f2fb20f754e0d912688795500b18
+generated: "2024-06-24T18:56:55.876075791Z"
diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml
index 44d22f12954..ab21a0409cb 100644
--- a/helm/defectdojo/Chart.yaml
+++ b/helm/defectdojo/Chart.yaml
@@ -1,8 +1,8 @@
apiVersion: v2
-appVersion: "2.36.0-dev"
+appVersion: "2.37.0-dev"
description: A Helm chart for Kubernetes to install DefectDojo
name: defectdojo
-version: 1.6.137-dev
+version: 1.6.138-dev
icon: https://www.defectdojo.org/img/favicon.ico
maintainers:
- name: madchap
@@ -23,7 +23,7 @@ dependencies:
alias: postgresqlha
condition: postgresqlha.enabled
- name: rabbitmq
- version: ~14.3.0
+ version: ~14.4.0
repository: "https://charts.bitnami.com/bitnami"
condition: rabbitmq.enabled
- name: redis
diff --git a/helm/defectdojo/values.yaml b/helm/defectdojo/values.yaml
index 75f3d540012..1faba1520b3 100644
--- a/helm/defectdojo/values.yaml
+++ b/helm/defectdojo/values.yaml
@@ -457,7 +457,7 @@ cloudsql:
image:
# set repo and image tag of gce-proxy
repository: gcr.io/cloudsql-docker/gce-proxy
- tag: 1.35.3
+ tag: 1.35.4
pullPolicy: IfNotPresent
# set CloudSQL instance: 'project:zone:instancename'
instance: ""
diff --git a/requirements-lint.txt b/requirements-lint.txt
index 71fa5443a49..e022cdb619e 100644
--- a/requirements-lint.txt
+++ b/requirements-lint.txt
@@ -1 +1 @@
-ruff==0.4.7
\ No newline at end of file
+ruff==0.4.10
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 2f3742de47e..27eb0b2603f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,19 +3,19 @@ asteval==0.9.33
bleach==6.1.0
bleach[css]
celery==5.4.0
-coverage==7.5.3
+coverage==7.5.4
defusedxml==0.7.1
django_celery_results==2.5.1
django-auditlog==2.3.0
django-dbbackup==4.1.0
django-environ==0.11.2
-django-filter==23.5
+django-filter==24.2
django-imagekit==5.0.0
# This library is very outdated, but is a pillar of DefectDojo
# django-multiselectfield==0.1.12
git+https://github.com/DefectDojo/django-multiselectfield@master#egg=django-multiselectfield
django-polymorphic==3.1.0
-django-crispy-forms==2.0
+django-crispy-forms==2.2
django_extensions==3.2.3
django-slack==5.19.0
# This library is very outdated and not directly. It is used solely for migration
@@ -24,7 +24,7 @@ django-slack==5.19.0
git+https://github.com/DefectDojo/django-tagging@develop#egg=django-tagging
django-watson==1.6.3
django-prometheus==2.3.1
-Django==4.1.13
+Django==4.2.13
djangorestframework==3.14.0
html2text==2024.2.26
humanize==4.9.0
@@ -33,17 +33,17 @@ PyGithub==1.58.2
lxml==5.2.2
Markdown==3.6
mysqlclient==2.1.1
-openpyxl==3.1.3
+openpyxl==3.1.4
Pillow==10.3.0 # required by django-imagekit
psycopg2-binary==2.9.9
-cryptography==42.0.7
+cryptography==42.0.8
python-dateutil==2.9.0.post0
pytz==2024.1
-redis==5.0.4
+redis==5.0.7
requests==2.32.3
-sqlalchemy==2.0.30 # Required by Celery broker transport
+sqlalchemy==2.0.31 # Required by Celery broker transport
urllib3==1.26.18
-uWSGI==2.0.25.1
+uWSGI==2.0.26
vobject==0.9.7
whitenoise==5.2.0
titlecase==2.4.1
@@ -51,14 +51,14 @@ social-auth-app-django==5.4.1
social-auth-core==4.5.4
Python-jose==3.3.0
gitpython==3.1.43
-debugpy==1.8.1
+debugpy==1.8.2
python-gitlab==4.6.0
cpe==1.2.1
-packageurl-python==0.15.0
+packageurl-python==0.15.1
django-crum==0.7.9
JSON-log-formatter==1.0
django-split-settings==1.3.1
-django-debug-toolbar==4.3.0
+django-debug-toolbar==4.4.2
django-debug-toolbar-request-history==0.1.4
vcrpy==6.0.1
vcrpy-unittest==0.1.7
@@ -70,12 +70,12 @@ hyperlink==21.0.0
django-test-migrations==1.3.0
djangosaml2==1.9.3
drf-spectacular==0.27.2
-drf-spectacular-sidecar==2024.5.1
+drf-spectacular-sidecar==2024.6.1
django-ratelimit==4.1.0
argon2-cffi==23.1.0
blackduck==1.1.3
pycurl==7.45.3 # Required for Celery Broker AWS (SQS) support
-boto3==1.34.117 # Required for Celery Broker AWS (SQS) support
+boto3==1.34.135 # Required for Celery Broker AWS (SQS) support
netaddr==1.3.0
vulners==2.1.7
fontawesomefree==6.5.1
diff --git a/ruff.toml b/ruff.toml
index 20593fa2390..1349d475e92 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -39,18 +39,24 @@ select = [
"UP",
"YTT",
"ASYNC",
- "EXE",
"TRIO",
+ "S2", "S5", "S7",
"C4",
+ "T10",
"DJ003", "DJ012", "DJ013",
"EM",
+ "EXE",
"ICN",
"LOG",
+ "INP",
"SLOT",
+ "RSE",
"PD",
"PGH",
+ "TRY003",
"TRY004",
"TRY2",
+ "TRY302",
"FLY",
"NPY",
"AIR",
diff --git a/unittests/scans/deepfence_threatmapper/compliance_report.csv b/unittests/scans/deepfence_threatmapper/compliance_report.csv
new file mode 100644
index 00000000000..573b460c545
--- /dev/null
+++ b/unittests/scans/deepfence_threatmapper/compliance_report.csv
@@ -0,0 +1,8 @@
+@timestamp,compliance_check_type,count,doc_id,host_name,cloud_account_id,masked,node_id,node_name,node_type,status,test_category,test_desc,test_info,test_number
+2024-01-25 11:17:30.272 +0000 UTC,gdpr,,,cf-ngm-dev-cicd-ip-xxx-xxx-xxx-xxx.eu-central-1.compute.internal,182758849647,False,149c4791fc6502e5a30f738d4eaba982,cf-ngm-dev-cicd-ip-xxx-xxx-xxx-xxx.eu-central-1.compute.internal,host,pass,Docker Files,3.6 - PASS,Ensure that /fenced/mnt/host/etc/docker directory permissions are set to 755 or more restrictively (Automated),gdpr_3.6
+2024-01-25 11:17:30.272 +0000 UTC,gdpr,,,cf-ngm-dev-cicd-ip-xxx-xxx-xxx-xxx.eu-central-1.compute.internal,182758849647,False,47edf84375c0bb90f48fa61684883b04,cf-ngm-dev-cicd-ip-xxx-xxx-xxx-xxx.eu-central-1.compute.internal,host,info,Docker Files,3.12 - INFO,Ensure that the Docker server certificate file permissions are set to 444 or more restrictively (Automated),gdpr_3.12
+2024-01-25 11:17:30.272 +0000 UTC,gdpr,,,cf-ngm-dev-cicd-ip-xxx-xxx-xxx-xxx.eu-central-1.compute.internal,182758849647,False,ad1965efb22e226df8a95a361a30cbc3,cf-ngm-dev-cicd-ip-xxx-xxx-xxx-xxx.eu-central-1.compute.internal,host,info,Docker Files,3.2 - INFO,Ensure that docker.service file permissions are appropriately set (Automated),gdpr_3.2
+2024-01-25 11:17:30.272 +0000 UTC,gdpr,,,cf-ngm-dev-cicd-ip-xxx-xxx-xxx-xxx.eu-central-1.compute.internal,182758849647,False,1db7418dc73082cdfc1c9e0d5ba5f6e0,cf-ngm-dev-cicd-ip-xxx-xxx-xxx-xxx.eu-central-1.compute.internal,host,warn,Audit,1.1.12 - WARN,1.1.12 Ensure auditing is configured for Dockerfiles and directories - /fenced/mnt/host/etc/containerd/config.toml (Automated),gdpr_1.1.12
+2024-01-25 11:17:30.272 +0000 UTC,gdpr,,,cf-ngm-dev-cicd-ip-xxx-xxx-xxx-xxx.eu-central-1.compute.internal,182758849647,False,2c3f915f3e72d6e16d192ae9aa71c704,cf-ngm-dev-cicd-ip-xxx-xxx-xxx-xxx.eu-central-1.compute.internal,host,pass,Docker Files,3.16 - PASS,Ensure that the Docker socket file permissions are set to 660 or more restrictively (Automated),gdpr_3.16
+2024-01-25 11:17:30.272 +0000 UTC,gdpr,,,cf-ngm-dev-cicd-ip-xxx-xxx-xxx-xxx.eu-central-1.compute.internal,182758849647,False,d158a60b1c623d11ce88cf68555e08af,cf-ngm-dev-cicd-ip-xxx-xxx-xxx-xxx.eu-central-1.compute.internal,host,info,Docker Files,3.4 - INFO,Ensure that docker.socket file permissions are set to 644 or more restrictive (Automated),gdpr_3.4
+2024-01-25 11:17:30.272 +0000 UTC,gdpr,,,cf-ngm-dev-cicd-ip-xxx-xxx-xxx-xxx.eu-central-1.compute.internal,182758849647,False,4d1d5b7a279ce57b0f76be61b461d22c,cf-ngm-dev-cicd-ip-xxx-xxx-xxx-xxx.eu-central-1.compute.internal,host,info,Docker Files,3.14 - INFO,Ensure that the Docker server certificate key file permissions are set to 400 (Automated),gdpr_3.14
\ No newline at end of file
diff --git a/unittests/scans/deepfence_threatmapper/compliance_report.xlsx b/unittests/scans/deepfence_threatmapper/compliance_report.xlsx
new file mode 100644
index 00000000000..2fc8e933b96
Binary files /dev/null and b/unittests/scans/deepfence_threatmapper/compliance_report.xlsx differ
diff --git a/unittests/scans/deepfence_threatmapper/malware_report.csv b/unittests/scans/deepfence_threatmapper/malware_report.csv
new file mode 100644
index 00000000000..3dd1bb8b6cd
--- /dev/null
+++ b/unittests/scans/deepfence_threatmapper/malware_report.csv
@@ -0,0 +1,10 @@
+Rule Name,Class,File Name,Summary,Severity,Node Name,NodeType,Container Name,Kubernetes Cluster Name
+MD5_Constants,Crypto Mining,/tmp/Deepfence/YaraHunter/df_db09257b02e615049e0aecc05be2dc2401735e67db4ab74225df777c62c39753/usr/sbin/mkfs.cramfs,The matched rule file's author is phoul (@phoul) .The file has a rule match that It is a crypto signature.Look for MD5 constants .The matched rule file's Date is 2014-01 .The matched rule file's version is 0.2 .,low,portal / secpipe-core-prd-ip-zzz-zzz-zzz-zzz.eu-west-1.compute.internal,container,portal,secpipe-core-prd
+MD5_Constants,Crypto Mining,/tmp/Deepfence/YaraHunter/df_80ffd64c318595cf17a9ea482315b0c2a03572fb6e41f7ee53ec27786158c27c/usr/sbin/mkfs.cramfs,The matched rule file's author is phoul (@phoul) .The file has a rule match that It is a crypto signature.Look for MD5 constants .The matched rule file's Date is 2014-01 .The matched rule file's version is 0.2 .,low,portal / secpipe-core-prd-ip-uuu-uuu-uuu-uuu.eu-west-1.compute.internal,container,portal,secpipe-core-prd
+CRC32_table,Crypto Mining,/tmp/Deepfence/YaraHunter/df_0dfa48a10ee6ca92c7d910ecd72a6207978f7f1bdc36870bf1587625f0270d37/lib/libz.so.1.2.13,The matched rule file's author is _pusher_ .The file has a rule match that It is a crypto signature.Look for CRC32 table .The matched rule file's Date is 2015-05 .The matched rule file's version is 0.1 .,low,nginx / secpipe-core-prd-ip-kkk-kkk-kkk-kkk.eu-west-1.compute.internal,container,nginx,secpipe-core-prd
+CRC32_poly_Constant,Crypto Mining,/tmp/Deepfence/YaraHunter/df_0dfa48a10ee6ca92c7d910ecd72a6207978f7f1bdc36870bf1587625f0270d37/lib/libz.so.1.2.13,The matched rule file's author is _pusher_ .The file has a rule match that It is a crypto signature.Look for CRC32 [poly] .The matched rule file's Date is 2015-05 .The matched rule file's version is 0.1 .,low,nginx / secpipe-core-prd-ip-kkk-kkk-kkk-kkk.eu-west-1.compute.internal,container,nginx,secpipe-core-prd
+MD5_Constants,Crypto Mining,/tmp/Deepfence/YaraHunter/df_cc54a20c0e1cee5e4951d047e13f69551cfddedbd67a05cc4e3de61939b10e7a/usr/sbin/mkfs.cramfs,The matched rule file's author is phoul (@phoul) .The file has a rule match that It is a crypto signature.Look for MD5 constants .The matched rule file's Date is 2014-01 .The matched rule file's version is 0.2 .,low,portal / secpipe-core-prd-ip-yyy-yyy-yyy-yyy.eu-west-1.compute.internal,container,portal,secpipe-core-prd
+MD5_Constants,Crypto Mining,/tmp/Deepfence/YaraHunter/df_5e10a8e665e9def9227c98ec630c80d8c8b441c389c3d2b25d7c8d3b07c94eb4/sbin/mkfs.cramfs,The matched rule file's author is phoul (@phoul) .The file has a rule match that It is a crypto signature.Look for MD5 constants .The matched rule file's Date is 2014-01 .The matched rule file's version is 0.2 .,low,rabbitmq / secpipe-core-prd-ip-xxx-xxx-xxx-xxx.eu-west-1.compute.internal,container,rabbitmq,secpipe-core-prd
+BASE64_table,Crypto Mining,/tmp/Deepfence/YaraHunter/df_5e10a8e665e9def9227c98ec630c80d8c8b441c389c3d2b25d7c8d3b07c94eb4/lib/x86_64-linux-gnu/libresolv-2.31.so,The matched rule file's author is _pusher_ .The file has a rule match that It is a crypto signature.Look for Base64 table .The matched rule file's Date is 2015-07 .The matched rule file's version is 0.1 .,low,rabbitmq / secpipe-core-prd-ip-xxx-xxx-xxx-xxx.eu-west-1.compute.internal,container,rabbitmq,secpipe-core-prd
+BASE64_table,Crypto Mining,/tmp/Deepfence/YaraHunter/df_5e10a8e665e9def9227c98ec630c80d8c8b441c389c3d2b25d7c8d3b07c94eb4/opt/bitnami/erlang/lib/erlang/erts-13.1.3/bin/beam.smp,The matched rule file's author is _pusher_ .The file has a rule match that It is a crypto signature.Look for Base64 table .The matched rule file's Date is 2015-07 .The matched rule file's version is 0.1 .,low,rabbitmq / secpipe-core-prd-ip-xxx-xxx-xxx-xxx.eu-west-1.compute.internal,container,rabbitmq,secpipe-core-prd
+CRC32_table,Crypto Mining,/tmp/Deepfence/YaraHunter/df_5e10a8e665e9def9227c98ec630c80d8c8b441c389c3d2b25d7c8d3b07c94eb4/lib/x86_64-linux-gnu/libz.so.1.2.11,The matched rule file's author is _pusher_ .The file has a rule match that It is a crypto signature.Look for CRC32 table .The matched rule file's Date is 2015-05 .The matched rule file's version is 0.1 .,low,rabbitmq / secpipe-core-prd-ip-xxx-xxx-xxx-xxx.eu-west-1.compute.internal,container,rabbitmq,secpipe-core-prd
\ No newline at end of file
diff --git a/unittests/scans/deepfence_threatmapper/malware_report.xlsx b/unittests/scans/deepfence_threatmapper/malware_report.xlsx
new file mode 100644
index 00000000000..0980439a07e
Binary files /dev/null and b/unittests/scans/deepfence_threatmapper/malware_report.xlsx differ
diff --git a/unittests/scans/deepfence_threatmapper/secret_report.csv b/unittests/scans/deepfence_threatmapper/secret_report.csv
new file mode 100644
index 00000000000..ea8bbd8b4e0
--- /dev/null
+++ b/unittests/scans/deepfence_threatmapper/secret_report.csv
@@ -0,0 +1,8 @@
+Filename,Content,Name,Rule,Severity,Node Name,Container Name,Kubernetes Cluster Name,Signature
+usr/share/doc/curl-8.3.0/TheArtOfHttpScripting.md,"""\n curl http://user:password@example.org/""",Username and password in URI,110,high,fluent-bit / secpipe-core-prd-ip-xxx-xxx-xxx-xxx.eu-west-1.compute.internal,fluent-bit,secpipe-core-prd,"([\w+]{1,24})(://)([^$<]{1})([^\s"";]{1,}):([^$<]{1})([^\s"";/]{1,})@[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,24}([^\s]+)"
+usr/share/doc/curl-8.3.0/TheArtOfHttpScripting.md,"""\n curl http://user:password@example.org/""",Username and password in URI,110,high,fluent-bit / secpipe-core-prd-ip-yyy-yyy-yyy-yyy.eu-west-1.compute.internal,fluent-bit,secpipe-core-prd,"([\w+]{1,24})(://)([^$<]{1})([^\s"";]{1,}):([^$<]{1})([^\s"";/]{1,})@[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,24}([^\s]+)"
+var/lib/yum/history/history-2023-10-12.sqlite,""".sqlite""",SQLite database file,12,low,fluent-bit / secpipe-core-prd-ip-yyy-yyy-yyy-yyy.eu-west-1.compute.internal,fluent-bit,secpipe-core-prd,
+usr/share/mime/magic,"""\n\u003e0=\u0000%-----BEGIN PGP PRIVATE KEY BLOCK-----""",Contains a private key,127,medium,fluent-bit / secpipe-core-prd-ip-yyy-yyy-yyy-yyy.eu-west-1.compute.internal,fluent-bit,secpipe-core-prd,-----BEGIN (EC|RSA|DSA|OPENSSH|PGP) PRIVATE KEY
+usr/share/doc/curl-8.3.0/TheArtOfHttpScripting.md,"""\n curl http://user:password@example.org/""",Username and password in URI,110,high,fluent-bit / secpipe-core-prd-ip-zzz-zzz-zzz-zzz.eu-west-1.compute.internal,fluent-bit,secpipe-core-prd,"([\w+]{1,24})(://)([^$<]{1})([^\s"";]{1,}):([^$<]{1})([^\s"";/]{1,})@[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,24}([^\s]+)"
+var/lib/yum/history/history-2023-10-12.sqlite,""".sqlite""",SQLite database file,12,low,fluent-bit / secpipe-core-prd-ip-zzz-zzz-zzz-zzz.eu-west-1.compute.internal,fluent-bit,secpipe-core-prd,
+usr/share/mime/magic,"""\n\u003e0=\u0000%-----BEGIN PGP PRIVATE KEY BLOCK-----""",Contains a private key,127,medium,fluent-bit / secpipe-core-prd-ip-zzz-zzz-zzz-zzz.eu-west-1.compute.internal,fluent-bit,secpipe-core-prd,-----BEGIN (EC|RSA|DSA|OPENSSH|PGP) PRIVATE KEY
\ No newline at end of file
diff --git a/unittests/scans/deepfence_threatmapper/secret_report.xlsx b/unittests/scans/deepfence_threatmapper/secret_report.xlsx
new file mode 100644
index 00000000000..67e9901bd06
Binary files /dev/null and b/unittests/scans/deepfence_threatmapper/secret_report.xlsx differ
diff --git a/unittests/scans/deepfence_threatmapper/vulnerability_report.csv b/unittests/scans/deepfence_threatmapper/vulnerability_report.csv
new file mode 100644
index 00000000000..a7b22eced7b
--- /dev/null
+++ b/unittests/scans/deepfence_threatmapper/vulnerability_report.csv
@@ -0,0 +1,4 @@
+@timestamp,cve_attack_vector,cve_caused_by_package,cve_container_image,scan_id,cve_container_image_id,cve_cvss_score,cve_description,cve_fixed_in,cve_id,cve_link,cve_severity,cve_overall_score,cve_type,host_name,cloud_account_id,masked
+2024-02-22 15:54:17.939 +0000 UTC,cvss:3.1/av:l/ac:l/pr:n/ui:r/s:u/c:n/i:n/a:l,libsepol:2.5-8.1.amzn2.0.2,aws-node / secpipe-core-prd-ip-10-xxx-xx-xx.eu-west-1.compute.internal,8031c90cd679ae9fb4d2689e645205d1b403e970b2fbcc19249a8b851996bacf-1708612867,8031c90cd679ae9fb4d2689e645205d1b403e970b2fbcc19249a8b851996bacf,3.3,The CIL compiler in SELinux 3.2 has a use-after-free in __cil_verify_classperms (called from __cil_verify_classpermission and __cil_pre_verify_helper).,2.5-10.amzn2.0.1,CVE-2021-36084,https://www.cve.org/CVERecord?id=CVE-2021-36084,low,3.3,base,secpipe-core-prd-ip-10-xxx-xx-xx.eu-west-1.compute.internal,,False
+2024-02-07 16:03:35.325 +0000 UTC,cvss:3.1/av:l/ac:l/pr:l/ui:n/s:u/c:h/i:n/a:n,libcurl3-gnutls:7.74.0-1.3+deb11u7,celery / secpipe-core-prd-ip-10-xxx-xx-xx.eu-west-1.compute.internal,6e86b864c9e8c64b006074335819dbfad83a183ddda541edf3e755586d25870c-1707316590,6e86b864c9e8c64b006074335819dbfad83a183ddda541edf3e755586d25870c,5.5,"An authentication bypass vulnerability exists in libcurl prior to v8.0.0 where it reuses a previously established SSH connection despite the fact that an SSH option was modified, which should have prevented reuse. libcurl maintains a pool of previously used connections to reuse them for subsequent transfers if the configurations match. However, two SSH settings were omitted from the configuration check, allowing them to match easily, potentially leading to the reuse of an inappropriate connection.",8.0.1-1.amzn2.0.1,CVE-2023-27538,https://www.cve.org/CVERecord?id=CVE-2023-27538,medium,5.5,base,secpipe-core-prd-ip-10-xxx-xx-xx.eu-west-1.compute.internal,,False
+2024-02-07 16:10:15.317 +0000 UTC,av:l/ac:h/au:n/c:c/i:n/a:n,openssl:1.1.1w-0+deb11u1,deepfence-agent / secpipe-core-prd-ip-10-xxx-xx-xx.eu-west-1.compute.internal,d8e1a0a630121994c1f6cf7486c2573781827c0c7c779860ce88b9eb0777218e-1707316590,d8e1a0a630121994c1f6cf7486c2573781827c0c7c779860ce88b9eb0777218e,4.0,"OpenSSL 0.9.8i on the Gaisler Research LEON3 SoC on the Xilinx Virtex-II Pro FPGA uses a Fixed Width Exponentiation (FWE) algorithm for certain signature calculations, and does not verify the signature before providing it to a caller, which makes it easier for physically proximate attackers to determine the private key via a modified supply voltage for the microprocessor, related to a ""fault-based attack.""",,CVE-2010-0928,https://www.cve.org/CVERecord?id=CVE-2010-0928,medium,4.0,base,secpipe-core-prd-ip-10-xxx-xx-xx.eu-west-1.compute.internal,,False
\ No newline at end of file
diff --git a/unittests/scans/deepfence_threatmapper/vulnerability_report.xlsx b/unittests/scans/deepfence_threatmapper/vulnerability_report.xlsx
new file mode 100644
index 00000000000..8752b80d15c
Binary files /dev/null and b/unittests/scans/deepfence_threatmapper/vulnerability_report.xlsx differ
diff --git a/unittests/test_remote_user.py b/unittests/test_remote_user.py
index c039e006db6..28d9a139bdb 100644
--- a/unittests/test_remote_user.py
+++ b/unittests/test_remote_user.py
@@ -33,12 +33,9 @@ def test_disabled(self):
)
def test_basic(self):
resp = self.client1.get('/profile',
- # TODO - This can be replaced by following lines in the future
- # Using of "headers" is supported since Django 4.2
- HTTP_REMOTE_USER=self.user.username,
- # headers={
- # "Remote-User": self.user.username
- # }
+ headers={
+ "Remote-User": self.user.username
+ }
)
self.assertEqual(resp.status_code, 200)
@@ -51,18 +48,12 @@ def test_basic(self):
)
def test_update_user(self):
resp = self.client1.get('/profile',
- # TODO - This can be replaced by following lines in the future
- # Using of "headers" is supported since Django 4.2
- HTTP_REMOTE_USER=self.user.username,
- HTTP_REMOTE_FIRSTNAME="new_first",
- HTTP_REMOTE_LASTNAME="new_last",
- HTTP_REMOTE_EMAIL="new@mail.com",
- # headers = {
- # "Remote-User": self.user.username,
- # "Remote-Firstname": "new_first",
- # "Remote-Lastname": "new_last",
- # "Remote-Email": "new@mail.com",
- # }
+ headers={
+ "Remote-User": self.user.username,
+ "Remote-Firstname": "new_first",
+ "Remote-Lastname": "new_last",
+ "Remote-Email": "new@mail.com",
+ }
)
self.assertEqual(resp.status_code, 200)
updated_user = User.objects.get(pk=self.user.pk)
@@ -78,14 +69,10 @@ def test_update_user(self):
)
def test_update_groups_cleanup(self):
resp = self.client1.get('/profile',
- # TODO - This can be replaced by following lines in the future
- # Using of "headers" is supported since Django 4.2
- HTTP_REMOTE_USER=self.user.username,
- HTTP_REMOTE_GROUPS=self.group1.name,
- # headers = {
- # "Remote-User": self.user.username,
- # "Remote-Groups": self.group1.name,
- # }
+ headers={
+ "Remote-User": self.user.username,
+ "Remote-Groups": self.group1.name,
+ }
)
self.assertEqual(resp.status_code, 200)
dgms = Dojo_Group_Member.objects.filter(user=self.user)
@@ -93,14 +80,10 @@ def test_update_groups_cleanup(self):
self.assertEqual(dgms.first().group.name, self.group1.name)
resp = self.client2.get('/profile',
- # TODO - This can be replaced by following lines in the future
- # Using of "headers" is supported since Django 4.2
- HTTP_REMOTE_USER=self.user.username,
- HTTP_REMOTE_GROUPS=self.group2.name,
- # headers = {
- # "Remote-User": self.user.username,
- # "Remote-Groups": self.group2.name,
- # }
+ headers={
+ "Remote-User": self.user.username,
+ "Remote-Groups": self.group2.name,
+ }
)
self.assertEqual(resp.status_code, 200)
dgms = Dojo_Group_Member.objects.all().filter(user=self.user)
@@ -115,14 +98,10 @@ def test_update_groups_cleanup(self):
)
def test_update_multiple_groups_cleanup(self):
resp = self.client1.get('/profile',
- # TODO - This can be replaced by following lines in the future
- # Using of "headers" is supported since Django 4.2
- HTTP_REMOTE_USER=self.user.username,
- HTTP_REMOTE_GROUPS=f"{self.group1.name},{self.group2.name}",
- # headers = {
- # "Remote-User": self.user.username,
- # "Remote-Groups": f"{self.group1.name},{self.group2.name}",
- # }
+ headers={
+ "Remote-User": self.user.username,
+ "Remote-Groups": f"{self.group1.name},{self.group2.name}",
+ }
)
self.assertEqual(resp.status_code, 200)
dgms = Dojo_Group_Member.objects.filter(user=self.user)
@@ -136,26 +115,18 @@ def test_update_multiple_groups_cleanup(self):
)
def test_update_groups_no_cleanup(self):
resp = self.client1.get('/profile',
- # TODO - This can be replaced by following lines in the future
- # Using of "headers" is supported since Django 4.2
- HTTP_REMOTE_USER=self.user.username,
- HTTP_REMOTE_GROUPS=self.group1.name,
- # headers = {
- # "Remote-User": self.user.username,
- # "Remote-Groups": self.group1.name,
- # }
+ headers={
+ "Remote-User": self.user.username,
+ "Remote-Groups": self.group1.name,
+ }
)
self.assertEqual(resp.status_code, 200)
resp = self.client2.get('/profile',
- # TODO - This can be replaced by following lines in the future
- # Using of "headers" is supported since Django 4.2
- HTTP_REMOTE_USER=self.user.username,
- HTTP_REMOTE_GROUPS=self.group2.name,
- # headers = {
- # "Remote-User": self.user.username,
- # "Remote-Groups": self.group2.name,
- # }
+ headers={
+ "Remote-User": self.user.username,
+ "Remote-Groups": self.group2.name,
+ }
)
self.assertEqual(resp.status_code, 200)
dgms = Dojo_Group_Member.objects.filter(user=self.user)
@@ -169,12 +140,9 @@ def test_update_groups_no_cleanup(self):
def test_trusted_proxy(self):
resp = self.client1.get('/profile',
REMOTE_ADDR='192.168.0.42',
- # TODO - This can be replaced by following lines in the future
- # Using of "headers" is supported since Django 4.2
- HTTP_REMOTE_USER=self.user.username,
- # headers = {
- # "Remote-User": self.user.username,
- # }
+ headers={
+ "Remote-User": self.user.username,
+ }
)
self.assertEqual(resp.status_code, 200)
@@ -187,12 +155,9 @@ def test_untrusted_proxy(self):
with self.assertLogs('dojo.remote_user', level='DEBUG') as cm:
resp = self.client1.get('/profile',
REMOTE_ADDR='192.168.1.42',
- # TODO - This can be replaced by following lines in the future
- # Using of "headers" is supported since Django 4.2
- HTTP_REMOTE_USER=self.user.username,
- # headers = {
- # "Remote-User": self.user.username,
- # }
+ headers={
+ "Remote-User": self.user.username,
+ }
)
self.assertEqual(resp.status_code, 302)
self.assertIn('Requested came from untrusted proxy', cm.output[0])
diff --git a/unittests/test_rest_framework.py b/unittests/test_rest_framework.py
index 823a10f3b59..ce1ad77da16 100644
--- a/unittests/test_rest_framework.py
+++ b/unittests/test_rest_framework.py
@@ -175,20 +175,6 @@ def wrapper(self, *args, **kwargs):
return decorate
-# def testIsBroken(method):
-# return tag("broken")(method)
-
-
-def check_response_valid(expected_code, response):
- def _data_to_str(response):
- if hasattr(response, "data"):
- return response.data
- return None
-
- assert response.status_code == expected_code, \
- f"Response invalid, returned with code {response.status_code}\nResponse Data:\n{_data_to_str(response)}"
-
-
def format_url(path):
return f"{BASE_API_URL}{path}"
@@ -367,29 +353,29 @@ def setUp(self):
self.url = reverse(self.viewname + '-list')
self.schema = open_api3_json_schema
+ def setUp_not_authorized(self):
+ testuser = User.objects.get(id=3)
+ token = Token.objects.get(user=testuser)
+ self.client = APIClient()
+ self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
+
+ def setUp_global_reader(self):
+ testuser = User.objects.get(id=5)
+ token = Token.objects.get(user=testuser)
+ self.client = APIClient()
+ self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
+
+ def setUp_global_owner(self):
+ testuser = User.objects.get(id=6)
+ token = Token.objects.get(user=testuser)
+ self.client = APIClient()
+ self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
+
def check_schema(self, schema, obj):
schema_checker = SchemaChecker(self.schema["components"])
# print(vars(schema_checker))
schema_checker.check(self.schema, obj)
- # def get_valid_object_id(self):
- # response = self.client.get(format_url(f"/{self.viewname}/"))
- # check_response_valid(status.HTTP_200_OK, response)
- # if len(response.data["results"]) == 0:
- # return None
-
- # return response.data["results"][0].get('id', None)
-
- def get_endpoint_schema(self, path, method):
- paths = self.schema["paths"]
- methods = paths.get(path, None)
- assert methods is not None, f"{path} not found in {list(paths.keys())}"
-
- endpoint = methods.get(method, None)
- assert endpoint is not None, f"Method {method} not found in {list(methods.keys())}"
-
- return endpoint
-
def check_schema_response(self, method, status_code, response, detail=False):
detail_path = '{id}/' if detail else ''
endpoints_schema = self.schema["paths"][format_url(f"/{self.endpoint_path}/{detail_path}")]
@@ -397,64 +383,7 @@ def check_schema_response(self, method, status_code, response, detail=False):
obj = response.data
self.check_schema(schema, obj)
- @skipIfNotSubclass(ListModelMixin)
- def test_list(self):
- # print(open_api3_json_schema)
- # validator = ResponseValidator(spec)
-
- check_for_tags = False
- if hasattr(self.endpoint_model, 'tags') and self.payload and self.payload.get('tags', None):
- # create a new instance first to make sure there's at least 1 instance with tags set by payload to trigger tag handling code
- logger.debug('creating model with endpoints: %s', self.payload)
- response = self.client.post(self.url, self.payload)
- self.assertEqual(201, response.status_code, response.content[:1000])
-
- # print('response:', response.content[:1000])
- check_for_id = response.data['id']
- # print('id: ', check_for_id)
- check_for_tags = self.payload.get('tags', None)
-
- response = self.client.get(self.url, format='json')
- # print('response')
- # print(vars(response))
-
- # print('response.data')
- # print(response.data)
- # tags must be present in last entry, the one we created
- if check_for_tags:
- tags_found = False
- for result in response.data['results']:
- if result['id'] == check_for_id:
- # logger.debug('result.tags: %s', result.get('tags', ''))
- self.assertEqual(len(check_for_tags), len(result.get('tags', None)))
- for tag in check_for_tags:
- # logger.debug('looking for tag %s in tag list %s', tag, result['tags'])
- self.assertIn(tag, result['tags'])
- tags_found = True
- self.assertTrue(tags_found)
-
- self.assertEqual(200, response.status_code, response.content[:1000])
-
- self.check_schema_response('get', '200', response)
-
- @skipIfNotSubclass(CreateModelMixin)
- def test_create(self):
- length = self.endpoint_model.objects.count()
- response = self.client.post(self.url, self.payload)
- logger.debug('test_create_response:')
- logger.debug(response)
- logger.debug(response.data)
- self.assertEqual(201, response.status_code, response.content[:1000])
- self.assertEqual(self.endpoint_model.objects.count(), length + 1)
-
- if hasattr(self.endpoint_model, 'tags') and self.payload and self.payload.get('tags', None):
- self.assertEqual(len(self.payload.get('tags')), len(response.data.get('tags', None)))
- for tag in self.payload.get('tags'):
- # logger.debug('looking for tag %s in tag list %s', tag, response.data['tags'])
- self.assertIn(tag, response.data['tags'])
-
- self.check_schema_response('post', '201', response)
-
+ class RetrieveRequestTest(RESTEndpointTest):
@skipIfNotSubclass(RetrieveModelMixin)
def test_detail(self):
current_objects = self.client.get(self.url, format='json').data
@@ -469,80 +398,6 @@ def test_detail(self):
self.check_schema_response('get', '200', response, detail=True)
- @skipIfNotSubclass(DestroyModelMixin)
- def test_delete(self):
- current_objects = self.client.get(self.url, format='json').data
- relative_url = self.url + '{}/'.format(current_objects['results'][-1]['id'])
- response = self.client.delete(relative_url)
- self.assertEqual(204, response.status_code, response.content[:1000])
-
- @skipIfNotSubclass(UpdateModelMixin)
- def test_update(self):
- current_objects = self.client.get(self.url, format='json').data
- relative_url = self.url + '{}/'.format(current_objects['results'][0]['id'])
- response = self.client.patch(relative_url, self.update_fields)
- self.assertEqual(200, response.status_code, response.content[:1000])
-
- self.check_schema_response('patch', '200', response, detail=True)
-
- for key, value in self.update_fields.items():
- # some exception as push_to_jira has been implemented strangely in the update methods in the api
- if key not in ['push_to_jira', 'ssh', 'password', 'api_key']:
- # Convert data to sets to avoid problems with lists
- if isinstance(value, list):
- value = set(value)
- if isinstance(response.data[key], list):
- response_data = set(response.data[key])
- else:
- response_data = response.data[key]
- self.assertEqual(value, response_data)
-
- self.assertNotIn('push_to_jira', response.data)
- self.assertNotIn('ssh', response.data)
- self.assertNotIn('password', response.data)
- self.assertNotIn('api_key', response.data)
-
- if hasattr(self.endpoint_model, 'tags') and self.update_fields and self.update_fields.get('tags', None):
- self.assertEqual(len(self.update_fields.get('tags')), len(response.data.get('tags', None)))
- for tag in self.update_fields.get('tags'):
- logger.debug('looking for tag %s in tag list %s', tag, response.data['tags'])
- self.assertIn(tag, response.data['tags'])
-
- response = self.client.put(
- relative_url, self.payload)
- self.assertEqual(200, response.status_code, response.content[:1000])
-
- self.check_schema_response('put', '200', response, detail=True)
-
- @skipIfNotSubclass(DeletePreviewModelMixin)
- def test_delete_preview(self):
- current_objects = self.client.get(self.url, format='json').data
- relative_url = self.url + '{}/delete_preview/'.format(current_objects['results'][0]['id'])
- response = self.client.get(relative_url)
- # print('delete_preview response.data')
-
- self.assertEqual(200, response.status_code, response.content[:1000])
-
- self.check_schema_response('get', '200', response, detail=True)
-
- self.assertNotIn('push_to_jira', response.data)
- self.assertNotIn('password', response.data)
- self.assertNotIn('ssh', response.data)
- self.assertNotIn('api_key', response.data)
-
- self.assertIsInstance(response.data['results'], list)
- self.assertGreater(len(response.data['results']), 0, "Length: {}".format(len(response.data['results'])))
-
- for obj in response.data['results']:
- self.assertIsInstance(obj, dict)
- self.assertEqual(len(obj), 3)
- self.assertIsInstance(obj['model'], str)
- if obj['id']: # It needs to be None or int
- self.assertIsInstance(obj['id'], int)
- self.assertIsInstance(obj['name'], str)
-
- self.assertEqual(self.deleted_objects, len(response.data['results']), response.content[:1000])
-
@skipIfNotSubclass(PrefetchRetrieveMixin)
def test_detail_prefetch(self):
# print("=======================================================")
@@ -571,6 +426,71 @@ def test_detail_prefetch(self):
# TODO add schema check
+ @skipIfNotSubclass(RetrieveModelMixin)
+ def test_detail_object_not_authorized(self):
+ if not self.test_type == TestType.OBJECT_PERMISSIONS:
+ self.skipTest('Authorization is not object based')
+
+ self.setUp_not_authorized()
+
+ current_objects = self.endpoint_model.objects.all()
+ relative_url = self.url + f'{current_objects[0].id}/'
+ response = self.client.get(relative_url)
+ self.assertEqual(404, response.status_code, response.content[:1000])
+
+ @skipIfNotSubclass(RetrieveModelMixin)
+ def test_detail_configuration_not_authorized(self):
+ if not self.test_type == TestType.CONFIGURATION_PERMISSIONS:
+ self.skipTest('Authorization is not configuration based')
+
+ self.setUp_not_authorized()
+
+ current_objects = self.endpoint_model.objects.all()
+ relative_url = self.url + f'{current_objects[0].id}/'
+ response = self.client.get(relative_url)
+ self.assertEqual(403, response.status_code, response.content[:1000])
+
+ class ListRequestTest(RESTEndpointTest):
+ @skipIfNotSubclass(ListModelMixin)
+ def test_list(self):
+ # print(open_api3_json_schema)
+ # validator = ResponseValidator(spec)
+
+ check_for_tags = False
+ if hasattr(self.endpoint_model, 'tags') and self.payload and self.payload.get('tags', None):
+ # create a new instance first so at least one instance has tags set via the payload, which exercises the tag handling code
+ logger.debug('creating model with endpoints: %s', self.payload)
+ response = self.client.post(self.url, self.payload)
+ self.assertEqual(201, response.status_code, response.content[:1000])
+
+ # print('response:', response.content[:1000])
+ check_for_id = response.data['id']
+ # print('id: ', check_for_id)
+ check_for_tags = self.payload.get('tags', None)
+
+ response = self.client.get(self.url, format='json')
+ # print('response')
+ # print(vars(response))
+
+ # print('response.data')
+ # print(response.data)
+ # tags must be present in the entry we created above
+ if check_for_tags:
+ tags_found = False
+ for result in response.data['results']:
+ if result['id'] == check_for_id:
+ # logger.debug('result.tags: %s', result.get('tags', ''))
+ self.assertEqual(len(check_for_tags), len(result.get('tags', None)))
+ for tag in check_for_tags:
+ # logger.debug('looking for tag %s in tag list %s', tag, result['tags'])
+ self.assertIn(tag, result['tags'])
+ tags_found = True
+ self.assertTrue(tags_found)
+
+ self.assertEqual(200, response.status_code, response.content[:1000])
+
+ self.check_schema_response('get', '200', response)
+
@skipIfNotSubclass(PrefetchListMixin)
def test_list_prefetch(self):
prefetchable_fields = [x[0] for x in _get_prefetchable_fields(self.viewset.serializer_class)]
@@ -600,24 +520,6 @@ def test_list_prefetch(self):
# TODO add schema check
- def setUp_not_authorized(self):
- testuser = User.objects.get(id=3)
- token = Token.objects.get(user=testuser)
- self.client = APIClient()
- self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
-
- def setUp_global_reader(self):
- testuser = User.objects.get(id=5)
- token = Token.objects.get(user=testuser)
- self.client = APIClient()
- self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
-
- def setUp_global_owner(self):
- testuser = User.objects.get(id=6)
- token = Token.objects.get(user=testuser)
- self.client = APIClient()
- self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
-
@skipIfNotSubclass(ListModelMixin)
def test_list_object_not_authorized(self):
if not self.test_type == TestType.OBJECT_PERMISSIONS:
@@ -629,17 +531,34 @@ def test_list_object_not_authorized(self):
self.assertFalse(response.data['results'])
self.assertEqual(200, response.status_code, response.content[:1000])
- @skipIfNotSubclass(RetrieveModelMixin)
- def test_detail_object_not_authorized(self):
- if not self.test_type == TestType.OBJECT_PERMISSIONS:
- self.skipTest('Authorization is not object based')
+ @skipIfNotSubclass(ListModelMixin)
+ def test_list_configuration_not_authorized(self):
+ if not self.test_type == TestType.CONFIGURATION_PERMISSIONS:
+ self.skipTest('Authorization is not configuration based')
self.setUp_not_authorized()
- current_objects = self.endpoint_model.objects.all()
- relative_url = self.url + f'{current_objects[0].id}/'
- response = self.client.get(relative_url)
- self.assertEqual(404, response.status_code, response.content[:1000])
+ response = self.client.get(self.url, format='json')
+ self.assertEqual(403, response.status_code, response.content[:1000])
+
+ class CreateRequestTest(RESTEndpointTest):
+ @skipIfNotSubclass(CreateModelMixin)
+ def test_create(self):
+ length = self.endpoint_model.objects.count()
+ response = self.client.post(self.url, self.payload)
+ logger.debug('test_create_response:')
+ logger.debug(response)
+ logger.debug(response.data)
+ self.assertEqual(201, response.status_code, response.content[:1000])
+ self.assertEqual(self.endpoint_model.objects.count(), length + 1)
+
+ if hasattr(self.endpoint_model, 'tags') and self.payload and self.payload.get('tags', None):
+ self.assertEqual(len(self.payload.get('tags')), len(response.data.get('tags', None)))
+ for tag in self.payload.get('tags'):
+ # logger.debug('looking for tag %s in tag list %s', tag, response.data['tags'])
+ self.assertIn(tag, response.data['tags'])
+
+ self.check_schema_response('post', '201', response)
@skipIfNotSubclass(CreateModelMixin)
@patch('dojo.api_v2.permissions.user_has_permission')
@@ -655,28 +574,54 @@ def test_create_object_not_authorized(self, mock):
ANY,
self.permission_create)
- @skipIfNotSubclass(DestroyModelMixin)
- @patch('dojo.api_v2.permissions.user_has_permission')
- def test_delete_object_not_authorized(self, mock):
- if not self.test_type == TestType.OBJECT_PERMISSIONS:
- self.skipTest('Authorization is not object based')
+ @skipIfNotSubclass(CreateModelMixin)
+ def test_create_configuration_not_authorized(self):
+ if not self.test_type == TestType.CONFIGURATION_PERMISSIONS:
+ self.skipTest('Authorization is not configuration based')
- mock.return_value = False
+ self.setUp_not_authorized()
+ response = self.client.post(self.url, self.payload)
+ self.assertEqual(403, response.status_code, response.content[:1000])
+
+ class UpdateRequestTest(RESTEndpointTest):
+ @skipIfNotSubclass(UpdateModelMixin)
+ def test_update(self):
current_objects = self.client.get(self.url, format='json').data
relative_url = self.url + '{}/'.format(current_objects['results'][0]['id'])
- self.client.delete(relative_url)
+ response = self.client.patch(relative_url, self.update_fields)
+ self.assertEqual(200, response.status_code, response.content[:1000])
- if self.endpoint_model == Endpoint_Status:
- permission_object = Endpoint.objects.get(id=current_objects['results'][0]['endpoint'])
- elif self.endpoint_model == JIRA_Issue:
- permission_object = Finding.objects.get(id=current_objects['results'][0]['finding'])
- else:
- permission_object = self.permission_check_class.objects.get(id=current_objects['results'][0]['id'])
+ self.check_schema_response('patch', '200', response, detail=True)
- mock.assert_called_with(User.objects.get(username='admin'),
- permission_object,
- self.permission_delete)
+ for key, value in self.update_fields.items():
+ # some exceptions, as push_to_jira is implemented inconsistently in the API update methods
+ if key not in ['push_to_jira', 'ssh', 'password', 'api_key']:
+ # Convert data to sets to avoid problems with lists
+ if isinstance(value, list):
+ value = set(value)
+ if isinstance(response.data[key], list):
+ response_data = set(response.data[key])
+ else:
+ response_data = response.data[key]
+ self.assertEqual(value, response_data)
+
+ self.assertNotIn('push_to_jira', response.data)
+ self.assertNotIn('ssh', response.data)
+ self.assertNotIn('password', response.data)
+ self.assertNotIn('api_key', response.data)
+
+ if hasattr(self.endpoint_model, 'tags') and self.update_fields and self.update_fields.get('tags', None):
+ self.assertEqual(len(self.update_fields.get('tags')), len(response.data.get('tags', None)))
+ for tag in self.update_fields.get('tags'):
+ logger.debug('looking for tag %s in tag list %s', tag, response.data['tags'])
+ self.assertIn(tag, response.data['tags'])
+
+ response = self.client.put(
+ relative_url, self.payload)
+ self.assertEqual(200, response.status_code, response.content[:1000])
+
+ self.check_schema_response('put', '200', response, detail=True)
@skipIfNotSubclass(UpdateModelMixin)
@patch('dojo.api_v2.permissions.user_has_permission')
@@ -708,70 +653,114 @@ def test_update_object_not_authorized(self, mock):
permission_object,
self.permission_update)
- @skipIfNotSubclass(ListModelMixin)
- def test_list_configuration_not_authorized(self):
+ @skipIfNotSubclass(UpdateModelMixin)
+ def test_update_configuration_not_authorized(self):
if not self.test_type == TestType.CONFIGURATION_PERMISSIONS:
self.skipTest('Authorization is not configuration based')
self.setUp_not_authorized()
- response = self.client.get(self.url, format='json')
+ current_objects = self.endpoint_model.objects.all()
+ relative_url = self.url + f'{current_objects[0].id}/'
+
+ response = self.client.patch(relative_url, self.update_fields)
self.assertEqual(403, response.status_code, response.content[:1000])
- @skipIfNotSubclass(RetrieveModelMixin)
- def test_detail_configuration_not_authorized(self):
- if not self.test_type == TestType.CONFIGURATION_PERMISSIONS:
- self.skipTest('Authorization is not configuration based')
+ response = self.client.put(relative_url, self.payload)
+ self.assertEqual(403, response.status_code, response.content[:1000])
- self.setUp_not_authorized()
+ class DeleteRequestTest(RESTEndpointTest):
+ @skipIfNotSubclass(DestroyModelMixin)
+ def test_delete(self):
+ if delete_id := getattr(self, "delete_id", None):
+ relative_url = f"{self.url}{delete_id}/"
+ else:
+ current_objects = self.client.get(self.url, format='json').data
+ relative_url = f"{self.url}{current_objects['results'][-1]['id']}/"
+ response = self.client.delete(relative_url)
+ self.assertEqual(204, response.status_code, response.content[:1000])
+
+ @skipIfNotSubclass(DeletePreviewModelMixin)
+ def test_delete_preview(self):
+ if delete_id := getattr(self, "delete_id", None):
+ relative_url = f"{self.url}{delete_id}/delete_preview/"
+ else:
+ current_objects = self.client.get(self.url, format='json').data
+ relative_url = f"{self.url}{current_objects['results'][0]['id']}/delete_preview/"
- current_objects = self.endpoint_model.objects.all()
- relative_url = self.url + f'{current_objects[0].id}/'
response = self.client.get(relative_url)
- self.assertEqual(403, response.status_code, response.content[:1000])
+ # print('delete_preview response.data')
- @skipIfNotSubclass(CreateModelMixin)
- def test_create_configuration_not_authorized(self):
- if not self.test_type == TestType.CONFIGURATION_PERMISSIONS:
- self.skipTest('Authorization is not configuration based')
+ self.assertEqual(200, response.status_code, response.content[:1000])
- self.setUp_not_authorized()
+ self.check_schema_response('get', '200', response, detail=True)
- response = self.client.post(self.url, self.payload)
- self.assertEqual(403, response.status_code, response.content[:1000])
+ self.assertNotIn('push_to_jira', response.data)
+ self.assertNotIn('password', response.data)
+ self.assertNotIn('ssh', response.data)
+ self.assertNotIn('api_key', response.data)
+
+ self.assertIsInstance(response.data['results'], list)
+ self.assertGreater(len(response.data['results']), 0, "Length: {}".format(len(response.data['results'])))
+
+ for obj in response.data['results']:
+ self.assertIsInstance(obj, dict)
+ self.assertEqual(len(obj), 3)
+ self.assertIsInstance(obj['model'], str)
+ if obj['id']: # It needs to be None or int
+ self.assertIsInstance(obj['id'], int)
+ self.assertIsInstance(obj['name'], str)
+
+ self.assertEqual(self.deleted_objects, len(response.data['results']), response.content)
@skipIfNotSubclass(DestroyModelMixin)
- def test_delete_configuration_not_authorized(self):
- if not self.test_type == TestType.CONFIGURATION_PERMISSIONS:
- self.skipTest('Authorization is not configuration based')
+ @patch('dojo.api_v2.permissions.user_has_permission')
+ def test_delete_object_not_authorized(self, mock):
+ if not self.test_type == TestType.OBJECT_PERMISSIONS:
+ self.skipTest('Authorization is not object based')
- self.setUp_not_authorized()
+ mock.return_value = False
- current_objects = self.endpoint_model.objects.all()
- relative_url = self.url + f'{current_objects[0].id}/'
- response = self.client.delete(relative_url)
- self.assertEqual(403, response.status_code, response.content[:1000])
+ current_objects = self.client.get(self.url, format='json').data
+ relative_url = self.url + '{}/'.format(current_objects['results'][0]['id'])
+ self.client.delete(relative_url)
- @skipIfNotSubclass(UpdateModelMixin)
- def test_update_configuration_not_authorized(self):
+ if self.endpoint_model == Endpoint_Status:
+ permission_object = Endpoint.objects.get(id=current_objects['results'][0]['endpoint'])
+ elif self.endpoint_model == JIRA_Issue:
+ permission_object = Finding.objects.get(id=current_objects['results'][0]['finding'])
+ else:
+ permission_object = self.permission_check_class.objects.get(id=current_objects['results'][0]['id'])
+
+ mock.assert_called_with(User.objects.get(username='admin'),
+ permission_object,
+ self.permission_delete)
+
+ @skipIfNotSubclass(DestroyModelMixin)
+ def test_delete_configuration_not_authorized(self):
if not self.test_type == TestType.CONFIGURATION_PERMISSIONS:
self.skipTest('Authorization is not configuration based')
self.setUp_not_authorized()
- current_objects = self.endpoint_model.objects.all()
- relative_url = self.url + f'{current_objects[0].id}/'
-
- response = self.client.patch(relative_url, self.update_fields)
- self.assertEqual(403, response.status_code, response.content[:1000])
-
- response = self.client.put(relative_url, self.payload)
+ if delete_id := getattr(self, "delete_id", None):
+ relative_url = self.url + f'{delete_id}/'
+ else:
+ current_objects = self.endpoint_model.objects.all()
+ relative_url = self.url + f'{current_objects[0].id}/'
+ response = self.client.delete(relative_url)
self.assertEqual(403, response.status_code, response.content[:1000])
- class MemberEndpointTest(RESTEndpointTest):
- def __init__(self, *args, **kwargs):
- BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
+ class BaseClassTest(
+ RetrieveRequestTest,
+ ListRequestTest,
+ CreateRequestTest,
+ UpdateRequestTest,
+ DeleteRequestTest,
+ ):
+ pass
+ class MemberEndpointTest(BaseClassTest):
def test_update(self):
current_objects = self.client.get(self.url, format='json').data
relative_url = self.url + '{}/'.format(current_objects['results'][0]['id'])
@@ -800,10 +789,7 @@ def test_update_object_not_authorized(self, mock):
self.permission_check_class.objects.get(id=current_objects['results'][0]['id']),
self.permission_update)
- class AuthenticatedViewTest(RESTEndpointTest):
- def __init__(self, *args, **kwargs):
- BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-
+ class AuthenticatedViewTest(BaseClassTest):
@skipIfNotSubclass(ListModelMixin)
def test_list_configuration_not_authorized(self):
if not self.test_type == TestType.CONFIGURATION_PERMISSIONS:
@@ -827,7 +813,7 @@ def test_detail_configuration_not_authorized(self):
self.assertEqual(200, response.status_code, response.content[:1000])
-class AppAnalysisTest(BaseClass.RESTEndpointTest):
+class AppAnalysisTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -856,7 +842,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class EndpointStatusTest(BaseClass.RESTEndpointTest):
+class EndpointStatusTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -947,7 +933,7 @@ def test_update_put_unsuccessful(self):
self.assertIn('This endpoint-finding relation already exists', response.content.decode("utf-8"))
-class EndpointTest(BaseClass.RESTEndpointTest):
+class EndpointTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -974,7 +960,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class EngagementTest(BaseClass.RESTEndpointTest):
+class EngagementTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1005,7 +991,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class RiskAcceptanceTest(BaseClass.RESTEndpointTest):
+class RiskAcceptanceTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1149,7 +1135,7 @@ def test_request_response_get(self):
self.assertEqual(200, response.status_code)
-class FindingsTest(BaseClass.RESTEndpointTest):
+class FindingsTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1262,7 +1248,7 @@ def test_severity_validation(self):
assert result.json()["severity"] == ["Severity must be one of the following: ['Info', 'Low', 'Medium', 'High', 'Critical']"]
-class FindingMetadataTest(BaseClass.RESTEndpointTest):
+class FindingMetadataTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1324,7 +1310,7 @@ def test_delete(self):
assert len(result) == 0, "Metadata not deleted correctly"
-class FindingTemplatesTest(BaseClass.RESTEndpointTest):
+class FindingTemplatesTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1347,7 +1333,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class JiraInstancesTest(BaseClass.RESTEndpointTest):
+class JiraInstancesTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1377,7 +1363,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class JiraIssuesTest(BaseClass.RESTEndpointTest):
+class JiraIssuesTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1400,7 +1386,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class JiraProjectTest(BaseClass.RESTEndpointTest):
+class JiraProjectTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1427,7 +1413,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class SonarqubeIssueTest(BaseClass.RESTEndpointTest):
+class SonarqubeIssueTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1446,7 +1432,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class SonarqubeIssuesTransitionTest(BaseClass.RESTEndpointTest):
+class SonarqubeIssuesTransitionTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1465,7 +1451,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class Product_API_Scan_ConfigurationTest(BaseClass.RESTEndpointTest):
+class Product_API_Scan_ConfigurationTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1488,7 +1474,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class ProductTest(BaseClass.RESTEndpointTest):
+class ProductTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1515,7 +1501,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class StubFindingsTest(BaseClass.RESTEndpointTest):
+class StubFindingsTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1546,7 +1532,7 @@ def test_severity_validation(self):
assert result.json()["severity"] == ["Severity must be one of the following: ['Info', 'Low', 'Medium', 'High', 'Critical']"]
-class TestsTest(BaseClass.RESTEndpointTest):
+class TestsTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1576,11 +1562,12 @@ def __init__(self, *args, **kwargs):
self.permission_create = Permissions.Test_Add
self.permission_update = Permissions.Test_Edit
self.permission_delete = Permissions.Test_Delete
- self.deleted_objects = 18
+ self.deleted_objects = 5
+ self.delete_id = 55
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class ToolConfigurationsTest(BaseClass.RESTEndpointTest):
+class ToolConfigurationsTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1606,7 +1593,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class ToolProductSettingsTest(BaseClass.RESTEndpointTest):
+class ToolProductSettingsTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1632,7 +1619,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class ToolTypesTest(BaseClass.RESTEndpointTest):
+class ToolTypesTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1650,7 +1637,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class NoteTypesTest(BaseClass.RESTEndpointTest):
+class NoteTypesTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1671,7 +1658,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class NotesTest(BaseClass.RESTEndpointTest):
+class NotesTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1690,7 +1677,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class UsersTest(BaseClass.RESTEndpointTest):
+class UsersTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1735,7 +1722,7 @@ def test_update_user_other_permissions_will_not_leak_and_stay_untouched(self):
self.assertEqual(set(user_permissions), set(payload['configuration_permissions'] + [26, 28]))
-class UserContactInfoTest(BaseClass.RESTEndpointTest):
+class UserContactInfoTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -1776,7 +1763,7 @@ def test_user_should_not_have_access_to_product_3_in_detail(self):
self.assertEqual(response.status_code, 404)
-class ImportScanTest(BaseClass.RESTEndpointTest):
+class ImportScanTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2408,7 +2395,7 @@ def test_create_not_authorized_product_name_engagement_name_scan_type_title(self
reimporter_mock.assert_not_called()
-class ProductTypeTest(BaseClass.RESTEndpointTest):
+class ProductTypeTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2449,7 +2436,7 @@ def test_create_authorized_owner(self):
self.assertEqual(201, response.status_code, response.content[:1000])
-class DojoGroupsTest(BaseClass.RESTEndpointTest):
+class DojoGroupsTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2539,7 +2526,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class RolesTest(BaseClass.RESTEndpointTest):
+class RolesTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2551,7 +2538,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class GlobalRolesTest(BaseClass.RESTEndpointTest):
+class GlobalRolesTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2589,7 +2576,7 @@ def __init__(self, *args, **kwargs):
self.permission_update = Permissions.Product_Type_Manage_Members
self.permission_delete = Permissions.Product_Type_Member_Delete
self.deleted_objects = 1
- BaseClass.MemberEndpointTest.__init__(self, *args, **kwargs)
+ BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
class ProductMemberTest(BaseClass.MemberEndpointTest):
@@ -2612,7 +2599,7 @@ def __init__(self, *args, **kwargs):
self.permission_update = Permissions.Product_Manage_Members
self.permission_delete = Permissions.Product_Member_Delete
self.deleted_objects = 1
- BaseClass.MemberEndpointTest.__init__(self, *args, **kwargs)
+ BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
class ProductTypeGroupTest(BaseClass.MemberEndpointTest):
@@ -2635,7 +2622,7 @@ def __init__(self, *args, **kwargs):
self.permission_update = Permissions.Product_Type_Group_Edit
self.permission_delete = Permissions.Product_Type_Group_Delete
self.deleted_objects = 1
- BaseClass.MemberEndpointTest.__init__(self, *args, **kwargs)
+ BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
class ProductGroupTest(BaseClass.MemberEndpointTest):
@@ -2658,10 +2645,10 @@ def __init__(self, *args, **kwargs):
self.permission_update = Permissions.Product_Group_Edit
self.permission_delete = Permissions.Product_Group_Delete
self.deleted_objects = 1
- BaseClass.MemberEndpointTest.__init__(self, *args, **kwargs)
+ BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class LanguageTypeTest(BaseClass.RESTEndpointTest):
+class LanguageTypeTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2676,11 +2663,11 @@ def __init__(self, *args, **kwargs):
}
self.update_fields = {'color': 'blue'}
self.test_type = TestType.CONFIGURATION_PERMISSIONS
- self.deleted_objects = 2
+ self.deleted_objects = 1
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class LanguageTest(BaseClass.RESTEndpointTest):
+class LanguageTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2708,7 +2695,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class ImportLanguagesTest(BaseClass.RESTEndpointTest):
+class ImportLanguagesTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2729,7 +2716,7 @@ def __del__(self: object):
self.payload['file'].close()
def test_create(self):
- BaseClass.RESTEndpointTest.test_create(self)
+ BaseClass.CreateRequestTest.test_create(self)
languages = Languages.objects.filter(product=1).order_by('language')
@@ -2750,7 +2737,7 @@ def test_create(self):
self.assertEqual(languages[1].code, 51056)
-class NotificationsTest(BaseClass.RESTEndpointTest):
+class NotificationsTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2838,7 +2825,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class ConfigurationPermissionTest(BaseClass.RESTEndpointTest):
+class ConfigurationPermissionTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2850,7 +2837,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class CredentialMappingTest(BaseClass.RESTEndpointTest):
+class CredentialMappingTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2873,7 +2860,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class CredentialTest(BaseClass.RESTEndpointTest):
+class CredentialTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2895,7 +2882,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class TextQuestionTest(BaseClass.RESTEndpointTest):
+class TextQuestionTest(BaseClass.BaseClassTest):
fixtures = ['questionnaire_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2908,7 +2895,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class ChoiceQuestionTest(BaseClass.RESTEndpointTest):
+class ChoiceQuestionTest(BaseClass.BaseClassTest):
fixtures = ['questionnaire_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2921,7 +2908,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class TextAnswerTest(BaseClass.RESTEndpointTest):
+class TextAnswerTest(BaseClass.BaseClassTest):
fixtures = ['questionnaire_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2934,7 +2921,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class ChoiceAnswerTest(BaseClass.RESTEndpointTest):
+class ChoiceAnswerTest(BaseClass.BaseClassTest):
fixtures = ['questionnaire_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2947,7 +2934,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class GeneralSurveyTest(BaseClass.RESTEndpointTest):
+class GeneralSurveyTest(BaseClass.BaseClassTest):
fixtures = ['questionnaire_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2960,7 +2947,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class EngagementSurveyTest(BaseClass.RESTEndpointTest):
+class EngagementSurveyTest(BaseClass.BaseClassTest):
fixtures = ['questionnaire_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2973,7 +2960,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class AnsweredSurveyTest(BaseClass.RESTEndpointTest):
+class AnsweredSurveyTest(BaseClass.BaseClassTest):
fixtures = ['questionnaire_testdata.json']
def __init__(self, *args, **kwargs):
@@ -2986,7 +2973,7 @@ def __init__(self, *args, **kwargs):
BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs)
-class AnnouncementTest(BaseClass.RESTEndpointTest):
+class AnnouncementTest(BaseClass.BaseClassTest):
fixtures = ['dojo_testdata.json']
def __init__(self, *args, **kwargs):
diff --git a/unittests/test_user_queries.py b/unittests/test_user_queries.py
index 3028f04ee84..591ff523073 100644
--- a/unittests/test_user_queries.py
+++ b/unittests/test_user_queries.py
@@ -59,21 +59,21 @@ def tearDown(self):
def test_user_none(self, mock_current_user):
mock_current_user.return_value = None
- self.assertQuerysetEqual(Dojo_User.objects.none(), get_authorized_users(Permissions.Product_View))
+ self.assertQuerySetEqual(Dojo_User.objects.none(), get_authorized_users(Permissions.Product_View))
@patch('dojo.user.queries.get_current_user')
def test_user_admin(self, mock_current_user):
mock_current_user.return_value = self.admin_user
users = Dojo_User.objects.all().order_by('first_name', 'last_name', 'username')
- self.assertQuerysetEqual(users, get_authorized_users(Permissions.Product_View))
+ self.assertQuerySetEqual(users, get_authorized_users(Permissions.Product_View))
@patch('dojo.user.queries.get_current_user')
def test_user_global_permission(self, mock_current_user):
mock_current_user.return_value = self.global_permission_user
users = Dojo_User.objects.all().order_by('first_name', 'last_name', 'username')
- self.assertQuerysetEqual(users, get_authorized_users(Permissions.Product_View))
+ self.assertQuerySetEqual(users, get_authorized_users(Permissions.Product_View))
@patch('dojo.user.queries.get_current_user')
@patch('dojo.product.queries.get_current_user')
@@ -82,4 +82,4 @@ def test_user_regular(self, mock_current_user_1, mock_current_user_2):
mock_current_user_2.return_value = self.regular_user
users = Dojo_User.objects.exclude(username='invisible_user').order_by('first_name', 'last_name', 'username')
- self.assertQuerysetEqual(users, get_authorized_users(Permissions.Product_View))
+ self.assertQuerySetEqual(users, get_authorized_users(Permissions.Product_View))
diff --git a/unittests/tools/test_deepfence_threatmapper_parser.py b/unittests/tools/test_deepfence_threatmapper_parser.py
new file mode 100644
index 00000000000..2dc584b2259
--- /dev/null
+++ b/unittests/tools/test_deepfence_threatmapper_parser.py
@@ -0,0 +1,43 @@
+from dojo.models import Test
+from dojo.tools.deepfence_threatmapper.parser import DeepfenceThreatmapperParser
+
+from ..dojo_test_case import DojoTestCase
+
+
+class TestDeepfenceThreatmapperParser(DojoTestCase):
+
+ def test_parse_file_compliance_report(self):
+ testfile = open("unittests/scans/deepfence_threatmapper/compliance_report.xlsx", "rb")
+ parser = DeepfenceThreatmapperParser()
+ findings = parser.get_findings(testfile, Test())
+ self.assertEqual(7, len(findings))
+ self.assertEqual(findings[0].title, "Threatmapper_Compliance_Report-gdpr_3.6")
+ self.assertEqual(findings[0].severity, "Info")
+
+ def test_parse_file_malware_report(self):
+ testfile = open("unittests/scans/deepfence_threatmapper/malware_report.xlsx", "rb")
+ parser = DeepfenceThreatmapperParser()
+ findings = parser.get_findings(testfile, Test())
+ self.assertEqual(9, len(findings))
+ self.assertEqual(findings[0].title, "MD5_Constants")
+ self.assertEqual(findings[0].severity, "Low")
+ self.assertEqual(findings[0].file_path, "/tmp/Deepfence/YaraHunter/df_db09257b02e615049e0aecc05be2dc2401735e67db4ab74225df777c62c39753/usr/sbin/mkfs.cramfs")
+
+ def test_parse_file_secret_report(self):
+ testfile = open("unittests/scans/deepfence_threatmapper/secret_report.xlsx", "rb")
+ parser = DeepfenceThreatmapperParser()
+ findings = parser.get_findings(testfile, Test())
+ self.assertEqual(7, len(findings))
+ self.assertEqual(findings[0].title, "Username and password in URI")
+ self.assertEqual(findings[0].severity, "High")
+ self.assertEqual(findings[0].file_path, "usr/share/doc/curl-8.3.0/TheArtOfHttpScripting.md")
+
+ def test_parse_file_vulnerability_report(self):
+ testfile = open("unittests/scans/deepfence_threatmapper/vulnerability_report.xlsx", "rb")
+ parser = DeepfenceThreatmapperParser()
+ findings = parser.get_findings(testfile, Test())
+ self.assertEqual(3, len(findings))
+ self.assertEqual(findings[0].title, "Threatmapper_Vuln_Report-CVE-2021-36084")
+ self.assertEqual(findings[0].severity, "Low")
+ self.assertEqual(findings[0].mitigation, "2.5-10.amzn2.0.1")
+ self.assertEqual(findings[0].cve, "CVE-2021-36084")
diff --git a/unittests/tools/test_kubescape_parser.py b/unittests/tools/test_kubescape_parser.py
index bccbed220a7..c68cb2f1f7b 100644
--- a/unittests/tools/test_kubescape_parser.py
+++ b/unittests/tools/test_kubescape_parser.py
@@ -4,7 +4,7 @@
from ..dojo_test_case import DojoTestCase, get_unit_tests_path
-class TestOrtParser(DojoTestCase):
+class TestKubescapeParser(DojoTestCase):
def test_parse_file_has_many_findings(self):
with open(get_unit_tests_path() + "/scans/kubescape/many_findings.json") as testfile:
parser = KubescapeParser()
@@ -15,10 +15,10 @@ def test_parse_file_has_many_results(self):
with open(get_unit_tests_path() + "/scans/kubescape/results.json") as testfile:
parser = KubescapeParser()
findings = parser.get_findings(testfile, Test())
- self.assertEqual(20, len(findings))
+ self.assertEqual(0, len(findings))
def test_parse_file_with_a_failure(self):
with open(get_unit_tests_path() + "/scans/kubescape/with_a_failure.json") as testfile:
parser = KubescapeParser()
findings = parser.get_findings(testfile, Test())
- self.assertEqual(18, len(findings))
+ self.assertEqual(3, len(findings))