Skip to content

Commit

Permalink
Merge pull request #10323 from DefectDojo/release/2.35.0
Browse files Browse the repository at this point in the history
Release: Merge release into master from: release/2.35.0
  • Loading branch information
Maffooch authored Jun 3, 2024
2 parents 7cd2279 + b9be46e commit 67a7571
Show file tree
Hide file tree
Showing 96 changed files with 2,861 additions and 1,520 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/k8s-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ jobs:
uses: actions/checkout@v4

- name: Setup Minikube
uses: manusa/actions-setup-minikube@v2.10.0
uses: manusa/actions-setup-minikube@v2.11.0
with:
minikube version: 'v1.31.2'
kubernetes version: ${{ matrix.k8s }}
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile.django-alpine
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
# Dockerfile.nginx to use the caching mechanism of Docker.

# Ref: https://devguide.python.org/#branchstatus
FROM python:3.11.3-alpine3.16@sha256:9efc6e155f287eb424ede74aeff198be75ae04504b1e42e87ec9f221e7410f2d as base
FROM python:3.11.9-alpine3.20@sha256:df44c0c0761ddbd6388f4549cab42d24d64d257c2a960ad5b276bb7dab9639c7 as base
FROM base as build
WORKDIR /app
RUN \
Expand Down
6 changes: 3 additions & 3 deletions Dockerfile.django-debian
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
# Dockerfile.nginx to use the caching mechanism of Docker.

# Ref: https://devguide.python.org/#branchstatus
FROM python:3.11.4-slim-bullseye@sha256:40319d0a897896e746edf877783ef39685d44e90e1e6de8d964d0382df0d4952 as base
FROM python:3.11.9-slim-bookworm@sha256:8c1036ec919826052306dfb5286e4753ffd9d5f6c24fbc352a5399c3b405b57e as base
FROM base as build
WORKDIR /app
RUN \
Expand Down Expand Up @@ -43,10 +43,10 @@ RUN \
# ugly fix to install postgresql-client without errors
mkdir -p /usr/share/man/man1 /usr/share/man/man7 && \
apt-get -y install --no-install-recommends \
# libopenjp2-7 libjpeg62 libtiff5 are required by the pillow package
# libopenjp2-7 libjpeg62 libtiff are required by the pillow package
libopenjp2-7 \
libjpeg62 \
libtiff5 \
libtiff6 \
dnsutils \
default-mysql-client \
libmariadb3 \
Expand Down
4 changes: 2 additions & 2 deletions Dockerfile.integration-tests-debian
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@

# code: language=Dockerfile

FROM openapitools/openapi-generator-cli:v7.5.0@sha256:cdf11948948de9c21c6035de47dd5fc73c1651c8ba2ea0a4b86a527608ef52a9 as openapitools
FROM python:3.11.4-slim-bullseye@sha256:40319d0a897896e746edf877783ef39685d44e90e1e6de8d964d0382df0d4952 as build
FROM openapitools/openapi-generator-cli:v7.6.0@sha256:f86ca824293602b71b9b66683cc0011f8ff963858bd853621c554ff5cc7dd1d5 as openapitools
FROM python:3.11.9-slim-bookworm@sha256:8c1036ec919826052306dfb5286e4753ffd9d5f6c24fbc352a5399c3b405b57e as build
WORKDIR /app
RUN \
apt-get -y update && \
Expand Down
4 changes: 2 additions & 2 deletions Dockerfile.nginx-alpine
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
# Dockerfile.django-alpine to use the caching mechanism of Docker.

# Ref: https://devguide.python.org/#branchstatus
FROM python:3.11.3-alpine3.16@sha256:9efc6e155f287eb424ede74aeff198be75ae04504b1e42e87ec9f221e7410f2d as base
FROM python:3.11.9-alpine3.20@sha256:df44c0c0761ddbd6388f4549cab42d24d64d257c2a960ad5b276bb7dab9639c7 as base
FROM base as build
WORKDIR /app
RUN \
Expand Down Expand Up @@ -140,7 +140,7 @@ COPY manage.py ./
COPY dojo/ ./dojo/
RUN env DD_SECRET_KEY='.' python3 manage.py collectstatic --noinput && true

FROM nginx:1.26.0-alpine@sha256:ca16009a8c25f52193506d4c90c98efbad4b6cbe73372e2a27972f05c5e02f15
FROM nginx:1.27.0-alpine@sha256:69f8c2c72671490607f52122be2af27d4fc09657ff57e42045801aa93d2090f7
ARG uid=1001
ARG appuser=defectdojo
COPY --from=collectstatic /app/static/ /usr/share/nginx/html/static/
Expand Down
4 changes: 2 additions & 2 deletions Dockerfile.nginx-debian
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
# Dockerfile.django-debian to use the caching mechanism of Docker.

# Ref: https://devguide.python.org/#branchstatus
FROM python:3.11.4-slim-bullseye@sha256:40319d0a897896e746edf877783ef39685d44e90e1e6de8d964d0382df0d4952 as base
FROM python:3.11.9-slim-bookworm@sha256:8c1036ec919826052306dfb5286e4753ffd9d5f6c24fbc352a5399c3b405b57e as base
FROM base as build
WORKDIR /app
RUN \
Expand Down Expand Up @@ -75,7 +75,7 @@ COPY dojo/ ./dojo/

RUN env DD_SECRET_KEY='.' python3 manage.py collectstatic --noinput && true

FROM nginx:1.26.0-alpine@sha256:ca16009a8c25f52193506d4c90c98efbad4b6cbe73372e2a27972f05c5e02f15
FROM nginx:1.27.0-alpine@sha256:69f8c2c72671490607f52122be2af27d4fc09657ff57e42045801aa93d2090f7
ARG uid=1001
ARG appuser=defectdojo
COPY --from=collectstatic /app/static/ /usr/share/nginx/html/static/
Expand Down
2 changes: 1 addition & 1 deletion components/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "defectdojo",
"version": "2.34.5",
"version": "2.35.0",
"license" : "BSD-3-Clause",
"private": true,
"dependencies": {
Expand Down
3 changes: 3 additions & 0 deletions docker-compose.override.debug.yml
Original file line number Diff line number Diff line change
Expand Up @@ -20,17 +20,20 @@ services:
- '.:/app:z'
environment:
PYTHONWARNINGS: error # We are strict about Warnings during debugging
DD_DEBUG: 'True'
DD_EMAIL_URL: "smtp://mailhog:1025"
celerybeat:
environment:
PYTHONWARNINGS: error # We are strict about Warnings during debugging
DD_DEBUG: 'True'
volumes:
- '.:/app:z'
initializer:
volumes:
- '.:/app:z'
environment:
PYTHONWARNINGS: error # We are strict about Warnings during debugging
DD_DEBUG: 'True'
DD_ADMIN_USER: "${DD_ADMIN_USER:-admin}"
DD_ADMIN_PASSWORD: "${DD_ADMIN_PASSWORD:-admin}"
nginx:
Expand Down
4 changes: 2 additions & 2 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ services:
volumes:
- defectdojo_data:/var/lib/mysql
postgres:
image: postgres:16.2-alpine@sha256:951bfda460300925caa3949eaa092ba022e9aec191bbea9056a39e2382260b27
image: postgres:16.3-alpine@sha256:e89da2c083a5405943408b6807cd1fd25dc9010c1294e30611b841778bedc653
profiles:
- postgres-rabbitmq
- postgres-redis
Expand All @@ -155,7 +155,7 @@ services:
volumes:
- defectdojo_rabbitmq:/var/lib/rabbitmq
redis:
image: redis:7.2.4-alpine@sha256:a40e29800d387e3cf9431902e1e7a362e4d819233d68ae39380532c3310091ac
image: redis:7.2.5-alpine@sha256:0389bb8416d7c6ed065c25745179bf5d358e5d9472dd30a687ab36ffbb650262
profiles:
- mysql-redis
- postgres-redis
Expand Down
20 changes: 19 additions & 1 deletion docs/content/en/contributing/how-to-write-a-parser.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ $ docker-compose build --build-arg uid=1000

## Factory contract

Parser are loaded dynamicaly with a factory pattern. To have your parser loaded and works correctly, you need to implement the contract.
Parsers are loaded dynamically with a factory pattern. To have your parser loaded and work correctly, you need to implement the contract.

1. your parser **MUST** be in a sub-module of module `dojo.tools`
- ex: `dojo.tools.my_tool.parser` module
Expand Down Expand Up @@ -253,6 +253,24 @@ For ex:
self.assertEqual("TEST1", finding.vuln_id_from_tool)
```

### Use with to open example files

In order to make certain that file handles are closed properly, please use the `with` statement to open files.
Instead of:
```python
testfile = open("path_to_file.json")
...
testfile.close()
```

use:
```python
with open("path_to_file.json") as testfile:
...
```

This ensures the file is closed at the end of the with statement, even if an exception occurs somewhere in the block.

### Test database

To run your unit tests locally, you first need to grant some rights. Get your MySQL root password from the docker-compose logs, log in as root and issue the following commands:
Expand Down
12 changes: 12 additions & 0 deletions docs/content/en/getting_started/upgrading/2.35.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
---
title: 'Upgrading to DefectDojo Version 2.35.x'
toc_hide: true
weight: -20240506
description: Integrity checker announced
---

From 2.35.0, DefectDojo will perform an integrity check of the `settings.dist.py` file to ensure it has not been modified. If the user changed this file (in the past or even now), the DefectDojo instance will not start until those changes have been reverted.
Any customization of variables needs to be done via environment variables or in `local_settings.py`.
For more information, check the [Configuration documentation page](https://documentation.defectdojo.com/getting_started/configuration/).

There are no other special instructions for upgrading to 2.35.x. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.35.0) for the contents of the release.
14 changes: 14 additions & 0 deletions docs/content/en/integrations/parsers/file/coverity_scan.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
---
title: "Coverity Scan JSON Report"
toc_hide: true
---
### File Types
This DefectDojo parser accepts JSON files created from the [Synopsys Coverity CLI](https://www.synopsys.com/software-integrity/static-analysis-tools-sast/coverity.html) using the following command: `coverity scan`.

Documentation for CLI can be found [here](https://sig-product-docs.synopsys.com/bundle/coverity-docs/page/cli/topics/using_the_coverity_cli.html).

### Example Commands to retrieve JSON output
Run `coverity scan --project-dir <project_dir> --local <result_file> --local-format json` to create the JSON report.

### Sample Scan Data
Sample Coverity scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/coverity_scan).
2 changes: 1 addition & 1 deletion dojo/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,6 @@
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa: F401

__version__ = '2.34.5'
__version__ = '2.35.0'
__url__ = 'https://github.com/DefectDojo/django-DefectDojo'
__docs__ = 'https://documentation.defectdojo.com'
89 changes: 67 additions & 22 deletions dojo/api_v2/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,7 @@
Vulnerability_Id_Template,
get_current_date,
)
from dojo.risk_acceptance.helper import add_findings_to_risk_acceptance, remove_finding_from_risk_acceptance
from dojo.tools.factory import (
get_choices_sorted,
requires_file,
Expand Down Expand Up @@ -425,6 +426,7 @@ class Meta:


class UserSerializer(serializers.ModelSerializer):
date_joined = serializers.DateTimeField(read_only=True)
last_login = serializers.DateTimeField(read_only=True)
password = serializers.CharField(
write_only=True,
Expand All @@ -450,6 +452,7 @@ class Meta:
"first_name",
"last_name",
"email",
"date_joined",
"last_login",
"is_active",
"is_superuser",
Expand Down Expand Up @@ -1450,6 +1453,29 @@ class RiskAcceptanceSerializer(serializers.ModelSerializer):
decision = serializers.SerializerMethodField()
path = serializers.SerializerMethodField()

def create(self, validated_data):
instance = super().create(validated_data)
add_findings_to_risk_acceptance(instance, instance.accepted_findings.all())
return instance

def update(self, instance, validated_data):
# Determine findings to risk accept, and findings to unaccept risk
existing_findings = Finding.objects.filter(risk_acceptance=self.instance.id)
new_findings_ids = [x.id for x in validated_data.get("accepted_findings", [])]
new_findings = Finding.objects.filter(id__in=new_findings_ids)
findings_to_add = set(new_findings) - set(existing_findings)
findings_to_remove = set(existing_findings) - set(new_findings)
findings_to_add = Finding.objects.filter(id__in=[x.id for x in findings_to_add])
findings_to_remove = Finding.objects.filter(id__in=[x.id for x in findings_to_remove])
# Make the update in the database
instance = super().update(instance, validated_data)
# Add the new findings
add_findings_to_risk_acceptance(instance, findings_to_add)
# Remove the ones that were not present in the payload
for finding in findings_to_remove:
remove_finding_from_risk_acceptance(instance, finding)
return instance

@extend_schema_field(serializers.CharField())
def get_recommendation(self, obj):
return Risk_Acceptance.TREATMENT_TRANSLATIONS.get(obj.recommendation)
Expand Down Expand Up @@ -1483,6 +1509,12 @@ def get_engagement(self, obj):
)

def validate(self, data):
def validate_findings_have_same_engagement(finding_objects: List[Finding]):
engagements = finding_objects.values_list('test__engagement__id', flat=True).distinct().count()
if engagements > 1:
msg = "You are not permitted to add findings from multiple engagements"
raise PermissionDenied(msg)

findings = data.get('accepted_findings', [])
findings_ids = [x.id for x in findings]
finding_objects = Finding.objects.filter(id__in=findings_ids)
Expand All @@ -1491,16 +1523,11 @@ def validate(self, data):
msg = "You are not permitted to add one or more selected findings to this risk acceptance"
raise PermissionDenied(msg)
if self.context["request"].method == "POST":
engagements = finding_objects.values_list('test__engagement__id', flat=True).distinct().count()
if engagements > 1:
msg = "You are not permitted to add findings to a distinct engagement"
raise PermissionDenied(msg)
validate_findings_have_same_engagement(finding_objects)
elif self.context['request'].method in ['PATCH', 'PUT']:
engagement = Engagement.objects.filter(risk_acceptance=self.instance.id).first()
findings = finding_objects.exclude(test__engagement__id=engagement.id)
if len(findings) > 0:
msg = "You are not permitted to add findings to a distinct engagement"
raise PermissionDenied(msg)
existing_findings = Finding.objects.filter(risk_acceptance=self.instance.id)
existing_and_new_findings = existing_findings | finding_objects
validate_findings_have_same_engagement(existing_and_new_findings)
return data

class Meta:
Expand Down Expand Up @@ -2139,7 +2166,7 @@ def set_context(
"""
context = dict(data)
# update some vars
context["scan"] = data.get("file", None)
context["scan"] = data.pop("file", None)
context["environment"] = Development_Environment.objects.get(
name=data.get("environment", "Development")
)
Expand Down Expand Up @@ -2199,12 +2226,15 @@ def process_auto_create_create_context(
# Raise an explicit drf exception here
raise ValidationError(str(e))

def get_importer(self) -> BaseImporter:
def get_importer(
self,
**kwargs: dict,
) -> BaseImporter:
"""
Returns a new instance of an importer that extends
the BaseImporter class
"""
return DefaultImporter()
return DefaultImporter(**kwargs)

def process_scan(
self,
Expand All @@ -2218,8 +2248,9 @@ def process_scan(
Raises exceptions in the event of an error
"""
try:
context["test"], _, _, _, _, _, _ = self.get_importer().process_scan(
**context,
importer = self.get_importer(**context)
context["test"], _, _, _, _, _, _ = importer.process_scan(
context.pop("scan", None)
)
# Update the response body with some new data
if test := context.get("test"):
Expand Down Expand Up @@ -2470,19 +2501,25 @@ def process_auto_create_create_context(
# Raise an explicit drf exception here
raise ValidationError(str(e))

def get_importer(self) -> BaseImporter:
def get_importer(
self,
**kwargs: dict,
) -> BaseImporter:
"""
Returns a new instance of an importer that extends
the BaseImporter class
"""
return DefaultImporter()
return DefaultImporter(**kwargs)

def get_reimporter(self) -> BaseImporter:
def get_reimporter(
self,
**kwargs: dict,
) -> BaseImporter:
"""
Returns a new instance of a reimporter that extends
the BaseImporter class
"""
return DefaultReImporter()
return DefaultReImporter(**kwargs)

def process_scan(
self,
Expand All @@ -2500,14 +2537,22 @@ def process_scan(
try:
if test := context.get("test"):
statistics_before = test.statistics
context["test"], _, _, _, _, _, test_import = self.get_reimporter().process_scan(**context)
context["test"], _, _, _, _, _, test_import = self.get_reimporter(
**context
).process_scan(
context.pop("scan", None)
)
if test_import:
statistics_delta = test_import.statistics
elif context.get("auto_create_context"):
# Attempt to create an engagement
logger.debug("reimport for non-existing test, using import to create new test")
context["engagement"] = auto_create_manager.get_or_create_engagement(**context)
context["test"], _, _, _, _, _, _ = self.get_importer().process_scan(**context)
context["test"], _, _, _, _, _, _ = self.get_importer(
**context
).process_scan(
context.pop("scan", None)
)
else:
msg = "A test could not be found!"
raise NotFound(msg)
Expand Down Expand Up @@ -2957,10 +3002,10 @@ class Meta:
def validate(self, data):
async_updating = getattr(self.instance, 'async_updating', None)
if async_updating:
for field in ['critical', 'high', 'medium', 'low']:
for field in ['critical', 'enforce_critical', 'high', 'enforce_high', 'medium', 'enforce_medium', 'low', 'enforce_low']:
old_days = getattr(self.instance, field, None)
new_days = data.get(field, None)
if old_days and new_days and (old_days != new_days):
if old_days is not None and new_days is not None and (old_days != new_days):
msg = 'Finding SLA expiration dates are currently being calculated. The SLA days for this SLA configuration cannot be changed until the calculation is complete.'
raise serializers.ValidationError(msg)
return data
Expand Down
Loading

0 comments on commit 67a7571

Please sign in to comment.